From f1452a5582b92cec694d722f6791ef645c0dbfc0 Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Fri, 24 May 2024 14:07:14 +0200 Subject: [PATCH 01/62] stashing changes --- package.json | 16 +- tsconfig.base.json | 2 + .../plugins/integration_assistant/README.md | 9 + .../integration_assistant/common/index.ts | 16 + .../integration_assistant/kibana.jsonc | 34 + .../public/api/services/apiRequest.tsx | 79 + .../api/services/categorizationService.tsx | 19 + .../public/api/services/ecsMappingService.tsx | 62 + .../services/integrationBuilderService.tsx | 35 + .../public/api/services/relatedService.tsx | 19 + .../public/api/services/runnableClient.tsx | 14 + .../integration_assistant/public/app.tsx | 90 + .../BuildIntegrationButtons.tsx | 72 + .../components/Buttons/ActionButton.tsx | 19 + .../Buttons/ContinueButton.test.tsx | 41 + .../components/Buttons/ContinueButton.tsx | 41 + .../components/Buttons/GoBackButton.tsx | 26 + .../public/components/Buttons/ResetButton.tsx | 28 + .../Categorization/CategorizationButtons.tsx | 60 + .../public/components/Ecs/EcsButtons.tsx | 56 + .../public/components/Ecs/EcsFileUpload.tsx | 67 + .../public/components/Ecs/EcsForm.tsx | 135 + .../public/components/Ecs/EcsFormStats.tsx | 33 + .../public/components/Ecs/EcsTable.tsx | 180 + .../components/EmptyPrompt/EmptyPrompt.tsx | 19 + .../Headers/IntegrationBuilderHeader.tsx | 21 + .../IntegrationBuilderSteps.tsx | 64 + .../IntegrationResults/DocsResults.tsx | 32 + .../IntegrationResults/PipelineResults.tsx | 32 + .../public/components/Links/CustomLink.tsx | 31 + .../components/Portal/ProgressPortal.tsx | 11 + .../components/Related/RelatedButtons.tsx | 60 + .../public/components/SideNav/SideNav.tsx | 79 + .../ViewResults/ViewResultsButtons.tsx | 15 + .../public/constants/ecsFields.tsx | 2280 ++++++++++ .../public/constants/headerTitles.tsx | 11 + .../public/constants/routePaths.tsx | 11 + .../public/get_message_example.tsx | 85 + .../integration_assistant/public/index.ts | 11 + 
.../BuildIntegrationPage.tsx | 12 + .../IntegrationBuilder/CategorizationPage.tsx | 30 + .../IntegrationBuilder/EcsMapperPage.tsx | 27 + .../pages/IntegrationBuilder/RelatedPage.tsx | 30 + .../IntegrationBuilder/ViewResultsPage.tsx | 33 + .../public/pages/Main/MainPage.tsx | 11 + .../integration_assistant/public/plugin.tsx | 35 + .../public/post_message_example.tsx | 92 + .../public/random_number_between_example.tsx | 87 + .../public/random_number_example.tsx | 67 + .../integration_assistant/public/services.ts | 68 + .../public/stores/integrationBuilderStore.tsx | 184 + .../public/stores/sideNavStore.tsx | 10 + .../public/stores/useGlobalStore.tsx | 35 + .../integration_assistant/public/types.ts | 17 + .../public/types/ApiRequests.tsx | 37 + .../public/types/ApiResponses.tsx | 20 + .../public/types/IntegrationBuilder.tsx | 83 + .../public/types/SideNav.tsx | 4 + .../public/utils/samples.tsx | 78 + .../graphs/categorization/categorization.ts | 30 + .../server/graphs/categorization/constants.ts | 236 + .../server/graphs/categorization/errors.ts | 32 + .../server/graphs/categorization/graph.ts | 175 + .../server/graphs/categorization/index.ts | 1 + .../server/graphs/categorization/invalid.ts | 32 + .../server/graphs/categorization/prompts.ts | 195 + .../server/graphs/categorization/review.ts | 33 + .../server/graphs/categorization/validate.ts | 139 + .../server/graphs/ecs/constants.ts | 3863 +++++++++++++++++ .../server/graphs/ecs/duplicates.ts | 22 + .../server/graphs/ecs/graph.ts | 162 + .../server/graphs/ecs/index.ts | 1 + .../server/graphs/ecs/invalid.ts | 23 + .../server/graphs/ecs/mapping.ts | 23 + .../server/graphs/ecs/missing.ts | 23 + .../server/graphs/ecs/pipeline.ts | 179 + .../server/graphs/ecs/prompts.ts | 173 + .../server/graphs/ecs/validate.ts | 150 + .../server/graphs/related/constants.ts | 53 + .../server/graphs/related/errors.ts | 32 + .../server/graphs/related/graph.ts | 158 + .../server/graphs/related/index.ts | 1 + 
.../server/graphs/related/prompts.ts | 136 + .../server/graphs/related/related.ts | 30 + .../server/graphs/related/review.ts | 30 + .../integration_assistant/server/index.ts | 15 + .../integration_assistant/server/plugin.ts | 34 + .../server/providers/bedrock.ts | 20 + .../server/routes/build_integration_routes.ts | 21 + .../server/routes/categorization_routes.ts | 21 + .../server/routes/ecs_routes.ts | 29 + .../server/routes/index.ts | 9 + .../server/routes/register_routes.ts | 19 + .../server/routes/related_routes.ts | 21 + .../server/templates/pipeline.yml.njk | 131 + .../integration_assistant/server/types.ts | 132 + .../integration_assistant/server/util/es.ts | 83 + .../server/util/pipeline.ts | 21 + .../server/util/samples.ts | 88 + .../integration_assistant/server/util/util.ts | 3 + .../integration_assistant/tsconfig.json | 22 + yarn.lock | 165 +- 102 files changed, 11564 insertions(+), 77 deletions(-) create mode 100644 x-pack/plugins/integration_assistant/README.md create mode 100644 x-pack/plugins/integration_assistant/common/index.ts create mode 100644 x-pack/plugins/integration_assistant/kibana.jsonc create mode 100644 x-pack/plugins/integration_assistant/public/api/services/apiRequest.tsx create mode 100644 x-pack/plugins/integration_assistant/public/api/services/categorizationService.tsx create mode 100644 x-pack/plugins/integration_assistant/public/api/services/ecsMappingService.tsx create mode 100644 x-pack/plugins/integration_assistant/public/api/services/integrationBuilderService.tsx create mode 100644 x-pack/plugins/integration_assistant/public/api/services/relatedService.tsx create mode 100644 x-pack/plugins/integration_assistant/public/api/services/runnableClient.tsx create mode 100644 x-pack/plugins/integration_assistant/public/app.tsx create mode 100644 x-pack/plugins/integration_assistant/public/components/BuildIntegration/BuildIntegrationButtons.tsx create mode 100644 
x-pack/plugins/integration_assistant/public/components/Buttons/ActionButton.tsx create mode 100644 x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.test.tsx create mode 100644 x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.tsx create mode 100644 x-pack/plugins/integration_assistant/public/components/Buttons/GoBackButton.tsx create mode 100644 x-pack/plugins/integration_assistant/public/components/Buttons/ResetButton.tsx create mode 100644 x-pack/plugins/integration_assistant/public/components/Categorization/CategorizationButtons.tsx create mode 100644 x-pack/plugins/integration_assistant/public/components/Ecs/EcsButtons.tsx create mode 100644 x-pack/plugins/integration_assistant/public/components/Ecs/EcsFileUpload.tsx create mode 100644 x-pack/plugins/integration_assistant/public/components/Ecs/EcsForm.tsx create mode 100644 x-pack/plugins/integration_assistant/public/components/Ecs/EcsFormStats.tsx create mode 100644 x-pack/plugins/integration_assistant/public/components/Ecs/EcsTable.tsx create mode 100644 x-pack/plugins/integration_assistant/public/components/EmptyPrompt/EmptyPrompt.tsx create mode 100644 x-pack/plugins/integration_assistant/public/components/Headers/IntegrationBuilderHeader.tsx create mode 100644 x-pack/plugins/integration_assistant/public/components/IntegrationBuilderSteps/IntegrationBuilderSteps.tsx create mode 100644 x-pack/plugins/integration_assistant/public/components/IntegrationResults/DocsResults.tsx create mode 100644 x-pack/plugins/integration_assistant/public/components/IntegrationResults/PipelineResults.tsx create mode 100644 x-pack/plugins/integration_assistant/public/components/Links/CustomLink.tsx create mode 100644 x-pack/plugins/integration_assistant/public/components/Portal/ProgressPortal.tsx create mode 100644 x-pack/plugins/integration_assistant/public/components/Related/RelatedButtons.tsx create mode 100644 
x-pack/plugins/integration_assistant/public/components/SideNav/SideNav.tsx create mode 100644 x-pack/plugins/integration_assistant/public/components/ViewResults/ViewResultsButtons.tsx create mode 100644 x-pack/plugins/integration_assistant/public/constants/ecsFields.tsx create mode 100644 x-pack/plugins/integration_assistant/public/constants/headerTitles.tsx create mode 100644 x-pack/plugins/integration_assistant/public/constants/routePaths.tsx create mode 100644 x-pack/plugins/integration_assistant/public/get_message_example.tsx create mode 100644 x-pack/plugins/integration_assistant/public/index.ts create mode 100644 x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/BuildIntegrationPage.tsx create mode 100644 x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/CategorizationPage.tsx create mode 100644 x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/EcsMapperPage.tsx create mode 100644 x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/RelatedPage.tsx create mode 100644 x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/ViewResultsPage.tsx create mode 100644 x-pack/plugins/integration_assistant/public/pages/Main/MainPage.tsx create mode 100644 x-pack/plugins/integration_assistant/public/plugin.tsx create mode 100644 x-pack/plugins/integration_assistant/public/post_message_example.tsx create mode 100644 x-pack/plugins/integration_assistant/public/random_number_between_example.tsx create mode 100644 x-pack/plugins/integration_assistant/public/random_number_example.tsx create mode 100644 x-pack/plugins/integration_assistant/public/services.ts create mode 100644 x-pack/plugins/integration_assistant/public/stores/integrationBuilderStore.tsx create mode 100644 x-pack/plugins/integration_assistant/public/stores/sideNavStore.tsx create mode 100644 x-pack/plugins/integration_assistant/public/stores/useGlobalStore.tsx create mode 100644 
x-pack/plugins/integration_assistant/public/types.ts create mode 100644 x-pack/plugins/integration_assistant/public/types/ApiRequests.tsx create mode 100644 x-pack/plugins/integration_assistant/public/types/ApiResponses.tsx create mode 100644 x-pack/plugins/integration_assistant/public/types/IntegrationBuilder.tsx create mode 100644 x-pack/plugins/integration_assistant/public/types/SideNav.tsx create mode 100644 x-pack/plugins/integration_assistant/public/utils/samples.tsx create mode 100644 x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/categorization/constants.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/categorization/index.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/categorization/prompts.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/categorization/validate.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/ecs/index.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts create mode 100644 
x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/ecs/prompts.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/related/constants.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/related/errors.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/related/graph.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/related/index.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/related/prompts.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/related/related.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/related/review.ts create mode 100644 x-pack/plugins/integration_assistant/server/index.ts create mode 100644 x-pack/plugins/integration_assistant/server/plugin.ts create mode 100644 x-pack/plugins/integration_assistant/server/providers/bedrock.ts create mode 100644 x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts create mode 100644 x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts create mode 100644 x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts create mode 100644 x-pack/plugins/integration_assistant/server/routes/index.ts create mode 100644 x-pack/plugins/integration_assistant/server/routes/register_routes.ts create mode 100644 x-pack/plugins/integration_assistant/server/routes/related_routes.ts create mode 100644 x-pack/plugins/integration_assistant/server/templates/pipeline.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/types.ts create mode 100644 x-pack/plugins/integration_assistant/server/util/es.ts create mode 100644 x-pack/plugins/integration_assistant/server/util/pipeline.ts create mode 100644 
x-pack/plugins/integration_assistant/server/util/samples.ts create mode 100644 x-pack/plugins/integration_assistant/server/util/util.ts create mode 100644 x-pack/plugins/integration_assistant/tsconfig.json diff --git a/package.json b/package.json index f4f03cb30722e..129523e5fc1ad 100644 --- a/package.json +++ b/package.json @@ -80,7 +80,7 @@ "resolutions": { "**/@bazel/typescript/protobufjs": "6.11.4", "**/@hello-pangea/dnd": "16.6.0", - "**/@langchain/core": "0.1.53", + "**/@langchain/core": "0.2.0", "**/@types/node": "20.10.5", "**/@typescript-eslint/utils": "5.62.0", "**/chokidar": "^3.5.3", @@ -528,6 +528,7 @@ "@kbn/ingest-pipelines-plugin": "link:x-pack/plugins/ingest_pipelines", "@kbn/input-control-vis-plugin": "link:src/plugins/input_control_vis", "@kbn/inspector-plugin": "link:src/plugins/inspector", + "@kbn/integration-assistant-plugin": "link:x-pack/plugins/integration_assistant", "@kbn/interactive-setup-plugin": "link:src/plugins/interactive_setup", "@kbn/interactive-setup-test-endpoints-plugin": "link:test/interactive_setup_api_integration/plugins/test_endpoints", "@kbn/interpreter": "link:packages/kbn-interpreter", @@ -912,9 +913,10 @@ "@kbn/watcher-plugin": "link:x-pack/plugins/watcher", "@kbn/xstate-utils": "link:packages/kbn-xstate-utils", "@kbn/zod-helpers": "link:packages/kbn-zod-helpers", - "@langchain/community": "^0.0.44", - "@langchain/core": "^0.1.53", - "@langchain/openai": "^0.0.25", + "@langchain/community": "^0.2.2", + "@langchain/core": "^0.2.0", + "@langchain/langgraph": "^0.0.20", + "@langchain/openai": "^0.0.33", "@loaders.gl/core": "^3.4.7", "@loaders.gl/json": "^3.4.7", "@loaders.gl/shapefile": "^3.4.7", @@ -1051,8 +1053,8 @@ "jsonwebtoken": "^9.0.2", "jsts": "^1.6.2", "kea": "^2.6.0", - "langchain": "^0.1.30", - "langsmith": "^0.1.14", + "langchain": "^0.2.2", + "langsmith": "^0.1.28", "launchdarkly-js-client-sdk": "^3.1.4", "launchdarkly-node-server-sdk": "^7.0.3", "load-json-file": "^6.2.0", @@ -1077,6 +1079,7 @@ "node-forge": 
"^1.3.1", "nodemailer": "^6.9.9", "normalize-path": "^3.0.0", + "nunjucks": "^3.2.4", "object-hash": "^1.3.1", "object-path-immutable": "^3.1.1", "openai": "^4.24.1", @@ -1487,6 +1490,7 @@ "@types/node-forge": "^1.3.10", "@types/nodemailer": "^6.4.0", "@types/normalize-path": "^3.0.0", + "@types/nunjucks": "^3.2.6", "@types/object-hash": "^1.3.0", "@types/opn": "^5.1.0", "@types/ora": "^1.3.5", diff --git a/tsconfig.base.json b/tsconfig.base.json index 23c8774271af9..5dcb59b73d924 100644 --- a/tsconfig.base.json +++ b/tsconfig.base.json @@ -988,6 +988,8 @@ "@kbn/input-control-vis-plugin/*": ["src/plugins/input_control_vis/*"], "@kbn/inspector-plugin": ["src/plugins/inspector"], "@kbn/inspector-plugin/*": ["src/plugins/inspector/*"], + "@kbn/integration-assistant-plugin": ["x-pack/plugins/integration_assistant"], + "@kbn/integration-assistant-plugin/*": ["x-pack/plugins/integration_assistant/*"], "@kbn/interactive-setup-plugin": ["src/plugins/interactive_setup"], "@kbn/interactive-setup-plugin/*": ["src/plugins/interactive_setup/*"], "@kbn/interactive-setup-test-endpoints-plugin": ["test/interactive_setup_api_integration/plugins/test_endpoints"], diff --git a/x-pack/plugins/integration_assistant/README.md b/x-pack/plugins/integration_assistant/README.md new file mode 100644 index 0000000000000..1ac7540508360 --- /dev/null +++ b/x-pack/plugins/integration_assistant/README.md @@ -0,0 +1,9 @@ +Team owner: Platform + +A working example of a plugin that registers and uses multiple custom routes. 
+ +Read more: + +- [IRouter API Docs](../../docs/development/core/server/kibana-plugin-core-server.irouter.md) +- [HttpHandler (core.http.fetch) API Docs](../../docs/development/core/public/kibana-plugin-core-public.httphandler.md) +- [Routing Conventions](../../STYLEGUIDE.mdx#api-endpoints) \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/common/index.ts b/x-pack/plugins/integration_assistant/common/index.ts new file mode 100644 index 0000000000000..40004e801c99a --- /dev/null +++ b/x-pack/plugins/integration_assistant/common/index.ts @@ -0,0 +1,16 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +export const INTEGRATION_ASSISTANT_BASE_PATH = '/api/integration_assistant'; + +export const ECS_GRAPH_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/ecs`; + +export const CATEGORZATION_GRAPH_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/categorization`; + +export const RELATED_GRAPH_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/related`; + +export const INTEGRATION_BUILDER_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/build`; diff --git a/x-pack/plugins/integration_assistant/kibana.jsonc b/x-pack/plugins/integration_assistant/kibana.jsonc new file mode 100644 index 0000000000000..b972696804d85 --- /dev/null +++ b/x-pack/plugins/integration_assistant/kibana.jsonc @@ -0,0 +1,34 @@ +{ + "type": "plugin", + "id": "@kbn/integration-assistant-plugin", + "owner": "@elastic/security-solution", + "description": "A simple example of how to use core's routing services test", + "plugin": { + "id": "integrationAssistant", + "server": true, + "browser": true, + "configPath": [ + "xpack", + "integration_assistant" + ], + "requiredPlugins": [ + "licensing", + "management", + "features", + "share", + "fileUpload" + ], + "optionalPlugins": [ + "security", + 
"usageCollection", + "console" + ], + "requiredBundles": [ + "esUiShared", + "kibanaReact" + ], + "extraPublicDirs": [ + "common" + ] + } +} diff --git a/x-pack/plugins/integration_assistant/public/api/services/apiRequest.tsx b/x-pack/plugins/integration_assistant/public/api/services/apiRequest.tsx new file mode 100644 index 0000000000000..c82701ed2f130 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/api/services/apiRequest.tsx @@ -0,0 +1,79 @@ +export const apiPostWithFileResponse = async ( + path: string, + body: string, + filename: string, + customHeaders?: Record, +): Promise => { + try { + const url = `${import.meta.env.VITE_BASE_URL}/api/v1/${path}`; + const response = await fetch(url, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'Access-Control-Allow-Origin': '*.sit.estc.dev', + ...customHeaders, + }, + body: body, + }); + + if (!response.ok) { + throw new Error(`HTTP error! status: ${response.status}`); + } + + const blob = await response.blob(); + return new File([blob], filename); + } catch (e) { + console.error(e); + return null; + } +}; + +export const apiPost = async ( + path: string, + body: string | File, + customHeaders?: Record, +): Promise => { + try { + const url = `${import.meta.env.VITE_BASE_URL}/api/v1/${path}`; + const response = await fetch(url, { + method: 'POST', + headers: { + 'Access-Control-Allow-Origin': '*.sit.estc.dev', + 'Content-Type': 'application/json', + ...customHeaders, + }, + body: body, + }); + + if (!response.ok) { + throw new Error(`HTTP error! 
status: ${response.status}`); + } + + return response.json(); + } catch (e) { + console.error(e); + return null; + } +}; + +export const apiGet = async (path: string, customHeaders?: Record): Promise => { + try { + const url = `${import.meta.env.VITE_BASE_URL}/api/v1/${path}`; + const response = await fetch(url, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + ...customHeaders, + }, + }); + + if (!response.ok) { + throw new Error(`HTTP error! status: ${response.status}`); + } + + return response.json(); + } catch (e) { + console.error(e); + return null; + } +}; diff --git a/x-pack/plugins/integration_assistant/public/api/services/categorizationService.tsx b/x-pack/plugins/integration_assistant/public/api/services/categorizationService.tsx new file mode 100644 index 0000000000000..7d51a05473dca --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/api/services/categorizationService.tsx @@ -0,0 +1,19 @@ +import { newRunnable } from '@api/services/runnableClient'; + +export async function getCategorization(req: CategorizationAPIRequest) { + const client = newRunnable('categorization'); + let response = { results: { pipeline: {}, docs: [] } } as CategorizationApiResponse; + try { + response = (await client.invoke({ + package_name: req.packageName, + data_stream_name: req.dataStreamName, + raw_samples: req.formSamples, + current_pipeline: req.ingestPipeline, + })) as CategorizationApiResponse; + } catch (e) { + console.error(e); + return response; + } + + return response; +} diff --git a/x-pack/plugins/integration_assistant/public/api/services/ecsMappingService.tsx b/x-pack/plugins/integration_assistant/public/api/services/ecsMappingService.tsx new file mode 100644 index 0000000000000..8017028358cda --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/api/services/ecsMappingService.tsx @@ -0,0 +1,62 @@ +import { newRunnable } from '@Api/services/runnableClient'; +import { mergeDeeply, traverseAndMatchFields } from 
'@Utils/samples'; + +export function formatEcsResponse( + response: EcsMappingApiResponse, + packageName: string, + dataStreamName: string, + rawSamples: string[], +): EcsMappingTableItem[] { + const rawObjects = rawSamples.map((str) => JSON.parse(str)); + const mergedObject = mergeDeeply(rawObjects); + const matches = traverseAndMatchFields( + response.results.mapping[packageName][dataStreamName], + mergedObject, + packageName, + dataStreamName, + ); + // Sorting the matches by isEcs then alphabetically on sourceField: + matches.sort((a, b) => { + // First, sort by `isEcs` status, true first + if (a.isEcs && !b.isEcs) return -1; + if (!a.isEcs && b.isEcs) return 1; + + // Then, if `isEcs` status is the same, sort alphabetically by `source_field` + return a.sourceField.localeCompare(b.sourceField); + }); + + return matches; +} + +export async function getEcsMapping(req: EcsMappingAPIRequest) { + let response = { results: { mapping: {}, current_pipeline: {} } } as EcsMappingApiResponse; + const client = newRunnable('ecs'); + try { + response = (await client.invoke({ + package_name: req.packageName, + data_stream_name: req.dataStreamName, + raw_samples: req.formSamples, + })) as EcsMappingApiResponse; + } catch (e) { + console.error(e); + return response; + } + return response; +} + +export async function getUpdatedPipeline(req: EcsMappingNewPipelineAPIRequest) { + const client = newRunnable('ecs'); + let response = { results: { mapping: {}, current_pipeline: {} } } as EcsMappingApiResponse; + try { + response = (await client.invoke({ + package_name: req.packageName, + data_stream_name: req.dataStreamName, + raw_samples: req.formSamples, + current_mapping: req.mapping, + })) as EcsMappingApiResponse; + } catch (e) { + console.error(e); + return response; + } + return response; +} diff --git a/x-pack/plugins/integration_assistant/public/api/services/integrationBuilderService.tsx b/x-pack/plugins/integration_assistant/public/api/services/integrationBuilderService.tsx 
new file mode 100644 index 0000000000000..79aaabf043a1c --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/api/services/integrationBuilderService.tsx @@ -0,0 +1,35 @@ +import { apiPostWithFileResponse, apiPost } from '@Api/services/apiRequest'; + +export async function buildIntegration(req: BuildIntegrationAPIRequest) { + const requestBody = { + package_name: req.packageName, + title: req.packageTitle, + description: 'test description', + data_stream: [ + { + name: req.dataStreamName, + type: req.inputTypes, + title: 'Test data stream title', + description: 'Test data stream description', + format: 'json', + samples: req.formSamples, + pipeline: req.ingestPipeline, + docs: req.docs, + }, + ], + }; + + const response = apiPostWithFileResponse( + 'integration_builder/package', + JSON.stringify(requestBody), + `${req.packageName}-${req.packageVersion}.zip`, + ); + return response; +} + +export async function installIntegration(file: File) { + const path = 'api/fleet/epm/packages'; + + const response = apiPost(path, file); + return response; +} diff --git a/x-pack/plugins/integration_assistant/public/api/services/relatedService.tsx b/x-pack/plugins/integration_assistant/public/api/services/relatedService.tsx new file mode 100644 index 0000000000000..cf59ee1823471 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/api/services/relatedService.tsx @@ -0,0 +1,19 @@ +import { newRunnable } from '@api/services/runnableClient'; + +export async function getRelated(req: RelatedAPIRequest) { + const client = newRunnable('related'); + let response = { results: { pipeline: {}, docs: [] } } as RelatedApiResponse; + try { + response = (await client.invoke({ + package_name: req.packageName, + data_stream_name: req.dataStreamName, + raw_samples: req.formSamples, + current_pipeline: req.ingestPipeline, + })) as RelatedApiResponse; + } catch (e) { + console.error(e); + return response; + } + + return response; +} diff --git 
a/x-pack/plugins/integration_assistant/public/api/services/runnableClient.tsx b/x-pack/plugins/integration_assistant/public/api/services/runnableClient.tsx new file mode 100644 index 0000000000000..16a943dc55bcf --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/api/services/runnableClient.tsx @@ -0,0 +1,14 @@ +import { RemoteRunnable } from '@langchain/core/runnables/remote'; + +export const newRunnable = (path: string) => { + const remoteChain = new RemoteRunnable({ + url: `${import.meta.env.VITE_BASE_URL}/api/v1/${path}`, + options: { + timeout: 2000000, + headers: { + 'Access-Control-Allow-Origin': '*.sit.estc.dev', + }, + }, + }); + return remoteChain; +}; diff --git a/x-pack/plugins/integration_assistant/public/app.tsx b/x-pack/plugins/integration_assistant/public/app.tsx new file mode 100644 index 0000000000000..eb9589919d2ab --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/app.tsx @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +import React from 'react'; +import ReactDOM from 'react-dom'; +import { AppMountParameters } from '@kbn/core/public'; +import { + EuiPageTemplate, + EuiPageSection, + EuiText, + EuiHorizontalRule, + EuiListGroup, +} from '@elastic/eui'; +import { RandomNumberRouteExample } from './random_number_example'; +import { RandomNumberBetweenRouteExample } from './random_number_between_example'; +import { Services } from './services'; +import { PostMessageRouteExample } from './post_message_example'; +import { GetMessageRouteExample } from './get_message_example'; + +type Props = Services; + +function RoutingExplorer({ + fetchRandomNumber, + fetchRandomNumberBetween, + addSuccessToast, + postMessage, + getMessageById, +}: Props) { + return ( + + + +

Routing examples

+
+
+ + + + + + + + + + + + + + + + + +
+ ); +} + +export const renderApp = (props: Props, element: AppMountParameters['element']) => { + ReactDOM.render(, element); + + return () => ReactDOM.unmountComponentAtNode(element); +}; diff --git a/x-pack/plugins/integration_assistant/public/components/BuildIntegration/BuildIntegrationButtons.tsx b/x-pack/plugins/integration_assistant/public/components/BuildIntegration/BuildIntegrationButtons.tsx new file mode 100644 index 0000000000000..27db590329bbb --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/BuildIntegration/BuildIntegrationButtons.tsx @@ -0,0 +1,72 @@ +import { EuiFlexGroup } from '@elastic/eui'; +import { useGlobalStore } from '@Stores/useGlobalStore'; + +import { buildIntegration, installIntegration } from '@api/services/integrationBuilderService'; +import RoutePaths from '@Constants/routePaths'; +import ActionButton from '@Components/Buttons/ActionButton'; +import GoBackButton from '@Components/Buttons/GoBackButton'; + +const BuildIntegrationButtons = () => { + const integrationBuilderZipFile = useGlobalStore((state) => state.integrationBuilderZipFile); + const packageName = useGlobalStore((state) => state.packageName); + const packageTitle = useGlobalStore((state) => state.packageTitle); + const packageVersion = useGlobalStore((state) => state.packageVersion); + const dataStreamName = useGlobalStore((state) => state.dataStreamName); + const inputTypes = useGlobalStore((state) => state.inputTypes); + const formSamples = useGlobalStore((state) => state.formSamples); + const ingestPipeline = useGlobalStore((state) => state.ingestPipeline); + const docs = useGlobalStore((state) => state.docs); + + const setIntegrationBuilderZipFile = useGlobalStore((state) => state.setIntegrationBuilderZipFile); + const setIntegrationBuilderStepsState = useGlobalStore((state) => state.setIntegrationBuilderStepsState); + + const onBuildClick = async () => { + const req = { + packageName, + packageTitle, + packageVersion, + dataStreamName, + 
inputTypes, + formSamples, + ingestPipeline, + docs, + }; + const response = await buildIntegration(req); + if (response) { + setIntegrationBuilderZipFile(response); + console.log('Integration built successfully', response.name); + setIntegrationBuilderStepsState('integrationBuilderStep5', 'complete'); + } + }; + + const onDownloadClick = () => { + if (integrationBuilderZipFile) { + const url = window.URL.createObjectURL(integrationBuilderZipFile); + const a = document.createElement('a'); + a.href = url; + a.download = integrationBuilderZipFile.name; + document.body.appendChild(a); + a.click(); + a.remove(); + window.URL.revokeObjectURL(url); + } + }; + + const onInstallClick = async () => { + if (integrationBuilderZipFile) { + installIntegration(integrationBuilderZipFile); + } + console.log('installed'); + }; + + return ( + + + + + + + ); +}; + +export default BuildIntegrationButtons; diff --git a/x-pack/plugins/integration_assistant/public/components/Buttons/ActionButton.tsx b/x-pack/plugins/integration_assistant/public/components/Buttons/ActionButton.tsx new file mode 100644 index 0000000000000..83f6f4be3735a --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/Buttons/ActionButton.tsx @@ -0,0 +1,19 @@ +import { EuiButton } from '@elastic/eui'; +import { MouseEventHandler } from 'react'; + +interface ActionButtonProps { + text: string; + onActionClick: MouseEventHandler; + isLoading?: boolean; + isDisabled?: boolean; +} + +const ActionButton = ({ text, onActionClick, isLoading = false, isDisabled = false }: ActionButtonProps) => { + return ( + + {text} + + ); +}; + +export default ActionButton; diff --git a/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.test.tsx b/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.test.tsx new file mode 100644 index 0000000000000..3bdfeeefb520b --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.test.tsx 
@@ -0,0 +1,41 @@ +import { beforeEach, describe, expect, it } from 'vitest'; +import { act, render, screen } from '@testing-library/react'; +import { useGlobalStore } from '@Stores/useGlobalStore'; +import { userEvent } from '@testing-library/user-event'; +import ContinueButton from '@Components/Buttons/ContinueButton'; +import RoutePaths from '@Constants/routePaths'; +import { BrowserRouter as Router } from 'react-router-dom'; + +describe('ContinueButton Tests', () => { + beforeEach(() => { + useGlobalStore.getState().setContinueButtonState('ecsButtonContinue', false); + useGlobalStore.getState().setSelected(RoutePaths.ECS_MAPPING_PATH); + }); + describe('Click', () => { + it('Check State changes', async () => { + useGlobalStore.getState().setSelected(RoutePaths.ECS_MAPPING_PATH); + render( + + + , + ); + const user = userEvent.setup(); + await act(async () => { + await user.click(await screen.getByText('Continue')); + }); + expect(useGlobalStore.getState().selected === RoutePaths.INGEST_PIPELINES_PATH).toBe(true); + }); + }); + describe('Rendering', () => { + it('ContinueButton Render', async () => { + const ecsButtonContinue = useGlobalStore.getState().ecsButtonContinue; + render( + + + , + ); + expect(screen.getByLabelText('continue-button')).toBeDefined(); + expect(screen.getByLabelText('continue-button')).toBeDisabled(); + }); + }); +}); diff --git a/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.tsx b/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.tsx new file mode 100644 index 0000000000000..8370c76f95bbf --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.tsx @@ -0,0 +1,41 @@ +import { EuiButton } from '@elastic/eui'; +import { useGlobalStore } from '@Stores/useGlobalStore'; +import { useNavigate } from 'react-router-dom'; + +interface ContinueButtonProps { + continuePath: string; + isDisabled: boolean; + currentStep: string; + completeStep: string 
+} + +const ContinueButton = ({ continuePath, isDisabled, currentStep, completeStep }: ContinueButtonProps) => { + const setSelected = useGlobalStore((state) => state.setSelected); + const setIntegrationBuilderStepsState = useGlobalStore((state) => state.setIntegrationBuilderStepsState); + + const navigate = useNavigate(); + const selectAndNavigate = (path) => { + setSelected(path); + navigate(path); + }; + + const onContinueClick = () => { + selectAndNavigate(continuePath); + setIntegrationBuilderStepsState(completeStep, 'complete'); + setIntegrationBuilderStepsState(currentStep, 'current'); + }; + + return ( + + Continue + + ); +}; + +export default ContinueButton; diff --git a/x-pack/plugins/integration_assistant/public/components/Buttons/GoBackButton.tsx b/x-pack/plugins/integration_assistant/public/components/Buttons/GoBackButton.tsx new file mode 100644 index 0000000000000..a5c3209dec9dc --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/Buttons/GoBackButton.tsx @@ -0,0 +1,26 @@ +import { EuiButton } from '@elastic/eui'; +import { useGlobalStore } from '@Stores/useGlobalStore'; +import RoutePaths from '@Constants/routePaths'; +import { useNavigate } from 'react-router-dom'; + +interface GoBackButtonProps { + path: RoutePaths; +} + +const GoBackButton = ({ path }: GoBackButtonProps) => { + const setSelected = useGlobalStore((state) => state.setSelected); + const navigate = useNavigate(); + + const onGoBackClick = () => { + setSelected(path); + navigate(-1); + }; + + return ( + + Go Back + + ); +}; + +export default GoBackButton; diff --git a/x-pack/plugins/integration_assistant/public/components/Buttons/ResetButton.tsx b/x-pack/plugins/integration_assistant/public/components/Buttons/ResetButton.tsx new file mode 100644 index 0000000000000..9a8f77445a70f --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/Buttons/ResetButton.tsx @@ -0,0 +1,28 @@ +import { EuiButton } from '@elastic/eui'; +import { useGlobalStore } 
from '@Stores/useGlobalStore'; + +const ResetButton = () => { + const resetEcsMappingFormState = useGlobalStore((state) => state.resetEcsMappingFormState); + const resetChainItemsState = useGlobalStore((state) => state.resetChainItemsState); + const resetEcsMappingTableState = useGlobalStore((state) => state.resetEcsMappingTableState); + const resetIntegrationBuilderStepsState = useGlobalStore((state) => state.resetIntegrationBuilderStepsState); + const resetContinueButtonState = useGlobalStore((state) => state.resetContinueButtonState); + const resetIsLoadingState = useGlobalStore((state) => state.resetIsLoadingState); + + const onResetClick = () => { + resetEcsMappingFormState(); + resetChainItemsState(); + resetEcsMappingTableState(); + resetIntegrationBuilderStepsState(); + resetContinueButtonState(); + resetIsLoadingState(); + }; + + return ( + + Reset + + ); +}; + +export default ResetButton; diff --git a/x-pack/plugins/integration_assistant/public/components/Categorization/CategorizationButtons.tsx b/x-pack/plugins/integration_assistant/public/components/Categorization/CategorizationButtons.tsx new file mode 100644 index 0000000000000..32ad72c34d9c5 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/Categorization/CategorizationButtons.tsx @@ -0,0 +1,60 @@ +import { useGlobalStore } from '@Stores/useGlobalStore'; +import { EuiFlexGroup } from '@elastic/eui'; +import { getCategorization } from '@Api/services/categorizationService'; +import RoutePaths from '@Constants/routePaths'; + +import ContinueButton from '@Components/Buttons/ContinueButton'; +import ActionButton from '@Components/Buttons/ActionButton'; +import GoBackButton from '@Components/Buttons/GoBackButton'; + +const CategorizationButtons = () => { + const packageName = useGlobalStore((state) => state.packageName); + const dataStreamName = useGlobalStore((state) => state.dataStreamName); + const formSamples = useGlobalStore((state) => state.formSamples); + const 
categorizationIsLoading = useGlobalStore((state) => state.categorizationIsLoading); + const categorizationButtonContinue = useGlobalStore((state) => state.categorizationButtonContinue); + const ingestPipeline = useGlobalStore((state) => state.ingestPipeline); + const setIsLoadingState = useGlobalStore((state) => state.setIsLoadingState); + const setIntegrationBuilderChainItemsState = useGlobalStore((state) => state.setIntegrationBuilderChainItemsState); + const setContinueButtonState = useGlobalStore((state) => state.setContinueButtonState); + const setIsPortalLoadingState = useGlobalStore((state) => state.setIsPortalLoadingState); + + const onCreateCategorizationClick = async () => { + setIsLoadingState('categorizationIsLoading', true); + setIsPortalLoadingState(true); + if (ingestPipeline === undefined) { + setIsLoadingState('categorizationIsLoading', false); + setIsPortalLoadingState(false); + return; + } + const req = { packageName, dataStreamName, formSamples, ingestPipeline }; + const response = await getCategorization(req); + if (response.results.pipeline !== undefined) { + setIntegrationBuilderChainItemsState('ingestPipeline', response.results.pipeline); + setIntegrationBuilderChainItemsState('docs', response.results.docs); + setContinueButtonState('categorizationButtonContinue', true); + } + setIsLoadingState('categorizationIsLoading', false); + setIsPortalLoadingState(false); + }; + + return ( + + + + + + ); +}; + +export default CategorizationButtons; diff --git a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsButtons.tsx b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsButtons.tsx new file mode 100644 index 0000000000000..3b464b3e1aae9 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsButtons.tsx @@ -0,0 +1,56 @@ +import { EuiFlexGroup } from '@elastic/eui'; +import { useGlobalStore } from '@Stores/useGlobalStore'; +import { getEcsMapping, formatEcsResponse } from 
'@Api/services/ecsMappingService'; +import RoutePaths from '@Constants/routePaths'; +import ContinueButton from '@Components/Buttons/ContinueButton'; +import ActionButton from '@Components/Buttons/ActionButton'; +import ResetButton from '@components/Buttons/ResetButton'; + +const EcsButtons = () => { + const packageName = useGlobalStore((state) => state.packageName); + const dataStreamName = useGlobalStore((state) => state.dataStreamName); + const formSamples = useGlobalStore((state) => state.formSamples); + const ecsMappingIsLoading = useGlobalStore((state) => state.ecsMappingIsLoading); + const ecsButtonContinue = useGlobalStore((state) => state.ecsButtonContinue); + const setEcsMappingTableState = useGlobalStore((state) => state.setEcsMappingTableState); + const setEcsMappingTableItemsWithEcs = useGlobalStore((state) => state.setEcsMappingTableItemsWithEcs); + const setIntegrationBuilderChainItemsState = useGlobalStore((state) => state.setIntegrationBuilderChainItemsState); + const setIsLoadingState = useGlobalStore((state) => state.setIsLoadingState); + const setContinueButtonState = useGlobalStore((state) => state.setContinueButtonState); + const setIsPortalLoadingState = useGlobalStore((state) => state.setIsPortalLoadingState); + + const onCreateEcsMappingClick = async () => { + setIsLoadingState('ecsMappingIsLoading', true); + setIsPortalLoadingState(true); + const req = { packageName, dataStreamName, formSamples }; + const response = await getEcsMapping(req); + if (response.results.mapping !== undefined) { + setIntegrationBuilderChainItemsState('mapping', response.results.mapping); + setIntegrationBuilderChainItemsState('ingestPipeline', response.results.current_pipeline); + + const formatedEcsTableData = formatEcsResponse(response, packageName, dataStreamName, formSamples); + setEcsMappingTableState(formatedEcsTableData); + + const count = formatedEcsTableData.filter((item) => item.isEcs === true).length; + setEcsMappingTableItemsWithEcs(count); + + 
setContinueButtonState('ecsButtonContinue', true); + } + setIsLoadingState('ecsMappingIsLoading', false); + setIsPortalLoadingState(false); + }; + return ( + + + + + + ); +}; + +export default EcsButtons; diff --git a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsFileUpload.tsx b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsFileUpload.tsx new file mode 100644 index 0000000000000..af0d942ac139b --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsFileUpload.tsx @@ -0,0 +1,67 @@ +import { EuiFilePicker, useGeneratedHtmlId } from '@elastic/eui'; +import { useGlobalStore } from '@Stores/useGlobalStore'; + +const EcsFileUpload = () => { + const filePickerId = useGeneratedHtmlId({ prefix: 'filePicker' }); + const addFormSamples = useGlobalStore((state) => state.addFormSamples); + + const onHandleFileChange = (files: FileList | null) => { + if (!files) return; + + Array.from(files).forEach(processFile); + }; + + const processFile = (file: File) => { + if (!file.name.endsWith('.ndjson')) { + console.warn(`Skipped file ${file.name}, unsupported file extension.`); + return; + } + + const reader = new FileReader(); + + reader.onload = handleFileLoad; + reader.onerror = handleFileError; + + reader.readAsText(file); + }; + + const handleFileLoad = (e: ProgressEvent) => { + const text = e.target?.result; + if (typeof text !== 'string') return; + + const validLines = validateAndExtractLines(text); + if (validLines.length === 0) return; + addFormSamples(validLines); + }; + + const validateAndExtractLines = (text: string): string[] => { + const validLines: string[] = []; + text.split('\n').forEach((line, index) => { + try { + if (line.trim()) { + JSON.parse(line); + validLines.push(line); + } + } catch (error) { + console.error(`Error parsing line ${index + 1}: ${line}`, error); + } + }); + return validLines; + }; + + const handleFileError = (e: ProgressEvent) => { + console.error('Failed to read file:', e); + }; + + 
return ( + + ); +}; + +export default EcsFileUpload; diff --git a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsForm.tsx b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsForm.tsx new file mode 100644 index 0000000000000..fa97e8a07129b --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsForm.tsx @@ -0,0 +1,135 @@ +import { + EuiForm, + EuiFieldText, + EuiFormRow, + EuiComboBox, + EuiPanel, + EuiFlexGroup, + EuiSpacer, + EuiSelect, + useGeneratedHtmlId, +} from '@elastic/eui'; +import EcsButtons from '@components/Ecs/EcsButtons'; +import { useGlobalStore } from '@Stores/useGlobalStore'; +import EcsFileUpload from '@Components/Ecs/EcsFileUpload'; + +const EcsForm = () => { + const packageName = useGlobalStore((state) => state.packageName); + const packageTitle = useGlobalStore((state) => state.packageTitle); + const packageVersion = useGlobalStore((state) => state.packageVersion); + const dataStreamName = useGlobalStore((state) => state.dataStreamName); + const dataStreamTitle = useGlobalStore((state) => state.dataStreamTitle); + const logFormat = useGlobalStore((state) => state.logFormat); + const inputTypes = useGlobalStore((state) => state.inputTypes); + const setEcsMappingFormValue = useGlobalStore((state) => state.setEcsMappingFormValue); + const setEcsMappingFormArrayValue = useGlobalStore((state) => state.setEcsMappingFormArrayValue); + const selectLogId = useGeneratedHtmlId({ prefix: 'log_format' }); + const handleFormStateChange = (key: string, value: string) => { + setEcsMappingFormValue(key, value); + }; + const onInputTypeChange = (selected) => { + setEcsMappingFormArrayValue( + 'inputTypes', + selected.map((item) => item.label), + ); + }; + + return ( + + + + + handleFormStateChange('packageName', e.target.value)} + /> + + + handleFormStateChange('packageTitle', e.target.value)} + /> + + + handleFormStateChange('packageVersion', e.target.value)} + /> + + + handleFormStateChange('dataStreamName', 
e.target.value)} + /> + + + handleFormStateChange('dataStreamTitle', e.target.value)} + /> + + + handleFormStateChange('logFormat', e.target.value)} + aria-label="log-format-select" + /> + + + ({ label: type }))} + options={[ + { label: 'filestream', content: 'Log File' }, + { label: 'tcp', content: 'TCP' }, + { label: 'udp', content: 'UDP' }, + { label: 'cel', content: 'HTTP API' }, + { label: 'gcp-pubsub', content: 'GCP Pubsub' }, + { label: 'gcs', content: 'Google Cloud Storage' }, + { label: 'http_endpoint', content: 'Incoming HTTP Webhooks' }, + { label: 'journald', content: 'JournalD' }, + { label: 'kafka', content: 'Kafka' }, + { label: 'cloudfoundry', content: 'CloudFoundry' }, + { label: 'aws-cloudwatch', content: 'AWS Cloudwatch' }, + { label: 'aws-s3', content: 'AWS S3' }, + { label: 'azure-blob-storage', content: 'Azure Blob Storage' }, + { label: 'azure-eventhub', content: 'Azure Eventhub' }, + ]} + onChange={onInputTypeChange} + /> + + + + + + + + + + ); +}; + +export default EcsForm; diff --git a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsFormStats.tsx b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsFormStats.tsx new file mode 100644 index 0000000000000..34e383584a7e2 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsFormStats.tsx @@ -0,0 +1,33 @@ +import { EuiStat, EuiFlexGroup, EuiFlexItem, EuiPanel } from '@elastic/eui'; +import { useGlobalStore } from '@Stores/useGlobalStore'; + +const EcsFormStats = () => { + const sampleCount = useGlobalStore((state) => state.sampleCount); + const uniqueKeysCount = useGlobalStore((state) => state.uniqueKeysCount); + const ecsMappingTableItemsWithEcs = useGlobalStore((state) => state.ecsMappingTableItemsWithEcs); + + return ( + + + + + + + + + + + + + + + + + ); +}; + +export default EcsFormStats; diff --git a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsTable.tsx 
b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsTable.tsx new file mode 100644 index 0000000000000..db954ea90423a --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsTable.tsx @@ -0,0 +1,180 @@ +import { + EuiBasicTable, + EuiFlexItem, + EuiButtonEmpty, + EuiPopover, + EuiText, + EuiBasicTableColumn, + EuiInlineEditText, + EuiFlexGroup, + EuiPanel, +} from '@elastic/eui'; +import { useState } from 'react'; +import { getUpdatedPipeline } from '@Api/services/ecsMappingService'; +import { useGlobalStore } from '@Stores/useGlobalStore'; + +const EcsTable = () => { + const packageName = useGlobalStore((state) => state.packageName); + const dataStreamName = useGlobalStore((state) => state.dataStreamName); + const formSamples = useGlobalStore((state) => state.formSamples); + const ecsMappingIsLoading = useGlobalStore((state) => state.ecsMappingIsLoading); + const ecsMappingTablePopoverState = useGlobalStore((state) => state.ecsMappingTablePopoverState); + const ecsMappingTableState = useGlobalStore((state) => state.ecsMappingTableState); + const mapping = useGlobalStore((state) => state.mapping); + const setEcsMappingTablePopoverState = useGlobalStore((state) => state.setEcsMappingTablePopoverState); + const setIntegrationBuilderChainItemsState = useGlobalStore((state) => state.setIntegrationBuilderChainItemsState); + const setContinueButtonState = useGlobalStore((state) => state.setContinueButtonState); + const setIsLoadingState = useGlobalStore((state) => state.setIsLoadingState); + const updateEcsMappingTableItem = useGlobalStore((state) => state.updateEcsMappingTableItem); + const updateChainItem = useGlobalStore((state) => state.updateChainItem); + const [pageIndex, setPageIndex] = useState(0); + const [pageSize, setPageSize] = useState(10); + const closePopover = (id) => { + setEcsMappingTablePopoverState(id); + }; + + const onSaveDestinationFieldClick = async (id, newDestinationField, sourceField) => { + 
setIsLoadingState('ecsMappingIsLoading', true); + setContinueButtonState('ecsButtonContinue', false); + updateEcsMappingTableItem(id, newDestinationField); + updateChainItem(`${packageName}.${dataStreamName}.${sourceField}`, newDestinationField, 'mapping'); + + const req = { packageName, dataStreamName, formSamples, mapping }; + const response = await getUpdatedPipeline(req); + if (response.results.mapping !== undefined) { + setIntegrationBuilderChainItemsState('mapping', response.results.mapping); + setContinueButtonState('ecsButtonContinue', true); + } + if (response.results.current_pipeline !== undefined) { + setIntegrationBuilderChainItemsState('ingestPipeline', response.results.current_pipeline); + setContinueButtonState('ecsButtonContinue', true); + } + setIsLoadingState('ecsMappingIsLoading', false); + }; + + const onViewDocumentationButtonClick = (id) => { + setEcsMappingTablePopoverState(id); + }; + const onTableChange = ({ page }) => { + if (page) { + const { index: pageIndex, size: pageSize } = page; + setPageIndex(pageIndex); + setPageSize(pageSize); + } + }; + + const getEcsTablePage = (fields: EcsMappingTableItem[], pageIndex: number, pageSize: number) => { + let pageOfItems; + + if (!pageIndex && !pageSize) { + pageOfItems = fields; + } else { + const startIndex = pageIndex * pageSize; + pageOfItems = fields.slice(startIndex, Math.min(startIndex + pageSize, fields.length)); + } + + return { + pageOfItems, + totalItemCount: fields.length, + }; + }; + + const { pageOfItems, totalItemCount } = getEcsTablePage(ecsMappingTableState, pageIndex, pageSize); + + const pagination = { + pageIndex, + pageSize, + totalItemCount, + pageSizeOptions: [10, 0], + showPerPageOptions: true, + }; + + const columns: Array> = [ + { + field: 'sourceField', + name: 'Source Field', + truncateText: true, + dataType: 'string', + width: '20%', + }, + { + field: 'destinationField', + name: 'Destination Field', + truncateText: true, + width: '20%', + render: (destinationField, 
item) => { + const label = `destination-field-${item.id}`; + return ( + + onSaveDestinationFieldClick(item.id, newDestinationField, item.sourceField) + } + defaultValue={destinationField} + placeholder="destination.field.name" + /> + ); + }, + }, + { + field: 'isEcs', + name: 'ECS Field', + dataType: 'boolean', + textOnly: true, + width: '5%', + render: (isEcs) => (isEcs ? 'Yes' : 'No'), + }, + { + field: 'exampleValue', + name: 'Example Value', + dataType: 'string', + textOnly: true, + truncateText: true, + width: '15%', + }, + { + field: 'description', + name: 'Documentation', + width: '10%', + render: (description, item) => { + const button = ( + onViewDocumentationButtonClick(item.id)} + > + View Documentation + + ); + return ( + closePopover(item.id)} + > + {description || 'No documentation available'} + + ); + }, + }, + ]; + + return ( + + + + + + + + ); +}; + +export default EcsTable; diff --git a/x-pack/plugins/integration_assistant/public/components/EmptyPrompt/EmptyPrompt.tsx b/x-pack/plugins/integration_assistant/public/components/EmptyPrompt/EmptyPrompt.tsx new file mode 100644 index 0000000000000..e17be1430d695 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/EmptyPrompt/EmptyPrompt.tsx @@ -0,0 +1,19 @@ +import { EuiPageTemplate } from '@elastic/eui'; +import GoBackButton from '@Components/Buttons/GoBackButton'; +import RoutePaths from '@Constants/routePaths'; + +interface EmptyPromptProps { + title: string; + description: string; + goBackPath: RoutePaths; +} + +const EmptyPrompt = ({ title, description, goBackPath }: EmptyPromptProps) => { + return ( + {title}} actions={}> + {description} + + ); +}; + +export default EmptyPrompt; diff --git a/x-pack/plugins/integration_assistant/public/components/Headers/IntegrationBuilderHeader.tsx b/x-pack/plugins/integration_assistant/public/components/Headers/IntegrationBuilderHeader.tsx new file mode 100644 index 0000000000000..bd86b792c3a85 --- /dev/null +++ 
b/x-pack/plugins/integration_assistant/public/components/Headers/IntegrationBuilderHeader.tsx @@ -0,0 +1,21 @@ +import { EuiPageTemplate } from '@elastic/eui'; +import { useLocation } from 'react-router-dom'; +import IntegrationBuilderSteps from '@Components/IntegrationBuilderSteps/IntegrationBuilderSteps'; +import ProgressPortal from '@Components/Portal/ProgressPortal'; +import { useGlobalStore } from '@Stores/useGlobalStore'; +import HeaderTitles from '@Constants/headerTitles'; + +const IntegrationBuilderHeader = () => { + const location = useLocation(); + const isPortalLoading = useGlobalStore((state) => state.isPortalLoading); + const pageTitle = HeaderTitles[location.pathname as keyof typeof HeaderTitles] || 'Unknown Page'; + return ( + <> + + {pageTitle && pageTitle != 'Base Page' && } + {isPortalLoading && } + + ); +}; + +export default IntegrationBuilderHeader; diff --git a/x-pack/plugins/integration_assistant/public/components/IntegrationBuilderSteps/IntegrationBuilderSteps.tsx b/x-pack/plugins/integration_assistant/public/components/IntegrationBuilderSteps/IntegrationBuilderSteps.tsx new file mode 100644 index 0000000000000..470c23e1717a3 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/IntegrationBuilderSteps/IntegrationBuilderSteps.tsx @@ -0,0 +1,64 @@ +import { EuiStepsHorizontal, EuiStepsHorizontalProps } from '@elastic/eui'; +import { useGlobalStore } from '@Stores/useGlobalStore'; +import { useNavigate } from 'react-router-dom'; + +import RoutePaths from '@Constants/routePaths'; + +const IntegrationBuilderSteps = () => { + const step1 = useGlobalStore((state) => state.integrationBuilderStep1); + const step2 = useGlobalStore((state) => state.integrationBuilderStep2); + const step3 = useGlobalStore((state) => state.integrationBuilderStep3); + const step4 = useGlobalStore((state) => state.integrationBuilderStep4); + const step5 = useGlobalStore((state) => state.integrationBuilderStep5); + + const setSelected = 
useGlobalStore((state) => state.setSelected); + + const navigate = useNavigate(); + + const selectAndNavigate = (path) => { + setSelected(path); + navigate(path); + }; + + const horizontalSteps = [ + { + title: 'ECS Mapping', + status: step1, + onClick: () => { + selectAndNavigate(RoutePaths.ECS_MAPPING_PATH); + }, + }, + { + title: 'Add Categorization', + status: step2, + onClick: () => { + selectAndNavigate(RoutePaths.CATEGORIZATION_PATH); + }, + }, + { + title: 'Add Related Fields', + status: step3, + onClick: () => { + selectAndNavigate(RoutePaths.RELATED_PATH); + }, + }, + { + title: 'View Results', + status: step4, + onClick: () => { + selectAndNavigate(RoutePaths.INTEGRATION_BUILDER_RESULTS_PATH); + }, + }, + { + title: 'Build & Deploy', + status: step5, + onClick: () => { + selectAndNavigate(RoutePaths.INTEGRATION_BUILDER_BUILD_PATH); + }, + }, + ] as EuiStepsHorizontalProps['steps']; + + return ; +}; + +export default IntegrationBuilderSteps; diff --git a/x-pack/plugins/integration_assistant/public/components/IntegrationResults/DocsResults.tsx b/x-pack/plugins/integration_assistant/public/components/IntegrationResults/DocsResults.tsx new file mode 100644 index 0000000000000..f896b96cd0af4 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/IntegrationResults/DocsResults.tsx @@ -0,0 +1,32 @@ +import { EuiCodeBlock, EuiAccordion, EuiPanel, useGeneratedHtmlId } from '@elastic/eui'; +import { css } from '@emotion/react'; + +interface DocsResultsProps { + docs: Array; +} + +const DocResults = ({ docs }: DocsResultsProps) => { + const simpleAccordionId = useGeneratedHtmlId({ prefix: 'docs_results' }); + + return ( +
+ div:nth-child(2) { + block-size: auto !important; + } + `} + > + + + {JSON.stringify(docs, null, 2)} + + + +
+ ); +}; + +export default DocResults; diff --git a/x-pack/plugins/integration_assistant/public/components/IntegrationResults/PipelineResults.tsx b/x-pack/plugins/integration_assistant/public/components/IntegrationResults/PipelineResults.tsx new file mode 100644 index 0000000000000..395926f4e3e39 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/IntegrationResults/PipelineResults.tsx @@ -0,0 +1,32 @@ +import { EuiCodeBlock, EuiAccordion, EuiPanel, useGeneratedHtmlId } from '@elastic/eui'; +import { css } from '@emotion/react'; + +interface PipelineResultsProps { + pipeline: object; +} + +const PipelineResults = ({ pipeline }: PipelineResultsProps) => { + const simpleAccordionId = useGeneratedHtmlId({ prefix: 'ingest_pipeline_results' }); + + return ( +
+ div:nth-child(2) { + block-size: auto !important; + } + `} + > + + + {JSON.stringify(pipeline, null, 2)} + + + +
+ ); +}; + +export default PipelineResults; diff --git a/x-pack/plugins/integration_assistant/public/components/Links/CustomLink.tsx b/x-pack/plugins/integration_assistant/public/components/Links/CustomLink.tsx new file mode 100644 index 0000000000000..a78ea50f8e5c4 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/Links/CustomLink.tsx @@ -0,0 +1,31 @@ +import { EuiLink } from '@elastic/eui'; +import { useNavigate } from 'react-router'; + +const isModifiedEvent = (event) => !!(event.metaKey || event.altKey || event.ctrlKey || event.shiftKey); + +const isLeftClickEvent = (event) => event.button === 0; + +export default function CustomLink({ to, ...props }) { + const navigate = useNavigate(); + + function onClick(event) { + if (event.defaultPrevented) { + return; + } + + // If target prop is set (e.g. to "_blank"), let browser handle link. + if (event.target.getAttribute('target')) { + return; + } + + if (isModifiedEvent(event) || !isLeftClickEvent(event)) { + return; + } + + // Prevent regular link behavior, which causes a browser refresh. 
+ event.preventDefault(); + navigate(to); + } + + return ; +} diff --git a/x-pack/plugins/integration_assistant/public/components/Portal/ProgressPortal.tsx b/x-pack/plugins/integration_assistant/public/components/Portal/ProgressPortal.tsx new file mode 100644 index 0000000000000..df81bd15a0e85 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/Portal/ProgressPortal.tsx @@ -0,0 +1,11 @@ +import { EuiPortal, EuiProgress } from '@elastic/eui'; + +const ProgressPortal = () => { + return ( + + + + ); +}; + +export default ProgressPortal; diff --git a/x-pack/plugins/integration_assistant/public/components/Related/RelatedButtons.tsx b/x-pack/plugins/integration_assistant/public/components/Related/RelatedButtons.tsx new file mode 100644 index 0000000000000..26971736e969f --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/Related/RelatedButtons.tsx @@ -0,0 +1,60 @@ +import { useGlobalStore } from '@Stores/useGlobalStore'; +import { EuiFlexGroup } from '@elastic/eui'; +import { getCategorization } from '@Api/services/categorizationService'; +import RoutePaths from '@Constants/routePaths'; + +import ContinueButton from '@Components/Buttons/ContinueButton'; +import ActionButton from '@Components/Buttons/ActionButton'; +import GoBackButton from '@Components/Buttons/GoBackButton'; + +const RelatedButtons = () => { + const packageName = useGlobalStore((state) => state.packageName); + const dataStreamName = useGlobalStore((state) => state.dataStreamName); + const formSamples = useGlobalStore((state) => state.formSamples); + const relatedIsLoading = useGlobalStore((state) => state.relatedIsLoading); + const relatedButtonContinue = useGlobalStore((state) => state.relatedButtonContinue); + const ingestPipeline = useGlobalStore((state) => state.ingestPipeline); + const setIsLoadingState = useGlobalStore((state) => state.setIsLoadingState); + const setIntegrationBuilderChainItemsState = useGlobalStore((state) => 
state.setIntegrationBuilderChainItemsState); + const setContinueButtonState = useGlobalStore((state) => state.setContinueButtonState); + const setIsPortalLoadingState = useGlobalStore((state) => state.setIsPortalLoadingState); + + const onCreateCategorizationClick = async () => { + setIsLoadingState('relatedIsLoading', true); + setIsPortalLoadingState(true); + if (ingestPipeline === undefined) { + setIsLoadingState('relatedIsLoading', false); + setIsPortalLoadingState(false); + return; + } + const req = { packageName, dataStreamName, formSamples, ingestPipeline }; + const response = await getCategorization(req); + if (response.results.pipeline !== undefined) { + setIntegrationBuilderChainItemsState('ingestPipeline', response.results.pipeline); + setIntegrationBuilderChainItemsState('docs', response.results.docs); + setContinueButtonState('relatedButtonContinue', true); + } + setIsLoadingState('relatedIsLoading', false); + setIsPortalLoadingState(false); + }; + + return ( + + + + + + ); +}; + +export default RelatedButtons; diff --git a/x-pack/plugins/integration_assistant/public/components/SideNav/SideNav.tsx b/x-pack/plugins/integration_assistant/public/components/SideNav/SideNav.tsx new file mode 100644 index 0000000000000..6e33e7dbe7b21 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/SideNav/SideNav.tsx @@ -0,0 +1,79 @@ +import { useNavigate, useLocation } from 'react-router-dom'; +import { useEffect } from 'react'; +import { EuiSideNav, EuiIcon } from '@elastic/eui'; +import { useGlobalStore } from '@Stores/useGlobalStore'; +import RoutePaths from '@Constants/routePaths'; + +const SideNav = () => { + const navigate = useNavigate(); + const location = useLocation(); + const selected = useGlobalStore((state) => state.selected); + const setSelected = useGlobalStore((state) => state.setSelected); + const selectItem = (name) => { + setSelected(name); + navigate(name); + }; + useEffect(() => { + if (!selected) { + 
setSelected(location.pathname); + } + }, []); + + return ( + , + id: '0', + href: '/', + items: [ + { + name: 'Integration Builder', + icon: , + id: '0.1', + items: [ + { + name: 'ECS Mapping', + id: '0.1.0', + isSelected: selected === RoutePaths.ECS_MAPPING_PATH, + onClick: () => selectItem(RoutePaths.ECS_MAPPING_PATH), + }, + { + name: 'Add Categorization', + id: '0.1.1', + isSelected: selected === RoutePaths.CATEGORIZATION_PATH, + onClick: () => selectItem(RoutePaths.CATEGORIZATION_PATH), + href: '#', + }, + { + name: 'Add Related Fields', + id: '0.1.2', + isSelected: selected === RoutePaths.RELATED_PATH, + onClick: () => selectItem(RoutePaths.RELATED_PATH), + href: '#', + }, + { + name: 'View Results', + id: '0.1.3', + isSelected: selected === RoutePaths.INTEGRATION_BUILDER_RESULTS_PATH, + onClick: () => selectItem(RoutePaths.INTEGRATION_BUILDER_RESULTS_PATH), + href: '#', + }, + { + name: 'Build & Deploy', + id: '0.1.4', + isSelected: selected === RoutePaths.INTEGRATION_BUILDER_BUILD_PATH, + onClick: () => selectItem(RoutePaths.INTEGRATION_BUILDER_BUILD_PATH), + href: '#', + }, + ], + }, + ], + }, + ]} + /> + ); +}; + +export default SideNav; diff --git a/x-pack/plugins/integration_assistant/public/components/ViewResults/ViewResultsButtons.tsx b/x-pack/plugins/integration_assistant/public/components/ViewResults/ViewResultsButtons.tsx new file mode 100644 index 0000000000000..c1ea0d1becb29 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/ViewResults/ViewResultsButtons.tsx @@ -0,0 +1,15 @@ +import { EuiFlexGroup } from '@elastic/eui'; +import ContinueButton from '@Components/Buttons/ContinueButton'; +import GoBackButton from '@Components/Buttons/GoBackButton'; +import RoutePaths from '@Constants/routePaths'; + +const ViewResults = () => { + return ( + + + + + ); +}; + +export default ViewResults; diff --git a/x-pack/plugins/integration_assistant/public/constants/ecsFields.tsx 
b/x-pack/plugins/integration_assistant/public/constants/ecsFields.tsx new file mode 100644 index 0000000000000..73cacd4b6a863 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/constants/ecsFields.tsx @@ -0,0 +1,2280 @@ +export const ECSFIELDS = { + '@timestamp': + 'Date/time when the event originated.\nThis is the date/time extracted from the event, typically representing when the event was generated by the source.\nIf the event source has no original timestamp, this value is typically populated by the first time the event was received by the pipeline.\nRequired field for all events.', + 'agent.build.original': + 'Extended build information for the agent.\nThis field is intended to contain any build information that a data source may provide, no specific formatting is required.', + 'agent.ephemeral_id': + 'Ephemeral identifier of this agent (if one exists).\nThis id normally changes across restarts, but `agent.id` does not.', + 'agent.id': 'Unique identifier of this agent (if one exists).\nExample: For Beats this would be beat.id.', + 'agent.name': + 'Custom name of the agent.\nThis is a name that can be given to an agent. This can be helpful if for example two Filebeat instances are running on the same host but a human readable separation is needed on which Filebeat instance data is coming from.', + 'agent.type': + 'Type of the agent.\nThe agent type always stays the same and should be given by the agent used. In case of Filebeat the agent would always be Filebeat also if two Filebeat instances are run on the same machine.', + 'agent.version': 'Version of the agent.', + 'client.address': + 'Some event client addresses are defined ambiguously. The event will sometimes list an IP, a domain or a unix socket. You should always store the raw address in the `.address` field.\nThen it should be duplicated to `.ip` or `.domain`, depending on which one it is.', + 'client.as.number': + 'Unique number allocated to the autonomous system. 
The autonomous system number (ASN) uniquely identifies each network on the Internet.', + 'client.as.organization.name': 'Organization name.', + 'client.bytes': 'Bytes sent from the client to the server.', + 'client.domain': + 'The domain name of the client system.\nThis value may be a host name, a fully qualified domain name, or another host naming format. The value may derive from the original event or be added from enrichment.', + 'client.geo.city_name': 'City name.', + 'client.geo.continent_code': "Two-letter code representing continent's name.", + 'client.geo.continent_name': 'Name of the continent.', + 'client.geo.country_iso_code': 'Country ISO code.', + 'client.geo.country_name': 'Country name.', + 'client.geo.location': 'Longitude and latitude.', + 'client.geo.name': + 'User-defined description of a location, at the level of granularity they care about.\nCould be the name of their data centers, the floor number, if this describes a local physical entity, city names.\nNot typically used in automated geolocation.', + 'client.geo.postal_code': + 'Postal code associated with the location.\nValues appropriate for this field may also be known as a postcode or ZIP code and will vary widely from country to country.', + 'client.geo.region_iso_code': 'Region ISO code.', + 'client.geo.region_name': 'Region name.', + 'client.geo.timezone': 'The time zone of the location, such as IANA time zone name.', + 'client.ip': 'IP address of the client (IPv4 or IPv6).', + 'client.mac': + 'MAC address of the client.\nThe notation format from RFC 7042 is suggested: Each octet (that is, 8-bit byte) is represented by two [uppercase] hexadecimal digits giving the value of the octet as an unsigned integer. Successive octets are separated by a hyphen.', + 'client.nat.ip': + 'Translated IP of source based NAT sessions (e.g. 
internal client to internet).\nTypically connections traversing load balancers, firewalls, or routers.', + 'client.nat.port': + 'Translated port of source based NAT sessions (e.g. internal client to internet).\nTypically connections traversing load balancers, firewalls, or routers.', + 'client.packets': 'Packets sent from the client to the server.', + 'client.port': 'Port of the client.', + 'client.registered_domain': + 'The highest registered client domain, stripped of the subdomain.\nFor example, the registered domain for "foo.example.com" is "example.com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). Trying to approximate this by simply taking the last two labels will not work well for TLDs such as "co.uk".', + 'client.subdomain': + 'The subdomain portion of a fully qualified domain name includes all of the names except the host name under the registered_domain. In a partially qualified domain, or if the the qualification level of the full name cannot be determined, subdomain contains all of the names below the registered domain.\nFor example the subdomain portion of "www.east.mydomain.co.uk" is "east". If the domain has multiple levels of subdomain, such as "sub2.sub1.example.com", the subdomain field should contain "sub2.sub1", with no trailing period.', + 'client.top_level_domain': + 'The effective top level domain (eTLD), also known as the domain suffix, is the last part of the domain name. For example, the top level domain for example.com is "com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). 
Trying to approximate this by simply taking the last label will not work well for effective TLDs such as "co.uk".', + 'client.user.domain': + 'Name of the directory the user is a member of.\nFor example, an LDAP or Active Directory domain name.', + 'client.user.email': 'User email address.', + 'client.user.full_name': "User's full name, if available.", + 'client.user.group.domain': + 'Name of the directory the group is a member of.\nFor example, an LDAP or Active Directory domain name.', + 'client.user.group.id': 'Unique identifier for the group on the system/platform.', + 'client.user.group.name': 'Name of the group.', + 'client.user.hash': + 'Unique user hash to correlate information for a user in anonymized form.\nUseful if `user.id` or `user.name` contain confidential information and cannot be used.', + 'client.user.id': 'Unique identifier of the user.', + 'client.user.name': 'Short name or login of the user.', + 'client.user.roles': 'Array of user roles at the time of the event.', + 'cloud.account.id': + 'The cloud account or organization id used to identify different entities in a multi-tenant environment.\nExamples: AWS account id, Google Cloud ORG Id, or other unique identifier.', + 'cloud.account.name': + 'The cloud account name or alias used to identify different entities in a multi-tenant environment.\nExamples: AWS account name, Google Cloud ORG display name.', + 'cloud.availability_zone': 'Availability zone in which this host, resource, or service is located.', + 'cloud.instance.id': 'Instance ID of the host machine.', + 'cloud.instance.name': 'Instance name of the host machine.', + 'cloud.machine.type': 'Machine type of the host machine.', + 'cloud.origin.account.id': + 'The cloud account or organization id used to identify different entities in a multi-tenant environment.\nExamples: AWS account id, Google Cloud ORG Id, or other unique identifier.', + 'cloud.origin.account.name': + 'The cloud account name or alias used to identify different entities 
in a multi-tenant environment.\nExamples: AWS account name, Google Cloud ORG display name.', + 'cloud.origin.availability_zone': 'Availability zone in which this host, resource, or service is located.', + 'cloud.origin.instance.id': 'Instance ID of the host machine.', + 'cloud.origin.instance.name': 'Instance name of the host machine.', + 'cloud.origin.machine.type': 'Machine type of the host machine.', + 'cloud.origin.project.id': 'The cloud project identifier.\nExamples: Google Cloud Project id, Azure Project id.', + 'cloud.origin.project.name': 'The cloud project name.\nExamples: Google Cloud Project name, Azure Project name.', + 'cloud.origin.provider': 'Name of the cloud provider. Example values are aws, azure, gcp, or digitalocean.', + 'cloud.origin.region': 'Region in which this host, resource, or service is located.', + 'cloud.origin.service.name': + 'The cloud service name is intended to distinguish services running on different platforms within a provider, eg AWS EC2 vs Lambda, GCP GCE vs App Engine, Azure VM vs App Server.\nExamples: app engine, app service, cloud run, fargate, lambda.', + 'cloud.project.id': 'The cloud project identifier.\nExamples: Google Cloud Project id, Azure Project id.', + 'cloud.project.name': 'The cloud project name.\nExamples: Google Cloud Project name, Azure Project name.', + 'cloud.provider': 'Name of the cloud provider. 
Example values are aws, azure, gcp, or digitalocean.', + 'cloud.region': 'Region in which this host, resource, or service is located.', + 'cloud.service.name': + 'The cloud service name is intended to distinguish services running on different platforms within a provider, eg AWS EC2 vs Lambda, GCP GCE vs App Engine, Azure VM vs App Server.\nExamples: app engine, app service, cloud run, fargate, lambda.', + 'cloud.target.account.id': + 'The cloud account or organization id used to identify different entities in a multi-tenant environment.\nExamples: AWS account id, Google Cloud ORG Id, or other unique identifier.', + 'cloud.target.account.name': + 'The cloud account name or alias used to identify different entities in a multi-tenant environment.\nExamples: AWS account name, Google Cloud ORG display name.', + 'cloud.target.availability_zone': 'Availability zone in which this host, resource, or service is located.', + 'cloud.target.instance.id': 'Instance ID of the host machine.', + 'cloud.target.instance.name': 'Instance name of the host machine.', + 'cloud.target.machine.type': 'Machine type of the host machine.', + 'cloud.target.project.id': 'The cloud project identifier.\nExamples: Google Cloud Project id, Azure Project id.', + 'cloud.target.project.name': 'The cloud project name.\nExamples: Google Cloud Project name, Azure Project name.', + 'cloud.target.provider': 'Name of the cloud provider. Example values are aws, azure, gcp, or digitalocean.', + 'cloud.target.region': 'Region in which this host, resource, or service is located.', + 'cloud.target.service.name': + 'The cloud service name is intended to distinguish services running on different platforms within a provider, eg AWS EC2 vs Lambda, GCP GCE vs App Engine, Azure VM vs App Server.\nExamples: app engine, app service, cloud run, fargate, lambda.', + 'container.cpu.usage': + 'Percent CPU used which is normalized by the number of CPU cores and it ranges from 0 to 1. 
Scaling factor: 1000.', + 'container.disk.read.bytes': + 'The total number of bytes (gauge) read successfully (aggregated from all disks) since the last metric collection.', + 'container.disk.write.bytes': + 'The total number of bytes (gauge) written successfully (aggregated from all disks) since the last metric collection.', + 'container.id': 'Unique container id.', + 'container.image.hash.all': + 'An array of digests of the image the container was built on. Each digest consists of the hash algorithm and value in this format: `algorithm:value`. Algorithm names should align with the field names in the ECS hash field set.', + 'container.image.name': 'Name of the image the container was built on.', + 'container.image.tag': 'Container image tags.', + 'container.labels': 'Image labels.', + 'container.memory.usage': 'Memory usage percentage and it ranges from 0 to 1. Scaling factor: 1000.', + 'container.name': 'Container name.', + 'container.network.egress.bytes': + 'The number of bytes (gauge) sent out on all network interfaces by the container since the last metric collection.', + 'container.network.ingress.bytes': + 'The number of bytes received (gauge) on all network interfaces by the container since the last metric collection.', + 'container.runtime': 'Runtime managing this container.', + 'data_stream.dataset': + 'The field can contain anything that makes sense to signify the source of the data.\nExamples include `nginx.access`, `prometheus`, `endpoint` etc. For data streams that otherwise fit, but that do not have dataset set we use the value "generic" for the dataset value. `event.dataset` should have the same value as `data_stream.dataset`.\nBeyond the Elasticsearch data stream naming criteria noted above, the `dataset` value has additional restrictions:\n * Must not contain `-`\n * No longer than 100 characters', + 'data_stream.namespace': + 'A user defined namespace. 
Namespaces are useful to allow grouping of data.\nMany users already organize their indices this way, and the data stream naming scheme now provides this best practice as a default. Many users will populate this field with `default`. If no value is used, it falls back to `default`.\nBeyond the Elasticsearch index naming criteria noted above, `namespace` value has the additional restrictions:\n * Must not contain `-`\n * No longer than 100 characters', + 'data_stream.type': + 'An overarching type for the data stream.\nCurrently allowed values are "logs" and "metrics". We expect to also add "traces" and "synthetics" in the near future.', + 'destination.address': + 'Some event destination addresses are defined ambiguously. The event will sometimes list an IP, a domain or a unix socket. You should always store the raw address in the `.address` field.\nThen it should be duplicated to `.ip` or `.domain`, depending on which one it is.', + 'destination.as.number': + 'Unique number allocated to the autonomous system. The autonomous system number (ASN) uniquely identifies each network on the Internet.', + 'destination.as.organization.name': 'Organization name.', + 'destination.bytes': 'Bytes sent from the destination to the source.', + 'destination.domain': + 'The domain name of the destination system.\nThis value may be a host name, a fully qualified domain name, or another host naming format. 
The value may derive from the original event or be added from enrichment.', + 'destination.geo.city_name': 'City name.', + 'destination.geo.continent_code': "Two-letter code representing continent's name.", + 'destination.geo.continent_name': 'Name of the continent.', + 'destination.geo.country_iso_code': 'Country ISO code.', + 'destination.geo.country_name': 'Country name.', + 'destination.geo.location': 'Longitude and latitude.', + 'destination.geo.name': + 'User-defined description of a location, at the level of granularity they care about.\nCould be the name of their data centers, the floor number, if this describes a local physical entity, city names.\nNot typically used in automated geolocation.', + 'destination.geo.postal_code': + 'Postal code associated with the location.\nValues appropriate for this field may also be known as a postcode or ZIP code and will vary widely from country to country.', + 'destination.geo.region_iso_code': 'Region ISO code.', + 'destination.geo.region_name': 'Region name.', + 'destination.geo.timezone': 'The time zone of the location, such as IANA time zone name.', + 'destination.ip': 'IP address of the destination (IPv4 or IPv6).', + 'destination.mac': + 'MAC address of the destination.\nThe notation format from RFC 7042 is suggested: Each octet (that is, 8-bit byte) is represented by two [uppercase] hexadecimal digits giving the value of the octet as an unsigned integer. Successive octets are separated by a hyphen.', + 'destination.nat.ip': + 'Translated ip of destination based NAT sessions (e.g. 
internet to private DMZ)\nTypically used with load balancers, firewalls, or routers.', + 'destination.nat.port': + 'Port the source session is translated to by NAT Device.\nTypically used with load balancers, firewalls, or routers.', + 'destination.packets': 'Packets sent from the destination to the source.', + 'destination.port': 'Port of the destination.', + 'destination.registered_domain': + 'The highest registered destination domain, stripped of the subdomain.\nFor example, the registered domain for "foo.example.com" is "example.com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). Trying to approximate this by simply taking the last two labels will not work well for TLDs such as "co.uk".', + 'destination.subdomain': + 'The subdomain portion of a fully qualified domain name includes all of the names except the host name under the registered_domain. In a partially qualified domain, or if the the qualification level of the full name cannot be determined, subdomain contains all of the names below the registered domain.\nFor example the subdomain portion of "www.east.mydomain.co.uk" is "east". If the domain has multiple levels of subdomain, such as "sub2.sub1.example.com", the subdomain field should contain "sub2.sub1", with no trailing period.', + 'destination.top_level_domain': + 'The effective top level domain (eTLD), also known as the domain suffix, is the last part of the domain name. For example, the top level domain for example.com is "com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). 
Trying to approximate this by simply taking the last label will not work well for effective TLDs such as "co.uk".', + 'destination.user.domain': + 'Name of the directory the user is a member of.\nFor example, an LDAP or Active Directory domain name.', + 'destination.user.email': 'User email address.', + 'destination.user.full_name': "User's full name, if available.", + 'destination.user.group.domain': + 'Name of the directory the group is a member of.\nFor example, an LDAP or Active Directory domain name.', + 'destination.user.group.id': 'Unique identifier for the group on the system/platform.', + 'destination.user.group.name': 'Name of the group.', + 'destination.user.hash': + 'Unique user hash to correlate information for a user in anonymized form.\nUseful if `user.id` or `user.name` contain confidential information and cannot be used.', + 'destination.user.id': 'Unique identifier of the user.', + 'destination.user.name': 'Short name or login of the user.', + 'destination.user.roles': 'Array of user roles at the time of the event.', + 'device.id': + 'The unique identifier of a device. The identifier must not change across application sessions but stay fixed for an instance of a (mobile) device. \nOn iOS, this value must be equal to the vendor identifier (https://developer.apple.com/documentation/uikit/uidevice/1620059-identifierforvendor). 
On Android, this value must be equal to the Firebase Installation ID or a globally unique UUID which is persisted across sessions in your application.\nFor GDPR and data protection law reasons this identifier should not carry information that would allow to identify a user.', + 'device.manufacturer': 'The vendor name of the device manufacturer.', + 'device.model.identifier': 'The machine readable identifier of the device model.', + 'device.model.name': 'The human readable marketing name of the device model.', + 'dll.code_signature.digest_algorithm': + 'The hashing algorithm used to sign the process.\nThis value can distinguish signatures when a file is signed multiple times by the same signer but with a different digest algorithm.', + 'dll.code_signature.exists': 'Boolean to capture if a signature is present.', + 'dll.code_signature.signing_id': + 'The identifier used to sign the process.\nThis is used to identify the application manufactured by a software vendor. The field is relevant to Apple *OS only.', + 'dll.code_signature.status': + 'Additional information about the certificate status.\nThis is useful for logging cryptographic errors with the certificate validity or trust status. Leave unpopulated if the validity or trust of the certificate was unchecked.', + 'dll.code_signature.subject_name': 'Subject name of the code signer', + 'dll.code_signature.team_id': + 'The team identifier used to sign the process.\nThis is used to identify the team or vendor of a software product. 
The field is relevant to Apple *OS only.', + 'dll.code_signature.timestamp': 'Date and time when the code signature was generated and signed.', + 'dll.code_signature.trusted': + 'Stores the trust status of the certificate chain.\nValidating the trust of the certificate chain may be complicated, and this field should only be populated by tools that actively check the status.', + 'dll.code_signature.valid': + 'Boolean to capture if the digital signature is verified against the binary content.\nLeave unpopulated if a certificate was unchecked.', + 'dll.hash.md5': 'MD5 hash.', + 'dll.hash.sha1': 'SHA1 hash.', + 'dll.hash.sha256': 'SHA256 hash.', + 'dll.hash.sha384': 'SHA384 hash.', + 'dll.hash.sha512': 'SHA512 hash.', + 'dll.hash.ssdeep': 'SSDEEP hash.', + 'dll.hash.tlsh': 'TLSH hash.', + 'dll.name': 'Name of the library.\nThis generally maps to the name of the file on disk.', + 'dll.path': 'Full file path of the library.', + 'dll.pe.architecture': 'CPU architecture target for the file.', + 'dll.pe.company': 'Internal company name of the file, provided at compile-time.', + 'dll.pe.description': 'Internal description of the file, provided at compile-time.', + 'dll.pe.file_version': 'Internal version of the file, provided at compile-time.', + 'dll.pe.go_import_hash': + 'A hash of the Go language imports in a PE file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', + 'dll.pe.go_imports': 'List of imported Go language element names and types.', + 'dll.pe.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'dll.pe.go_imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of Go imports.', + 'dll.pe.go_stripped': + 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', + 'dll.pe.imphash': + 'A hash of the imports in a PE file. An imphash -- or import hash -- can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nLearn more at https://www.fireeye.com/blog/threat-research/2014/01/tracking-malware-import-hashing.html.', + 'dll.pe.import_hash': + 'A hash of the imports in a PE file. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a synonym for imphash.', + 'dll.pe.imports': 'List of imported element names and types.', + 'dll.pe.imports_names_entropy': 'Shannon entropy calculation from the list of imported element names and types.', + 'dll.pe.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'dll.pe.original_file_name': 'Internal name of the file, provided at compile-time.', + 'dll.pe.pehash': + 'A hash of the PE header and data from one or more PE sections. 
An pehash can be used to cluster files by transforming structural information about a file into a hash value.\nLearn more at https://www.usenix.org/legacy/events/leet09/tech/full_papers/wicherski/wicherski_html/index.html.', + 'dll.pe.product': 'Internal product name of the file, provided at compile-time.', + 'dll.pe.sections': + 'An array containing an object for each section of the PE file.\nThe keys that should be present in these objects are defined by sub-fields underneath `pe.sections.*`.', + 'dll.pe.sections.entropy': 'Shannon entropy calculation from the section.', + 'dll.pe.sections.name': 'PE Section List name.', + 'dll.pe.sections.physical_size': 'PE Section List physical size.', + 'dll.pe.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'dll.pe.sections.virtual_size': 'PE Section List virtual size. This is always the same as `physical_size`.', + 'dns.answers': + 'An array containing an object for each answer section returned by the server.\nThe main keys that should be present in these objects are defined by ECS. Records that have more information may contain more keys than what ECS defines.\nNot all DNS data sources give all details about DNS answers. At minimum, answer objects must contain the `data` key. If more information is available, map as much of it to ECS as possible, and add any additional fields to the answer objects as custom fields.', + 'dns.answers.class': 'The class of DNS data contained in this resource record.', + 'dns.answers.data': + 'The data describing the resource.\nThe meaning of this data depends on the type and class of the resource record.', + 'dns.answers.name': + "The domain name to which this resource record pertains.\nIf a chain of CNAME is being resolved, each answer's `name` should be the one that corresponds with the answer's `data`. 
It should not simply be the original `question.name` repeated.", + 'dns.answers.ttl': + 'The time interval in seconds that this resource record may be cached before it should be discarded. Zero values mean that the data should not be cached.', + 'dns.answers.type': 'The type of data contained in this resource record.', + 'dns.header_flags': 'Array of 2 letter DNS header flags.', + 'dns.id': + 'The DNS packet identifier assigned by the program that generated the query. The identifier is copied to the response.', + 'dns.op_code': + 'The DNS operation code that specifies the kind of query in the message. This value is set by the originator of a query and copied into the response.', + 'dns.question.class': 'The class of records being queried.', + 'dns.question.name': + 'The name being queried.\nIf the name field contains non-printable characters (below 32 or above 126), those characters should be represented as escaped base 10 integers (\\DDD). Back slashes and quotes should be escaped. Tabs, carriage returns, and line feeds should be converted to \\t, \\r, and \\n respectively.', + 'dns.question.registered_domain': + 'The highest registered domain, stripped of the subdomain.\nFor example, the registered domain for "foo.example.com" is "example.com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). Trying to approximate this by simply taking the last two labels will not work well for TLDs such as "co.uk".', + 'dns.question.subdomain': + 'The subdomain is all of the labels under the registered_domain.\nIf the domain has multiple levels of subdomain, such as "sub2.sub1.example.com", the subdomain field should contain "sub2.sub1", with no trailing period.', + 'dns.question.top_level_domain': + 'The effective top level domain (eTLD), also known as the domain suffix, is the last part of the domain name. 
For example, the top level domain for example.com is "com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). Trying to approximate this by simply taking the last label will not work well for effective TLDs such as "co.uk".', + 'dns.question.type': 'The type of record being queried.', + 'dns.resolved_ip': + 'Array containing all IPs seen in `answers.data`.\nThe `answers` array can be difficult to use, because of the variety of data formats it can contain. Extracting all IP addresses seen in there to `dns.resolved_ip` makes it possible to index them as IP addresses, and makes them easier to visualize and query for.', + 'dns.response_code': 'The DNS response code.', + 'dns.type': + 'The type of DNS event captured, query or answer.\nIf your source of DNS events only gives you DNS queries, you should only create dns events of type `dns.type:query`.\nIf your source of DNS events gives you answers as well, you should create one event per query (optionally as soon as the query is seen). And a second event containing all query details as well as an array of answers.', + 'ecs.version': + 'ECS version this event conforms to. 
`ecs.version` is a required field and must exist in all events.\nWhen querying across multiple indices -- which may conform to slightly different ECS versions -- this field lets integrations adjust to the schema version of the events.', + 'email.attachments': 'A list of objects describing the attachment files sent along with an email message.', + 'email.attachments.file.extension': 'Attachment file extension, excluding the leading dot.', + 'email.attachments.file.hash.md5': 'MD5 hash.', + 'email.attachments.file.hash.sha1': 'SHA1 hash.', + 'email.attachments.file.hash.sha256': 'SHA256 hash.', + 'email.attachments.file.hash.sha384': 'SHA384 hash.', + 'email.attachments.file.hash.sha512': 'SHA512 hash.', + 'email.attachments.file.hash.ssdeep': 'SSDEEP hash.', + 'email.attachments.file.hash.tlsh': 'TLSH hash.', + 'email.attachments.file.mime_type': + 'The MIME media type of the attachment.\nThis value will typically be extracted from the `Content-Type` MIME header field.', + 'email.attachments.file.name': 'Name of the attachment file including the file extension.', + 'email.attachments.file.size': 'Attachment file size in bytes.', + 'email.bcc.address': 'The email address of BCC recipient', + 'email.cc.address': 'The email address of CC recipient', + 'email.content_type': 'Information about how the message is to be displayed.\nTypically a MIME type.', + 'email.delivery_timestamp': 'The date and time when the email message was received by the service or client.', + 'email.direction': 'The direction of the message based on the sending and receiving domains.', + 'email.from.address': 'The email address of the sender, typically from the RFC 5322 `From:` header field.', + 'email.local_id': + 'Unique identifier given to the email by the source that created the event.\nIdentifier is not persistent across hops.', + 'email.message_id': + 'Identifier from the RFC 5322 `Message-ID:` email header that refers to a particular email message.', + 'email.origination_timestamp': + 'The 
date and time the email message was composed. Many email clients will fill in this value automatically when the message is sent by a user.', + 'email.reply_to.address': + 'The address that replies should be delivered to based on the value in the RFC 5322 `Reply-To:` header.', + 'email.sender.address': 'Per RFC 5322, specifies the address responsible for the actual transmission of the message.', + 'email.subject': 'A brief summary of the topic of the message.', + 'email.to.address': 'The email address of recipient', + 'email.x_mailer': 'The name of the application that was used to draft and send the original email message.', + 'error.code': 'Error code describing the error.', + 'error.id': 'Unique identifier for the error.', + 'error.message': 'Error message.', + 'error.stack_trace': 'The stack trace of this error in plain text.', + 'error.type': 'The type of the error, for example the class name of the exception.', + 'event.action': + 'The action captured by the event.\nThis describes the information in the event. It is more specific than `event.category`. Examples are `group-add`, `process-started`, `file-created`. The value is normally defined by the implementer.', + 'event.agent_id_status': + "Agents are normally responsible for populating the `agent.id` field value. If the system receiving events is capable of validating the value based on authentication information for the client then this field can be used to reflect the outcome of that validation.\nFor example if the agent's connection is authenticated with mTLS and the client cert contains the ID of the agent to which the cert was issued then the `agent.id` value in events can be checked against the certificate. 
If the values match then `event.agent_id_status: verified` is added to the event, otherwise one of the other allowed values should be used.\nIf no validation is performed then the field should be omitted.\nThe allowed values are:\n`verified` - The `agent.id` field value matches expected value obtained from auth metadata.\n`mismatch` - The `agent.id` field value does not match the expected value obtained from auth metadata.\n`missing` - There was no `agent.id` field in the event to validate.\n`auth_metadata_missing` - There was no auth metadata or it was missing information about the agent ID.", + 'event.category': + 'This is one of four ECS Categorization Fields, and indicates the second level in the ECS category hierarchy.\n`event.category` represents the "big buckets" of ECS categories. For example, filtering on `event.category:process` yields all events relating to process activity. This field is closely related to `event.type`, which is used as a subcategory.\nThis field is an array. This will allow proper categorization of some events that fall in multiple categories.', + 'event.code': + 'Identification code for this event, if one exists.\nSome event sources use event codes to identify messages unambiguously, regardless of message language or wording adjustments over time. An example of this is the Windows Event ID.', + 'event.created': + "event.created contains the date/time when the event was first read by an agent, or by your pipeline.\nThis field is distinct from @timestamp in that @timestamp typically contain the time extracted from the original event.\nIn most situations, these two timestamps will be slightly different. The difference can be used to calculate the delay between your source generating an event, and the time when your agent first processed it. 
This can be used to monitor your agent's or pipeline's ability to keep up with your event source.\nIn case the two timestamps are identical, @timestamp should be used.", + 'event.dataset': + "Name of the dataset.\nIf an event source publishes more than one type of log or events (e.g. access log, error log), the dataset is used to specify which one the event comes from.\nIt's recommended but not required to start the dataset name with the module name, followed by a dot, then the dataset name.", + 'event.duration': + 'Duration of the event in nanoseconds.\nIf event.start and event.end are known this value should be the difference between the end and start time.', + 'event.end': 'event.end contains the date when the event ended or when the activity was last observed.', + 'event.hash': 'Hash (perhaps logstash fingerprint) of raw field to be able to demonstrate log integrity.', + 'event.id': 'Unique ID to describe the event.', + 'event.ingested': + "Timestamp when an event arrived in the central data store.\nThis is different from `@timestamp`, which is when the event originally occurred. It's also different from `event.created`, which is meant to capture the first time an agent saw the event.\nIn normal conditions, assuming no tampering, the timestamps should chronologically look like this: `@timestamp` < `event.created` < `event.ingested`.", + 'event.kind': + 'This is one of four ECS Categorization Fields, and indicates the highest level in the ECS category hierarchy.\n`event.kind` gives high-level information about what type of information the event contains, without being specific to the contents of the event. For example, values of this field distinguish alert events from metric events.\nThe value of this field can be used to inform how these kinds of events should be handled. 
They may warrant different retention, different access control, it may also help understand whether the data coming in at a regular interval or not.', + 'event.module': + 'Name of the module this data is coming from.\nIf your monitoring agent supports the concept of modules or plugins to process events of a given source (e.g. Apache logs), `event.module` should contain the name of this module.', + 'event.original': + 'Raw text message of entire event. Used to demonstrate log integrity or where the full log message (before splitting it up in multiple parts) may be required, e.g. for reindex.\nThis field is not indexed and doc_values are disabled. It cannot be searched, but it can be retrieved from `_source`. If users wish to override this and index this field, please see `Field data types` in the `Elasticsearch Reference`.', + 'event.outcome': + 'This is one of four ECS Categorization Fields, and indicates the lowest level in the ECS category hierarchy.\n`event.outcome` simply denotes whether the event represents a success or a failure from the perspective of the entity that produced the event.\nNote that when a single transaction is described in multiple events, each event may populate different values of `event.outcome`, according to their perspective.\nAlso note that in the case of a compound event (a single event that contains multiple logical events), this field should be populated with the value that best captures the overall success or failure from the perspective of the event producer.\nFurther note that not all events will have an associated outcome. For example, this field is generally not populated for metric events, events with `event.type:info`, or any events for which an outcome does not make logical sense.', + 'event.provider': + 'Source of the event.\nEvent transports such as Syslog or the Windows Event Log typically mention the source of an event. It can be the name of the software that generated the event (e.g. 
Sysmon, httpd), or of a subsystem of the operating system (kernel, Microsoft-Windows-Security-Auditing).', + 'event.reason': + 'Reason why this event happened, according to the source.\nThis describes the why of a particular action or outcome captured in the event. Where `event.action` captures the action from the event, `event.reason` describes why that action was taken. For example, a web proxy with an `event.action` which denied the request may also populate `event.reason` with the reason why (e.g. `blocked site`).', + 'event.reference': + 'Reference URL linking to additional information about this event.\nThis URL links to a static definition of this event. Alert events, indicated by `event.kind:alert`, are a common use case for this field.', + 'event.risk_score': + "Risk score or priority of the event (e.g. security solutions). Use your system's original value here.", + 'event.risk_score_norm': + 'Normalized risk score or priority of the event, on a scale of 0 to 100.\nThis is mainly useful if you use more than one system that assigns risk scores, and you want to see a normalized value across all systems.', + 'event.sequence': + 'Sequence number of the event.\nThe sequence number is a value published by some event sources, to make the exact ordering of events unambiguous, regardless of the timestamp precision.', + 'event.severity': + "The numeric severity of the event according to your event source.\nWhat the different severity values mean can be different between sources and use cases. It's up to the implementer to make sure severities are consistent across events from the same source.\nThe Syslog severity belongs in `log.syslog.severity.code`. `event.severity` is meant to represent the severity according to the event source (e.g. firewall, IDS). 
If the event source does not publish its own severity, you may optionally copy the `log.syslog.severity.code` to `event.severity`.", + 'event.start': 'event.start contains the date when the event started or when the activity was first observed.', + 'event.timezone': + 'This field should be populated when the event\'s timestamp does not include timezone information already (e.g. default Syslog timestamps). It\'s optional otherwise.\nAcceptable timezone formats are: a canonical ID (e.g. "Europe/Amsterdam"), abbreviated (e.g. "EST") or an HH:mm differential (e.g. "-05:00").', + 'event.type': + 'This is one of four ECS Categorization Fields, and indicates the third level in the ECS category hierarchy.\n`event.type` represents a categorization "sub-bucket" that, when used along with the `event.category` field values, enables filtering events down to a level appropriate for single visualization.\nThis field is an array. This will allow proper categorization of some events that fall in multiple event types.', + 'event.url': + 'URL linking to an external system to continue investigation of this event.\nThis URL links to another system where in-depth investigation of the specific occurrence of this event can take place. 
Alert events, indicated by `event.kind:alert`, are a common use case for this field.', + 'faas.coldstart': 'Boolean value indicating a cold start of a function.', + 'faas.execution': 'The execution ID of the current function execution.', + 'faas.id': + "The unique identifier of a serverless function.\nFor AWS Lambda it's the function ARN (Amazon Resource Name) without a version or alias suffix.", + 'faas.name': 'The name of a serverless function.', + 'faas.trigger.request_id': 'The ID of the trigger request , message, event, etc.', + 'faas.trigger.type': 'The trigger for the function execution.', + 'faas.version': 'The version of a serverless function.', + 'file.accessed': 'Last time the file was accessed.\nNote that not all filesystems keep track of access time.', + 'file.attributes': + "Array of file attributes.\nAttributes names will vary by platform. Here's a non-exhaustive list of values that are expected in this field: archive, compressed, directory, encrypted, execute, hidden, read, readonly, system, write.", + 'file.code_signature.digest_algorithm': + 'The hashing algorithm used to sign the process.\nThis value can distinguish signatures when a file is signed multiple times by the same signer but with a different digest algorithm.', + 'file.code_signature.exists': 'Boolean to capture if a signature is present.', + 'file.code_signature.signing_id': + 'The identifier used to sign the process.\nThis is used to identify the application manufactured by a software vendor. The field is relevant to Apple *OS only.', + 'file.code_signature.status': + 'Additional information about the certificate status.\nThis is useful for logging cryptographic errors with the certificate validity or trust status. 
Leave unpopulated if the validity or trust of the certificate was unchecked.', + 'file.code_signature.subject_name': 'Subject name of the code signer', + 'file.code_signature.team_id': + 'The team identifier used to sign the process.\nThis is used to identify the team or vendor of a software product. The field is relevant to Apple *OS only.', + 'file.code_signature.timestamp': 'Date and time when the code signature was generated and signed.', + 'file.code_signature.trusted': + 'Stores the trust status of the certificate chain.\nValidating the trust of the certificate chain may be complicated, and this field should only be populated by tools that actively check the status.', + 'file.code_signature.valid': + 'Boolean to capture if the digital signature is verified against the binary content.\nLeave unpopulated if a certificate was unchecked.', + 'file.created': 'File creation time.\nNote that not all filesystems store the creation time.', + 'file.ctime': + 'Last time the file attributes or metadata changed.\nNote that changes to the file content will update `mtime`. This implies `ctime` will be adjusted at the same time, since `mtime` is an attribute of the file.', + 'file.device': 'Device that is the source of the file.', + 'file.directory': 'Directory where the file is located. It should include the drive letter, when appropriate.', + 'file.drive_letter': + 'Drive letter where the file is located. This field is only relevant on Windows.\nThe value should be uppercase, and not include the colon.', + 'file.elf.architecture': 'Machine architecture of the ELF file.', + 'file.elf.byte_order': 'Byte sequence of ELF file.', + 'file.elf.cpu_type': 'CPU type of the ELF file.', + 'file.elf.creation_date': + "Extracted when possible from the file's metadata. Indicates when it was built or compiled. 
It can also be faked by malware creators.", + 'file.elf.exports': 'List of exported element names and types.', + 'file.elf.go_import_hash': + 'A hash of the Go language imports in an ELF file excluding standard library imports. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', + 'file.elf.go_imports': 'List of imported Go language element names and types.', + 'file.elf.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'file.elf.go_imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of Go imports.', + 'file.elf.go_stripped': + 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', + 'file.elf.header.abi_version': 'Version of the ELF Application Binary Interface (ABI).', + 'file.elf.header.class': 'Header class of the ELF file.', + 'file.elf.header.data': 'Data table of the ELF header.', + 'file.elf.header.entrypoint': 'Header entrypoint of the ELF file.', + 'file.elf.header.object_version': '"0x1" for original ELF files.', + 'file.elf.header.os_abi': 'Application Binary Interface (ABI) of the Linux OS.', + 'file.elf.header.type': 'Header type of the ELF file.', + 'file.elf.header.version': 'Version of the ELF header.', + 'file.elf.import_hash': + 'A hash of the imports in an ELF file. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is an ELF implementation of the Windows PE imphash.', + 'file.elf.imports': 'List of imported element names and types.', + 'file.elf.imports_names_entropy': 'Shannon entropy calculation from the list of imported element names and types.', + 'file.elf.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'file.elf.sections': + 'An array containing an object for each section of the ELF file.\nThe keys that should be present in these objects are defined by sub-fields underneath `elf.sections.*`.', + 'file.elf.sections.chi2': 'Chi-square probability distribution of the section.', + 'file.elf.sections.entropy': 'Shannon entropy calculation from the section.', + 'file.elf.sections.flags': 'ELF Section List flags.', + 'file.elf.sections.name': 'ELF Section List name.', + 'file.elf.sections.physical_offset': 'ELF Section List offset.', + 'file.elf.sections.physical_size': 'ELF Section List physical size.', + 'file.elf.sections.type': 'ELF Section List type.', + 'file.elf.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'file.elf.sections.virtual_address': 'ELF Section List virtual address.', + 'file.elf.sections.virtual_size': 'ELF Section List virtual size.', + 'file.elf.segments': + 'An array containing an object for each segment of the ELF file.\nThe keys that should be present in these objects are defined by sub-fields underneath `elf.segments.*`.', + 'file.elf.segments.sections': 'ELF object segment sections.', + 'file.elf.segments.type': 'ELF object segment type.', + 'file.elf.shared_libraries': 'List of shared libraries used by this ELF object.', + 'file.elf.telfhash': 'telfhash symbol hash for ELF file.', + 'file.extension': + 'File extension, excluding the leading dot.\nNote 
that when the file name has multiple extensions (example.tar.gz), only the last one should be captured ("gz", not "tar.gz").', + 'file.fork_name': + 'A fork is additional data associated with a filesystem object.\nOn Linux, a resource fork is used to store additional data with a filesystem object. A file always has at least one fork for the data portion, and additional forks may exist.\nOn NTFS, this is analogous to an Alternate Data Stream (ADS), and the default data stream for a file is just called $DATA. Zone.Identifier is commonly used by Windows to track contents downloaded from the Internet. An ADS is typically of the form: `C:\\path\\to\\filename.extension:some_fork_name`, and `some_fork_name` is the value that should populate `fork_name`. `filename.extension` should populate `file.name`, and `extension` should populate `file.extension`. The full path, `file.path`, will include the fork name.', + 'file.gid': 'Primary group ID (GID) of the file.', + 'file.group': 'Primary group name of the file.', + 'file.hash.md5': 'MD5 hash.', + 'file.hash.sha1': 'SHA1 hash.', + 'file.hash.sha256': 'SHA256 hash.', + 'file.hash.sha384': 'SHA384 hash.', + 'file.hash.sha512': 'SHA512 hash.', + 'file.hash.ssdeep': 'SSDEEP hash.', + 'file.hash.tlsh': 'TLSH hash.', + 'file.inode': 'Inode representing the file in the filesystem.', + 'file.macho.go_import_hash': + 'A hash of the Go language imports in a Mach-O file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', + 'file.macho.go_imports': 'List of imported Go language element names and types.', + 'file.macho.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'file.macho.go_imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of Go imports.', + 'file.macho.go_stripped': + 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', + 'file.macho.import_hash': + 'A hash of the imports in a Mach-O file. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a synonym for symhash.', + 'file.macho.imports': 'List of imported element names and types.', + 'file.macho.imports_names_entropy': 'Shannon entropy calculation from the list of imported element names and types.', + 'file.macho.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'file.macho.sections': + 'An array containing an object for each section of the Mach-O file.\nThe keys that should be present in these objects are defined by sub-fields underneath `macho.sections.*`.', + 'file.macho.sections.entropy': 'Shannon entropy calculation from the section.', + 'file.macho.sections.name': 'Mach-O Section List name.', + 'file.macho.sections.physical_size': 'Mach-O Section List physical size.', + 'file.macho.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'file.macho.sections.virtual_size': 'Mach-O Section List virtual 
size. This is always the same as `physical_size`.', + 'file.macho.symhash': + 'A hash of the imports in a Mach-O file. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a Mach-O implementation of the Windows PE imphash', + 'file.mime_type': + 'MIME type should identify the format of the file or stream of bytes using https://www.iana.org/assignments/media-types/media-types.xhtml[IANA official types], where possible. When more than one type is applicable, the most specific type should be used.', + 'file.mode': 'Mode of the file in octal representation.', + 'file.mtime': 'Last time the file content was modified.', + 'file.name': 'Name of the file including the extension, without the directory.', + 'file.owner': "File owner's username.", + 'file.path': 'Full path to the file, including the file name. It should include the drive letter, when appropriate.', + 'file.pe.architecture': 'CPU architecture target for the file.', + 'file.pe.company': 'Internal company name of the file, provided at compile-time.', + 'file.pe.description': 'Internal description of the file, provided at compile-time.', + 'file.pe.file_version': 'Internal version of the file, provided at compile-time.', + 'file.pe.go_import_hash': + 'A hash of the Go language imports in a PE file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', + 'file.pe.go_imports': 'List of imported Go language element names and types.', + 'file.pe.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'file.pe.go_imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of Go imports.', + 'file.pe.go_stripped': + 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', + 'file.pe.imphash': + 'A hash of the imports in a PE file. An imphash -- or import hash -- can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nLearn more at https://www.fireeye.com/blog/threat-research/2014/01/tracking-malware-import-hashing.html.', + 'file.pe.import_hash': + 'A hash of the imports in a PE file. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a synonym for imphash.', + 'file.pe.imports': 'List of imported element names and types.', + 'file.pe.imports_names_entropy': 'Shannon entropy calculation from the list of imported element names and types.', + 'file.pe.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'file.pe.original_file_name': 'Internal name of the file, provided at compile-time.', + 'file.pe.pehash': + 'A hash of the PE header and data from one or more PE sections. 
An pehash can be used to cluster files by transforming structural information about a file into a hash value.\nLearn more at https://www.usenix.org/legacy/events/leet09/tech/full_papers/wicherski/wicherski_html/index.html.', + 'file.pe.product': 'Internal product name of the file, provided at compile-time.', + 'file.pe.sections': + 'An array containing an object for each section of the PE file.\nThe keys that should be present in these objects are defined by sub-fields underneath `pe.sections.*`.', + 'file.pe.sections.entropy': 'Shannon entropy calculation from the section.', + 'file.pe.sections.name': 'PE Section List name.', + 'file.pe.sections.physical_size': 'PE Section List physical size.', + 'file.pe.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'file.pe.sections.virtual_size': 'PE Section List virtual size. This is always the same as `physical_size`.', + 'file.size': 'File size in bytes.\nOnly relevant when `file.type` is "file".', + 'file.target_path': 'Target path for symlinks.', + 'file.type': 'File type (file, dir, or symlink).', + 'file.uid': 'The user ID (UID) or security identifier (SID) of the file owner.', + 'file.x509.alternative_names': + 'List of subject alternative names (SAN). 
Name types vary by certificate authority and certificate type but commonly contain IP addresses, DNS names (and wildcards), and email addresses.', + 'file.x509.issuer.common_name': 'List of common name (CN) of issuing certificate authority.', + 'file.x509.issuer.country': 'List of country \\(C) codes', + 'file.x509.issuer.distinguished_name': 'Distinguished name (DN) of issuing certificate authority.', + 'file.x509.issuer.locality': 'List of locality names (L)', + 'file.x509.issuer.organization': 'List of organizations (O) of issuing certificate authority.', + 'file.x509.issuer.organizational_unit': 'List of organizational units (OU) of issuing certificate authority.', + 'file.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', + 'file.x509.not_after': 'Time at which the certificate is no longer considered valid.', + 'file.x509.not_before': 'Time at which the certificate is first considered valid.', + 'file.x509.public_key_algorithm': 'Algorithm used to generate the public key.', + 'file.x509.public_key_curve': + 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', + 'file.x509.public_key_exponent': 'Exponent used to derive the public key. This is algorithm specific.', + 'file.x509.public_key_size': 'The size of the public key space in bits.', + 'file.x509.serial_number': + 'Unique serial number issued by the certificate authority. For consistency, if this value is alphanumeric, it should be formatted without colons and uppercase characters.', + 'file.x509.signature_algorithm': + 'Identifier for certificate signature algorithm. We recommend using names found in Go Lang Crypto library. 
See https://github.com/golang/go/blob/go1.14/src/crypto/x509/x509.go#L337-L353.', + 'file.x509.subject.common_name': 'List of common names (CN) of subject.', + 'file.x509.subject.country': 'List of country \\(C) code', + 'file.x509.subject.distinguished_name': 'Distinguished name (DN) of the certificate subject entity.', + 'file.x509.subject.locality': 'List of locality names (L)', + 'file.x509.subject.organization': 'List of organizations (O) of subject.', + 'file.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', + 'file.x509.subject.state_or_province': 'List of state or province names (ST, S, or P)', + 'file.x509.version_number': 'Version of x509 format.', + 'group.domain': + 'Name of the directory the group is a member of.\nFor example, an LDAP or Active Directory domain name.', + 'group.id': 'Unique identifier for the group on the system/platform.', + 'group.name': 'Name of the group.', + 'host.architecture': 'Operating system architecture.', + 'host.boot.id': + 'Linux boot uuid taken from /proc/sys/kernel/random/boot_id. Note the boot_id value from /proc may or may not be the same in containers as on the host. Some container runtimes will bind mount a new boot_id value onto the proc file in each container.', + 'host.cpu.usage': + 'Percent CPU used which is normalized by the number of CPU cores and it ranges from 0 to 1.\nScaling factor: 1000.\nFor example: For a two core host, this value should be the average of the two cores, between 0 and 1.', + 'host.disk.read.bytes': + 'The total number of bytes (gauge) read successfully (aggregated from all disks) since the last metric collection.', + 'host.disk.write.bytes': + 'The total number of bytes (gauge) written successfully (aggregated from all disks) since the last metric collection.', + 'host.domain': + "Name of the domain of which the host is a member.\nFor example, on Windows this could be the host's Active Directory domain or NetBIOS domain name. 
For Linux this could be the domain of the host's LDAP provider.", + 'host.geo.city_name': 'City name.', + 'host.geo.continent_code': "Two-letter code representing continent's name.", + 'host.geo.continent_name': 'Name of the continent.', + 'host.geo.country_iso_code': 'Country ISO code.', + 'host.geo.country_name': 'Country name.', + 'host.geo.location': 'Longitude and latitude.', + 'host.geo.name': + 'User-defined description of a location, at the level of granularity they care about.\nCould be the name of their data centers, the floor number, if this describes a local physical entity, city names.\nNot typically used in automated geolocation.', + 'host.geo.postal_code': + 'Postal code associated with the location.\nValues appropriate for this field may also be known as a postcode or ZIP code and will vary widely from country to country.', + 'host.geo.region_iso_code': 'Region ISO code.', + 'host.geo.region_name': 'Region name.', + 'host.geo.timezone': 'The time zone of the location, such as IANA time zone name.', + 'host.hostname': + 'Hostname of the host.\nIt normally contains what the `hostname` command returns on the host machine.', + 'host.id': + 'Unique host id.\nAs hostname is not always unique, use values that are meaningful in your environment.\nExample: The current usage of `beat.name`.', + 'host.ip': 'Host ip addresses.', + 'host.mac': + 'Host MAC addresses.\nThe notation format from RFC 7042 is suggested: Each octet (that is, 8-bit byte) is represented by two [uppercase] hexadecimal digits giving the value of the octet as an unsigned integer. Successive octets are separated by a hyphen.', + 'host.name': + 'Name of the host.\nIt can contain what hostname returns on Unix systems, the fully qualified domain name (FQDN), or a name specified by the user. 
The recommended value is the lowercase FQDN of the host.', + 'host.network.egress.bytes': + 'The number of bytes (gauge) sent out on all network interfaces by the host since the last metric collection.', + 'host.network.egress.packets': + 'The number of packets (gauge) sent out on all network interfaces by the host since the last metric collection.', + 'host.network.ingress.bytes': + 'The number of bytes received (gauge) on all network interfaces by the host since the last metric collection.', + 'host.network.ingress.packets': + 'The number of packets (gauge) received on all network interfaces by the host since the last metric collection.', + 'host.os.family': 'OS family (such as redhat, debian, freebsd, windows).', + 'host.os.full': 'Operating system name, including the version or code name.', + 'host.os.kernel': 'Operating system kernel version as a raw string.', + 'host.os.name': 'Operating system name, without the version.', + 'host.os.platform': 'Operating system platform (such centos, ubuntu, windows).', + 'host.os.type': + "Use the `os.type` field to categorize the operating system into one of the broad commercial families.\nIf the OS you're dealing with is not listed as an expected value, the field should not be populated. Please let us know by opening an issue with ECS, to propose its addition.", + 'host.os.version': 'Operating system version as a raw string.', + 'host.pid_ns_ino': + 'This is the inode number of the namespace in the namespace file system (nsfs). 
Unsigned int inum in include/linux/ns_common.h.', + 'host.risk.calculated_level': + 'A risk classification level calculated by an internal system as part of entity analytics and entity risk scoring.', + 'host.risk.calculated_score': + 'A risk classification score calculated by an internal system as part of entity analytics and entity risk scoring.', + 'host.risk.calculated_score_norm': + 'A risk classification score calculated by an internal system as part of entity analytics and entity risk scoring, and normalized to a range of 0 to 100.', + 'host.risk.static_level': + 'A risk classification level obtained from outside the system, such as from some external Threat Intelligence Platform.', + 'host.risk.static_score': + 'A risk classification score obtained from outside the system, such as from some external Threat Intelligence Platform.', + 'host.risk.static_score_norm': + 'A risk classification score obtained from outside the system, such as from some external Threat Intelligence Platform, and normalized to a range of 0 to 100.', + 'host.type': + 'Type of host.\nFor Cloud providers this can be the machine type like `t2.medium`. If vm, this could be the container, for example, or other information meaningful in your environment.', + 'host.uptime': 'Seconds the host has been up.', + 'http.request.body.bytes': 'Size in bytes of the request body.', + 'http.request.body.content': 'The full HTTP request body.', + 'http.request.bytes': 'Total size in bytes of the request (body and headers).', + 'http.request.id': + 'A unique identifier for each HTTP request to correlate logs between clients and servers in transactions.\nThe id may be contained in a non-standard HTTP header, such as `X-Request-ID` or `X-Correlation-ID`.', + 'http.request.method': + 'HTTP request method.\nThe value should retain its casing from the original event. 
For example, `GET`, `get`, and `GeT` are all considered valid values for this field.', + 'http.request.mime_type': + "Mime type of the body of the request.\nThis value must only be populated based on the content of the request body, not on the `Content-Type` header. Comparing the mime type of a request with the request's Content-Type header can be helpful in detecting threats or misconfigured clients.", + 'http.request.referrer': 'Referrer for this HTTP request.', + 'http.response.body.bytes': 'Size in bytes of the response body.', + 'http.response.body.content': 'The full HTTP response body.', + 'http.response.bytes': 'Total size in bytes of the response (body and headers).', + 'http.response.mime_type': + "Mime type of the body of the response.\nThis value must only be populated based on the content of the response body, not on the `Content-Type` header. Comparing the mime type of a response with the response's Content-Type header can be helpful in detecting misconfigured servers.", + 'http.response.status_code': 'HTTP response status code.', + 'http.version': 'HTTP version.', + labels: + 'Custom key/value pairs.\nCan be used to add meta information to events. Should not contain nested objects. All values are stored as keyword.\nExample: `docker` and `k8s` labels.', + 'log.file.path': + "Full path to the log file this event came from, including the file name. It should include the drive letter, when appropriate.\nIf the event wasn't read from a log file, do not populate this field.", + 'log.level': + "Original log level of the log event.\nIf the source of the event provides a log level or textual severity, this is the one that goes in `log.level`. If your source doesn't specify one, you may put your event transport's severity here (e.g. Syslog severity).\nSome examples are `warn`, `err`, `i`, `informational`.", + 'log.logger': + 'The name of the logger inside an application. 
This is usually the name of the class which initialized the logger, or can be a custom name.', + 'log.origin.file.line': 'The line number of the file containing the source code which originated the log event.', + 'log.origin.file.name': + 'The name of the file containing the source code which originated the log event.\nNote that this field is not meant to capture the log file. The correct field to capture the log file is `log.file.path`.', + 'log.origin.function': 'The name of the function or method which originated the log event.', + 'log.syslog': + 'The Syslog metadata of the event, if the event was transmitted via Syslog. Please see RFCs 5424 or 3164.', + 'log.syslog.appname': 'The device or application that originated the Syslog message, if available.', + 'log.syslog.facility.code': + 'The Syslog numeric facility of the log event, if available.\nAccording to RFCs 5424 and 3164, this value should be an integer between 0 and 23.', + 'log.syslog.facility.name': 'The Syslog text-based facility of the log event, if available.', + 'log.syslog.hostname': + 'The hostname, FQDN, or IP of the machine that originally sent the Syslog message. This is sourced from the hostname field of the syslog header. Depending on the environment, this value may be different from the host that handled the event, especially if the host handling the events is acting as a collector.', + 'log.syslog.msgid': + 'An identifier for the type of Syslog message, if available. Only applicable for RFC 5424 messages.', + 'log.syslog.priority': + 'Syslog numeric priority of the event, if available.\nAccording to RFCs 5424 and 3164, the priority is 8 * facility + severity. 
This number is therefore expected to contain a value between 0 and 191.', + 'log.syslog.procid': 'The process name or ID that originated the Syslog message, if available.', + 'log.syslog.severity.code': + "The Syslog numeric severity of the log event, if available.\nIf the event source publishing via Syslog provides a different numeric severity value (e.g. firewall, IDS), your source's numeric severity should go to `event.severity`. If the event source does not specify a distinct severity, you can optionally copy the Syslog severity to `event.severity`.", + 'log.syslog.severity.name': + "The Syslog numeric severity of the log event, if available.\nIf the event source publishing via Syslog provides a different severity value (e.g. firewall, IDS), your source's text severity should go to `log.level`. If the event source does not specify a distinct severity, you can optionally copy the Syslog severity to `log.level`.", + 'log.syslog.structured_data': + 'Structured data expressed in RFC 5424 messages, if available. These are key-value pairs formed from the structured data portion of the syslog message, as defined in RFC 5424 Section 6.3.', + 'log.syslog.version': 'The version of the Syslog protocol specification. 
Only applicable for RFC 5424 messages.', + message: + 'For log events the message field contains the log message, optimized for viewing in a log viewer.\nFor structured logs without an original message field, other fields can be concatenated to form a human-readable summary of the event.\nIf multiple messages exist, they can be combined into one message.', + 'network.application': + "When a specific application or service is identified from network connection details (source/dest IPs, ports, certificates, or wire format), this field captures the application's or service's name.\nFor example, the original event identifies the network connection being from a specific web service in a `https` network connection, like `facebook` or `twitter`.\nThe field value must be normalized to lowercase for querying.", + 'network.bytes': + 'Total bytes transferred in both directions.\nIf `source.bytes` and `destination.bytes` are known, `network.bytes` is their sum.', + 'network.community_id': + 'A hash of source and destination IPs and ports, as well as the protocol used in a communication. This is a tool-agnostic standard to identify flows.\nLearn more at https://github.com/corelight/community-id-spec.', + 'network.direction': + 'Direction of the network traffic.\nWhen mapping events from a host-based monitoring context, populate this field from the host\'s point of view, using the values "ingress" or "egress".\nWhen mapping events from a network or perimeter-based monitoring context, populate this field from the point of view of the network perimeter, using the values "inbound", "outbound", "internal" or "external".\nNote that "internal" is not crossing perimeter boundaries, and is meant to describe communication between two hosts within the perimeter. Note also that "external" is meant to describe traffic between two hosts that are external to the perimeter. 
This could for example be useful for ISPs or VPN service providers.', + 'network.forwarded_ip': 'Host IP address when the source IP address is the proxy.', + 'network.iana_number': + 'IANA Protocol Number (https://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml). Standardized list of protocols. This aligns well with NetFlow and sFlow related logs which use the IANA Protocol Number.', + 'network.inner': + 'Network.inner fields are added in addition to network.vlan fields to describe the innermost VLAN when q-in-q VLAN tagging is present. Allowed fields include vlan.id and vlan.name. Inner vlan fields are typically used when sending traffic with multiple 802.1q encapsulations to a network sensor (e.g. Zeek, Wireshark.)', + 'network.inner.vlan.id': 'VLAN ID as reported by the observer.', + 'network.inner.vlan.name': 'Optional VLAN name as reported by the observer.', + 'network.name': 'Name given by operators to sections of their network.', + 'network.packets': + 'Total packets transferred in both directions.\nIf `source.packets` and `destination.packets` are known, `network.packets` is their sum.', + 'network.protocol': + 'In the OSI Model this would be the Application Layer protocol. For example, `http`, `dns`, or `ssh`.\nThe field value must be normalized to lowercase for querying.', + 'network.transport': + 'Same as network.iana_number, but instead using the Keyword name of the transport layer (udp, tcp, ipv6-icmp, etc.)\nThe field value must be normalized to lowercase for querying.', + 'network.type': + 'In the OSI Model this would be the Network Layer. ipv4, ipv6, ipsec, pim, etc\nThe field value must be normalized to lowercase for querying.', + 'network.vlan.id': 'VLAN ID as reported by the observer.', + 'network.vlan.name': 'Optional VLAN name as reported by the observer.', + 'observer.egress': + 'Observer.egress holds information like interface number and name, vlan, and zone information to classify egress traffic. 
Single armed monitoring such as a network sensor on a span port should only use observer.ingress to categorize traffic.', + 'observer.egress.interface.alias': + 'Interface alias as reported by the system, typically used in firewall implementations for e.g. inside, outside, or dmz logical interface naming.', + 'observer.egress.interface.id': 'Interface ID as reported by an observer (typically SNMP interface ID).', + 'observer.egress.interface.name': 'Interface name as reported by the system.', + 'observer.egress.vlan.id': 'VLAN ID as reported by the observer.', + 'observer.egress.vlan.name': 'Optional VLAN name as reported by the observer.', + 'observer.egress.zone': + 'Network zone of outbound traffic as reported by the observer to categorize the destination area of egress traffic, e.g. Internal, External, DMZ, HR, Legal, etc.', + 'observer.geo.city_name': 'City name.', + 'observer.geo.continent_code': "Two-letter code representing continent's name.", + 'observer.geo.continent_name': 'Name of the continent.', + 'observer.geo.country_iso_code': 'Country ISO code.', + 'observer.geo.country_name': 'Country name.', + 'observer.geo.location': 'Longitude and latitude.', + 'observer.geo.name': + 'User-defined description of a location, at the level of granularity they care about.\nCould be the name of their data centers, the floor number, if this describes a local physical entity, city names.\nNot typically used in automated geolocation.', + 'observer.geo.postal_code': + 'Postal code associated with the location.\nValues appropriate for this field may also be known as a postcode or ZIP code and will vary widely from country to country.', + 'observer.geo.region_iso_code': 'Region ISO code.', + 'observer.geo.region_name': 'Region name.', + 'observer.geo.timezone': 'The time zone of the location, such as IANA time zone name.', + 'observer.hostname': 'Hostname of the observer.', + 'observer.ingress': + 'Observer.ingress holds information like interface number and name, vlan, 
and zone information to classify ingress traffic. Single armed monitoring such as a network sensor on a span port should only use observer.ingress to categorize traffic.', + 'observer.ingress.interface.alias': + 'Interface alias as reported by the system, typically used in firewall implementations for e.g. inside, outside, or dmz logical interface naming.', + 'observer.ingress.interface.id': 'Interface ID as reported by an observer (typically SNMP interface ID).', + 'observer.ingress.interface.name': 'Interface name as reported by the system.', + 'observer.ingress.vlan.id': 'VLAN ID as reported by the observer.', + 'observer.ingress.vlan.name': 'Optional VLAN name as reported by the observer.', + 'observer.ingress.zone': + 'Network zone of incoming traffic as reported by the observer to categorize the source area of ingress traffic. e.g. internal, External, DMZ, HR, Legal, etc.', + 'observer.ip': 'IP addresses of the observer.', + 'observer.mac': + 'MAC addresses of the observer.\nThe notation format from RFC 7042 is suggested: Each octet (that is, 8-bit byte) is represented by two [uppercase] hexadecimal digits giving the value of the octet as an unsigned integer. Successive octets are separated by a hyphen.', + 'observer.name': + 'Custom name of the observer.\nThis is a name that can be given to an observer. 
This can be helpful for example if multiple firewalls of the same model are used in an organization.\nIf no custom name is needed, the field can be left empty.', + 'observer.os.family': 'OS family (such as redhat, debian, freebsd, windows).', + 'observer.os.full': 'Operating system name, including the version or code name.', + 'observer.os.kernel': 'Operating system kernel version as a raw string.', + 'observer.os.name': 'Operating system name, without the version.', + 'observer.os.platform': 'Operating system platform (such centos, ubuntu, windows).', + 'observer.os.type': + "Use the `os.type` field to categorize the operating system into one of the broad commercial families.\nIf the OS you're dealing with is not listed as an expected value, the field should not be populated. Please let us know by opening an issue with ECS, to propose its addition.", + 'observer.os.version': 'Operating system version as a raw string.', + 'observer.product': 'The product name of the observer.', + 'observer.serial_number': 'Observer serial number.', + 'observer.type': + 'The type of the observer the data is coming from.\nThere is no predefined list of observer types. 
Some examples are `forwarder`, `firewall`, `ids`, `ips`, `proxy`, `poller`, `sensor`, `APM server`.', + 'observer.vendor': 'Vendor name of the observer.', + 'observer.version': 'Observer version.', + 'orchestrator.api_version': 'API version being used to carry out the action', + 'orchestrator.cluster.id': 'Unique ID of the cluster.', + 'orchestrator.cluster.name': 'Name of the cluster.', + 'orchestrator.cluster.url': 'URL of the API used to manage the cluster.', + 'orchestrator.cluster.version': 'The version of the cluster.', + 'orchestrator.namespace': 'Namespace in which the action is taking place.', + 'orchestrator.organization': 'Organization affected by the event (for multi-tenant orchestrator setups).', + 'orchestrator.resource.annotation': 'The list of annotations added to the resource.', + 'orchestrator.resource.id': 'Unique ID of the resource being acted upon.', + 'orchestrator.resource.ip': + 'IP address assigned to the resource associated with the event being observed. In the case of a Kubernetes Pod, this array would contain only one element: the IP of the Pod (as opposed to the Node on which the Pod is running).', + 'orchestrator.resource.label': 'The list of labels added to the resource.', + 'orchestrator.resource.name': 'Name of the resource being acted upon.', + 'orchestrator.resource.parent.type': + 'Type or kind of the parent resource associated with the event being observed. In Kubernetes, this will be the name of a built-in workload resource (e.g., Deployment, StatefulSet, DaemonSet).', + 'orchestrator.resource.type': 'Type of resource being acted upon.', + 'orchestrator.type': 'Orchestrator cluster type (e.g. 
kubernetes, nomad or cloudfoundry).', + 'organization.id': 'Unique identifier for the organization.', + 'organization.name': 'Organization name.', + 'package.architecture': 'Package architecture.', + 'package.build_version': + 'Additional information about the build version of the installed package.\nFor example use the commit SHA of a non-released package.', + 'package.checksum': 'Checksum of the installed package for verification.', + 'package.description': 'Description of the package.', + 'package.install_scope': 'Indicating how the package was installed, e.g. user-local, global.', + 'package.installed': 'Time when package was installed.', + 'package.license': + 'License under which the package was released.\nUse a short name, e.g. the license identifier from SPDX License List where possible (https://spdx.org/licenses/).', + 'package.name': 'Package name', + 'package.path': 'Path where the package is installed.', + 'package.reference': 'Home page or reference URL of the software in this package, if available.', + 'package.size': 'Package size in bytes.', + 'package.type': + 'Type of package.\nThis should contain the package file type, rather than the package manager name. Examples: rpm, dpkg, brew, npm, gem, nupkg, jar.', + 'package.version': 'Package version', + 'process.args': + 'Array of process arguments, starting with the absolute path to the executable.\nMay be filtered to protect sensitive information.', + 'process.args_count': + 'Length of the process.args array.\nThis field can be useful for querying or performing bucket analysis on how many arguments were provided to start a process. 
More arguments may be an indication of suspicious activity.', + 'process.code_signature.digest_algorithm': + 'The hashing algorithm used to sign the process.\nThis value can distinguish signatures when a file is signed multiple times by the same signer but with a different digest algorithm.', + 'process.code_signature.exists': 'Boolean to capture if a signature is present.', + 'process.code_signature.signing_id': + 'The identifier used to sign the process.\nThis is used to identify the application manufactured by a software vendor. The field is relevant to Apple *OS only.', + 'process.code_signature.status': + 'Additional information about the certificate status.\nThis is useful for logging cryptographic errors with the certificate validity or trust status. Leave unpopulated if the validity or trust of the certificate was unchecked.', + 'process.code_signature.subject_name': 'Subject name of the code signer', + 'process.code_signature.team_id': + 'The team identifier used to sign the process.\nThis is used to identify the team or vendor of a software product. 
The field is relevant to Apple *OS only.', + 'process.code_signature.timestamp': 'Date and time when the code signature was generated and signed.', + 'process.code_signature.trusted': + 'Stores the trust status of the certificate chain.\nValidating the trust of the certificate chain may be complicated, and this field should only be populated by tools that actively check the status.', + 'process.code_signature.valid': + 'Boolean to capture if the digital signature is verified against the binary content.\nLeave unpopulated if a certificate was unchecked.', + 'process.command_line': + 'Full command line that started the process, including the absolute path to the executable, and all arguments.\nSome arguments may be filtered to protect sensitive information.', + 'process.elf.architecture': 'Machine architecture of the ELF file.', + 'process.elf.byte_order': 'Byte sequence of ELF file.', + 'process.elf.cpu_type': 'CPU type of the ELF file.', + 'process.elf.creation_date': + "Extracted when possible from the file's metadata. Indicates when it was built or compiled. It can also be faked by malware creators.", + 'process.elf.exports': 'List of exported element names and types.', + 'process.elf.go_import_hash': + 'A hash of the Go language imports in an ELF file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', + 'process.elf.go_imports': 'List of imported Go language element names and types.', + 'process.elf.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'process.elf.go_imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of Go imports.', + 'process.elf.go_stripped': + 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', + 'process.elf.header.abi_version': 'Version of the ELF Application Binary Interface (ABI).', + 'process.elf.header.class': 'Header class of the ELF file.', + 'process.elf.header.data': 'Data table of the ELF header.', + 'process.elf.header.entrypoint': 'Header entrypoint of the ELF file.', + 'process.elf.header.object_version': '"0x1" for original ELF files.', + 'process.elf.header.os_abi': 'Application Binary Interface (ABI) of the Linux OS.', + 'process.elf.header.type': 'Header type of the ELF file.', + 'process.elf.header.version': 'Version of the ELF header.', + 'process.elf.import_hash': + 'A hash of the imports in an ELF file. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is an ELF implementation of the Windows PE imphash.', + 'process.elf.imports': 'List of imported element names and types.', + 'process.elf.imports_names_entropy': 'Shannon entropy calculation from the list of imported element names and types.', + 'process.elf.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'process.elf.sections': + 'An array containing an object for each section of the ELF file.\nThe keys that should be present in these objects are defined by sub-fields underneath `elf.sections.*`.', + 'process.elf.sections.chi2': 'Chi-square probability distribution of the section.', + 'process.elf.sections.entropy': 'Shannon entropy calculation from the section.', + 'process.elf.sections.flags': 'ELF Section List flags.', + 'process.elf.sections.name': 'ELF Section List name.', + 'process.elf.sections.physical_offset': 'ELF Section List offset.', + 'process.elf.sections.physical_size': 'ELF Section List physical size.', + 'process.elf.sections.type': 'ELF Section List type.', + 'process.elf.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'process.elf.sections.virtual_address': 'ELF Section List virtual address.', + 'process.elf.sections.virtual_size': 'ELF Section List virtual size.', + 'process.elf.segments': + 'An array containing an object for each segment of the ELF file.\nThe keys that should be present in these objects are defined by sub-fields underneath `elf.segments.*`.', + 'process.elf.segments.sections': 'ELF object segment sections.', + 'process.elf.segments.type': 'ELF object segment type.', + 'process.elf.shared_libraries': 'List of shared libraries used by this ELF object.', + 'process.elf.telfhash': 'telfhash symbol hash for ELF file.', + 'process.end': 
'The time the process ended.', + 'process.entity_id': + 'Unique identifier for the process.\nThe implementation of this is specified by the data source, but some examples of what could be used here are a process-generated UUID, Sysmon Process GUIDs, or a hash of some uniquely identifying components of a process.\nConstructing a globally unique identifier is a common practice to mitigate PID reuse as well as to identify a specific process over time, across multiple monitored hosts.', + 'process.entry_leader.args': + 'Array of process arguments, starting with the absolute path to the executable.\nMay be filtered to protect sensitive information.', + 'process.entry_leader.args_count': + 'Length of the process.args array.\nThis field can be useful for querying or performing bucket analysis on how many arguments were provided to start a process. More arguments may be an indication of suspicious activity.', + 'process.entry_leader.attested_groups.name': 'Name of the group.', + 'process.entry_leader.attested_user.id': 'Unique identifier of the user.', + 'process.entry_leader.attested_user.name': 'Short name or login of the user.', + 'process.entry_leader.command_line': + 'Full command line that started the process, including the absolute path to the executable, and all arguments.\nSome arguments may be filtered to protect sensitive information.', + 'process.entry_leader.entity_id': + 'Unique identifier for the process.\nThe implementation of this is specified by the data source, but some examples of what could be used here are a process-generated UUID, Sysmon Process GUIDs, or a hash of some uniquely identifying components of a process.\nConstructing a globally unique identifier is a common practice to mitigate PID reuse as well as to identify a specific process over time, across multiple monitored hosts.', + 'process.entry_leader.entry_meta.source.ip': 'IP address of the source (IPv4 or IPv6).', + 'process.entry_leader.entry_meta.type': + 'The entry type for the entry 
session leader. Values include: init(e.g systemd), sshd, ssm, kubelet, teleport, terminal, console\nNote: This field is only set on process.session_leader.', + 'process.entry_leader.executable': 'Absolute path to the process executable.', + 'process.entry_leader.group.id': 'Unique identifier for the group on the system/platform.', + 'process.entry_leader.group.name': 'Name of the group.', + 'process.entry_leader.interactive': + 'Whether the process is connected to an interactive shell.\nProcess interactivity is inferred from the processes file descriptors. If the character device for the controlling tty is the same as stdin and stderr for the process, the process is considered interactive.\nNote: A non-interactive process can belong to an interactive session and is simply one that does not have open file descriptors reading the controlling TTY on FD 0 (stdin) or writing to the controlling TTY on FD 2 (stderr). A backgrounded process is still considered interactive if stdin and stderr are connected to the controlling TTY.', + 'process.entry_leader.name': 'Process name.\nSometimes called program name or similar.', + 'process.entry_leader.parent.entity_id': + 'Unique identifier for the process.\nThe implementation of this is specified by the data source, but some examples of what could be used here are a process-generated UUID, Sysmon Process GUIDs, or a hash of some uniquely identifying components of a process.\nConstructing a globally unique identifier is a common practice to mitigate PID reuse as well as to identify a specific process over time, across multiple monitored hosts.', + 'process.entry_leader.parent.pid': 'Process id.', + 'process.entry_leader.parent.session_leader.entity_id': + 'Unique identifier for the process.\nThe implementation of this is specified by the data source, but some examples of what could be used here are a process-generated UUID, Sysmon Process GUIDs, or a hash of some uniquely identifying components of a process.\nConstructing a 
globally unique identifier is a common practice to mitigate PID reuse as well as to identify a specific process over time, across multiple monitored hosts.', + 'process.entry_leader.parent.session_leader.pid': 'Process id.', + 'process.entry_leader.parent.session_leader.start': 'The time the process started.', + 'process.entry_leader.parent.start': 'The time the process started.', + 'process.entry_leader.pid': 'Process id.', + 'process.entry_leader.real_group.id': 'Unique identifier for the group on the system/platform.', + 'process.entry_leader.real_group.name': 'Name of the group.', + 'process.entry_leader.real_user.id': 'Unique identifier of the user.', + 'process.entry_leader.real_user.name': 'Short name or login of the user.', + 'process.entry_leader.same_as_process': + "This boolean is used to identify if a leader process is the same as the top level process.\nFor example, if `process.group_leader.same_as_process = true`, it means the process event in question is the leader of its process group. Details under `process.*` like `pid` would be the same under `process.group_leader.*` The same applies for both `process.session_leader` and `process.entry_leader`.\nThis field exists to the benefit of EQL and other rule engines since it's not possible to compare equality between two fields in a single document. 
e.g `process.entity_id` = `process.group_leader.entity_id` (top level process is the process group leader) OR `process.entity_id` = `process.entry_leader.entity_id` (top level process is the entry session leader)\nInstead these rules could be written like: `process.group_leader.same_as_process: true` OR `process.entry_leader.same_as_process: true`\nNote: This field is only set on `process.entry_leader`, `process.session_leader` and `process.group_leader`.", + 'process.entry_leader.saved_group.id': 'Unique identifier for the group on the system/platform.', + 'process.entry_leader.saved_group.name': 'Name of the group.', + 'process.entry_leader.saved_user.id': 'Unique identifier of the user.', + 'process.entry_leader.saved_user.name': 'Short name or login of the user.', + 'process.entry_leader.start': 'The time the process started.', + 'process.entry_leader.supplemental_groups.id': 'Unique identifier for the group on the system/platform.', + 'process.entry_leader.supplemental_groups.name': 'Name of the group.', + 'process.entry_leader.tty': + 'Information about the controlling TTY device. If set, the process belongs to an interactive session.', + 'process.entry_leader.tty.char_device.major': + 'The major number identifies the driver associated with the device. The character device\'s major and minor numbers can be algorithmically combined to produce the more familiar terminal identifiers such as "ttyS0" and "pts/0". For more details, please refer to the Linux kernel documentation.', + 'process.entry_leader.tty.char_device.minor': + 'The minor number is used only by the driver specified by the major number; other parts of the kernel don\u2019t use it, and merely pass it along to the driver. 
It is common for a driver to control several devices; the minor number provides a way for the driver to differentiate among them.', + 'process.entry_leader.user.id': 'Unique identifier of the user.', + 'process.entry_leader.user.name': 'Short name or login of the user.', + 'process.entry_leader.working_directory': 'The working directory of the process.', + 'process.env_vars': + 'Array of environment variable bindings. Captured from a snapshot of the environment at the time of execution.\nMay be filtered to protect sensitive information.', + 'process.executable': 'Absolute path to the process executable.', + 'process.exit_code': + 'The exit code of the process, if this is a termination event.\nThe field should be absent if there is no exit code for the event (e.g. process start).', + 'process.group_leader.args': + 'Array of process arguments, starting with the absolute path to the executable.\nMay be filtered to protect sensitive information.', + 'process.group_leader.args_count': + 'Length of the process.args array.\nThis field can be useful for querying or performing bucket analysis on how many arguments were provided to start a process. 
More arguments may be an indication of suspicious activity.', + 'process.group_leader.command_line': + 'Full command line that started the process, including the absolute path to the executable, and all arguments.\nSome arguments may be filtered to protect sensitive information.', + 'process.group_leader.entity_id': + 'Unique identifier for the process.\nThe implementation of this is specified by the data source, but some examples of what could be used here are a process-generated UUID, Sysmon Process GUIDs, or a hash of some uniquely identifying components of a process.\nConstructing a globally unique identifier is a common practice to mitigate PID reuse as well as to identify a specific process over time, across multiple monitored hosts.', + 'process.group_leader.executable': 'Absolute path to the process executable.', + 'process.group_leader.group.id': 'Unique identifier for the group on the system/platform.', + 'process.group_leader.group.name': 'Name of the group.', + 'process.group_leader.interactive': + 'Whether the process is connected to an interactive shell.\nProcess interactivity is inferred from the processes file descriptors. If the character device for the controlling tty is the same as stdin and stderr for the process, the process is considered interactive.\nNote: A non-interactive process can belong to an interactive session and is simply one that does not have open file descriptors reading the controlling TTY on FD 0 (stdin) or writing to the controlling TTY on FD 2 (stderr). 
A backgrounded process is still considered interactive if stdin and stderr are connected to the controlling TTY.', + 'process.group_leader.name': 'Process name.\nSometimes called program name or similar.', + 'process.group_leader.pid': 'Process id.', + 'process.group_leader.real_group.id': 'Unique identifier for the group on the system/platform.', + 'process.group_leader.real_group.name': 'Name of the group.', + 'process.group_leader.real_user.id': 'Unique identifier of the user.', + 'process.group_leader.real_user.name': 'Short name or login of the user.', + 'process.group_leader.same_as_process': + "This boolean is used to identify if a leader process is the same as the top level process.\nFor example, if `process.group_leader.same_as_process = true`, it means the process event in question is the leader of its process group. Details under `process.*` like `pid` would be the same under `process.group_leader.*` The same applies for both `process.session_leader` and `process.entry_leader`.\nThis field exists to the benefit of EQL and other rule engines since it's not possible to compare equality between two fields in a single document. 
e.g `process.entity_id` = `process.group_leader.entity_id` (top level process is the process group leader) OR `process.entity_id` = `process.entry_leader.entity_id` (top level process is the entry session leader)\nInstead these rules could be written like: `process.group_leader.same_as_process: true` OR `process.entry_leader.same_as_process: true`\nNote: This field is only set on `process.entry_leader`, `process.session_leader` and `process.group_leader`.", + 'process.group_leader.saved_group.id': 'Unique identifier for the group on the system/platform.', + 'process.group_leader.saved_group.name': 'Name of the group.', + 'process.group_leader.saved_user.id': 'Unique identifier of the user.', + 'process.group_leader.saved_user.name': 'Short name or login of the user.', + 'process.group_leader.start': 'The time the process started.', + 'process.group_leader.supplemental_groups.id': 'Unique identifier for the group on the system/platform.', + 'process.group_leader.supplemental_groups.name': 'Name of the group.', + 'process.group_leader.tty': + 'Information about the controlling TTY device. If set, the process belongs to an interactive session.', + 'process.group_leader.tty.char_device.major': + 'The major number identifies the driver associated with the device. The character device\'s major and minor numbers can be algorithmically combined to produce the more familiar terminal identifiers such as "ttyS0" and "pts/0". For more details, please refer to the Linux kernel documentation.', + 'process.group_leader.tty.char_device.minor': + 'The minor number is used only by the driver specified by the major number; other parts of the kernel don\u2019t use it, and merely pass it along to the driver. 
It is common for a driver to control several devices; the minor number provides a way for the driver to differentiate among them.', + 'process.group_leader.user.id': 'Unique identifier of the user.', + 'process.group_leader.user.name': 'Short name or login of the user.', + 'process.group_leader.working_directory': 'The working directory of the process.', + 'process.hash.md5': 'MD5 hash.', + 'process.hash.sha1': 'SHA1 hash.', + 'process.hash.sha256': 'SHA256 hash.', + 'process.hash.sha384': 'SHA384 hash.', + 'process.hash.sha512': 'SHA512 hash.', + 'process.hash.ssdeep': 'SSDEEP hash.', + 'process.hash.tlsh': 'TLSH hash.', + 'process.interactive': + 'Whether the process is connected to an interactive shell.\nProcess interactivity is inferred from the processes file descriptors. If the character device for the controlling tty is the same as stdin and stderr for the process, the process is considered interactive.\nNote: A non-interactive process can belong to an interactive session and is simply one that does not have open file descriptors reading the controlling TTY on FD 0 (stdin) or writing to the controlling TTY on FD 2 (stderr). 
A backgrounded process is still considered interactive if stdin and stderr are connected to the controlling TTY.', + 'process.io': + 'A chunk of input or output (IO) from a single process.\nThis field only appears on the top level process object, which is the process that wrote the output or read the input.', + 'process.io.bytes_skipped': 'An array of byte offsets and lengths denoting where IO data has been skipped.', + 'process.io.bytes_skipped.length': 'The length of bytes skipped.', + 'process.io.bytes_skipped.offset': + "The byte offset into this event's io.text (or io.bytes in the future) where length bytes were skipped.", + 'process.io.max_bytes_per_process_exceeded': + 'If true, the process producing the output has exceeded the max_kilobytes_per_process configuration setting.', + 'process.io.text': + 'A chunk of output or input sanitized to UTF-8.\nBest efforts are made to ensure complete lines are captured in these events. Assumptions should NOT be made that multiple lines will appear in the same event. TTY output may contain terminal control codes such as for cursor movement, so some string queries may not match due to terminal codes inserted between characters of a word.', + 'process.io.total_bytes_captured': 'The total number of bytes captured in this event.', + 'process.io.total_bytes_skipped': + 'The total number of bytes that were not captured due to implementation restrictions such as buffer size limits. Implementors should strive to ensure this value is always zero', + 'process.io.type': + "The type of object on which the IO action (read or write) was taken.\nCurrently only 'tty' is supported. Other types may be added in the future for 'file' and 'socket' support.", + 'process.macho.go_import_hash': + 'A hash of the Go language imports in a Mach-O file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', + 'process.macho.go_imports': 'List of imported Go language element names and types.', + 'process.macho.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'process.macho.go_imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of Go imports.', + 'process.macho.go_stripped': + 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', + 'process.macho.import_hash': + 'A hash of the imports in a Mach-O file. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a synonym for symhash.', + 'process.macho.imports': 'List of imported element names and types.', + 'process.macho.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'process.macho.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'process.macho.sections': + 'An array containing an object for each section of the Mach-O file.\nThe keys that should be present in these objects are defined by sub-fields underneath `macho.sections.*`.', + 'process.macho.sections.entropy': 'Shannon entropy calculation from the section.', + 'process.macho.sections.name': 'Mach-O Section List name.', + 'process.macho.sections.physical_size': 'Mach-O Section List physical size.', + 'process.macho.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 
'process.macho.sections.virtual_size': + 'Mach-O Section List virtual size. This is always the same as `physical_size`.', + 'process.macho.symhash': + 'A hash of the imports in a Mach-O file. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a Mach-O implementation of the Windows PE imphash', + 'process.name': 'Process name.\nSometimes called program name or similar.', + 'process.parent.args': + 'Array of process arguments, starting with the absolute path to the executable.\nMay be filtered to protect sensitive information.', + 'process.parent.args_count': + 'Length of the process.args array.\nThis field can be useful for querying or performing bucket analysis on how many arguments were provided to start a process. More arguments may be an indication of suspicious activity.', + 'process.parent.code_signature.digest_algorithm': + 'The hashing algorithm used to sign the process.\nThis value can distinguish signatures when a file is signed multiple times by the same signer but with a different digest algorithm.', + 'process.parent.code_signature.exists': 'Boolean to capture if a signature is present.', + 'process.parent.code_signature.signing_id': + 'The identifier used to sign the process.\nThis is used to identify the application manufactured by a software vendor. The field is relevant to Apple *OS only.', + 'process.parent.code_signature.status': + 'Additional information about the certificate status.\nThis is useful for logging cryptographic errors with the certificate validity or trust status. Leave unpopulated if the validity or trust of the certificate was unchecked.', + 'process.parent.code_signature.subject_name': 'Subject name of the code signer', + 'process.parent.code_signature.team_id': + 'The team identifier used to sign the process.\nThis is used to identify the team or vendor of a software product. 
The field is relevant to Apple *OS only.', + 'process.parent.code_signature.timestamp': 'Date and time when the code signature was generated and signed.', + 'process.parent.code_signature.trusted': + 'Stores the trust status of the certificate chain.\nValidating the trust of the certificate chain may be complicated, and this field should only be populated by tools that actively check the status.', + 'process.parent.code_signature.valid': + 'Boolean to capture if the digital signature is verified against the binary content.\nLeave unpopulated if a certificate was unchecked.', + 'process.parent.command_line': + 'Full command line that started the process, including the absolute path to the executable, and all arguments.\nSome arguments may be filtered to protect sensitive information.', + 'process.parent.elf.architecture': 'Machine architecture of the ELF file.', + 'process.parent.elf.byte_order': 'Byte sequence of ELF file.', + 'process.parent.elf.cpu_type': 'CPU type of the ELF file.', + 'process.parent.elf.creation_date': + "Extracted when possible from the file's metadata. Indicates when it was built or compiled. It can also be faked by malware creators.", + 'process.parent.elf.exports': 'List of exported element names and types.', + 'process.parent.elf.go_import_hash': + 'A hash of the Go language imports in an ELF file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', + 'process.parent.elf.go_imports': 'List of imported Go language element names and types.', + 'process.parent.elf.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'process.parent.elf.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'process.parent.elf.go_stripped': + 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', + 'process.parent.elf.header.abi_version': 'Version of the ELF Application Binary Interface (ABI).', + 'process.parent.elf.header.class': 'Header class of the ELF file.', + 'process.parent.elf.header.data': 'Data table of the ELF header.', + 'process.parent.elf.header.entrypoint': 'Header entrypoint of the ELF file.', + 'process.parent.elf.header.object_version': '"0x1" for original ELF files.', + 'process.parent.elf.header.os_abi': 'Application Binary Interface (ABI) of the Linux OS.', + 'process.parent.elf.header.type': 'Header type of the ELF file.', + 'process.parent.elf.header.version': 'Version of the ELF header.', + 'process.parent.elf.import_hash': + 'A hash of the imports in an ELF file. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is an ELF implementation of the Windows PE imphash.', + 'process.parent.elf.imports': 'List of imported element names and types.', + 'process.parent.elf.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'process.parent.elf.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'process.parent.elf.sections': + 'An array containing an object for each section of the ELF file.\nThe keys that should be present in these objects are defined by sub-fields underneath `elf.sections.*`.', + 'process.parent.elf.sections.chi2': 'Chi-square probability distribution of the section.', + 'process.parent.elf.sections.entropy': 'Shannon entropy calculation from the section.', + 'process.parent.elf.sections.flags': 'ELF Section List flags.', + 'process.parent.elf.sections.name': 'ELF Section List name.', + 'process.parent.elf.sections.physical_offset': 'ELF Section List offset.', + 'process.parent.elf.sections.physical_size': 'ELF Section List physical size.', + 'process.parent.elf.sections.type': 'ELF Section List type.', + 'process.parent.elf.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'process.parent.elf.sections.virtual_address': 'ELF Section List virtual address.', + 'process.parent.elf.sections.virtual_size': 'ELF Section List virtual size.', + 'process.parent.elf.segments': + 'An array containing an object for each segment of the ELF file.\nThe keys that should be present in these objects are defined by sub-fields underneath `elf.segments.*`.', + 'process.parent.elf.segments.sections': 'ELF object segment sections.', + 'process.parent.elf.segments.type': 'ELF object segment type.', + 'process.parent.elf.shared_libraries': 'List 
of shared libraries used by this ELF object.', + 'process.parent.elf.telfhash': 'telfhash symbol hash for ELF file.', + 'process.parent.end': 'The time the process ended.', + 'process.parent.entity_id': + 'Unique identifier for the process.\nThe implementation of this is specified by the data source, but some examples of what could be used here are a process-generated UUID, Sysmon Process GUIDs, or a hash of some uniquely identifying components of a process.\nConstructing a globally unique identifier is a common practice to mitigate PID reuse as well as to identify a specific process over time, across multiple monitored hosts.', + 'process.parent.executable': 'Absolute path to the process executable.', + 'process.parent.exit_code': + 'The exit code of the process, if this is a termination event.\nThe field should be absent if there is no exit code for the event (e.g. process start).', + 'process.parent.group.id': 'Unique identifier for the group on the system/platform.', + 'process.parent.group.name': 'Name of the group.', + 'process.parent.group_leader.entity_id': + 'Unique identifier for the process.\nThe implementation of this is specified by the data source, but some examples of what could be used here are a process-generated UUID, Sysmon Process GUIDs, or a hash of some uniquely identifying components of a process.\nConstructing a globally unique identifier is a common practice to mitigate PID reuse as well as to identify a specific process over time, across multiple monitored hosts.', + 'process.parent.group_leader.pid': 'Process id.', + 'process.parent.group_leader.start': 'The time the process started.', + 'process.parent.hash.md5': 'MD5 hash.', + 'process.parent.hash.sha1': 'SHA1 hash.', + 'process.parent.hash.sha256': 'SHA256 hash.', + 'process.parent.hash.sha384': 'SHA384 hash.', + 'process.parent.hash.sha512': 'SHA512 hash.', + 'process.parent.hash.ssdeep': 'SSDEEP hash.', + 'process.parent.hash.tlsh': 'TLSH hash.', + 'process.parent.interactive': + 
'Whether the process is connected to an interactive shell.\nProcess interactivity is inferred from the processes file descriptors. If the character device for the controlling tty is the same as stdin and stderr for the process, the process is considered interactive.\nNote: A non-interactive process can belong to an interactive session and is simply one that does not have open file descriptors reading the controlling TTY on FD 0 (stdin) or writing to the controlling TTY on FD 2 (stderr). A backgrounded process is still considered interactive if stdin and stderr are connected to the controlling TTY.', + 'process.parent.macho.go_import_hash': + 'A hash of the Go language imports in a Mach-O file excluding standard library imports. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', + 'process.parent.macho.go_imports': 'List of imported Go language element names and types.', + 'process.parent.macho.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'process.parent.macho.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'process.parent.macho.go_stripped': + 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', + 'process.parent.macho.import_hash': + 'A hash of the imports in a Mach-O file. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a synonym for symhash.', + 'process.parent.macho.imports': 'List of imported element names and types.', + 'process.parent.macho.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'process.parent.macho.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'process.parent.macho.sections': + 'An array containing an object for each section of the Mach-O file.\nThe keys that should be present in these objects are defined by sub-fields underneath `macho.sections.*`.', + 'process.parent.macho.sections.entropy': 'Shannon entropy calculation from the section.', + 'process.parent.macho.sections.name': 'Mach-O Section List name.', + 'process.parent.macho.sections.physical_size': 'Mach-O Section List physical size.', + 'process.parent.macho.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'process.parent.macho.sections.virtual_size': + 'Mach-O Section List virtual size. This is always the same as `physical_size`.', + 'process.parent.macho.symhash': + 'A hash of the imports in a Mach-O file. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a Mach-O implementation of the Windows PE imphash', + 'process.parent.name': 'Process name.\nSometimes called program name or similar.', + 'process.parent.pe.architecture': 'CPU architecture target for the file.', + 'process.parent.pe.company': 'Internal company name of the file, provided at compile-time.', + 'process.parent.pe.description': 'Internal description of the file, provided at compile-time.', + 'process.parent.pe.file_version': 'Internal version of the file, provided at compile-time.', + 'process.parent.pe.go_import_hash': + 'A hash of the Go language imports in a PE file excluding standard library imports. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', + 'process.parent.pe.go_imports': 'List of imported Go language element names and types.', + 'process.parent.pe.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'process.parent.pe.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'process.parent.pe.go_stripped': + 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', + 'process.parent.pe.imphash': + 'A hash of the imports in a PE file. 
An imphash -- or import hash -- can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nLearn more at https://www.fireeye.com/blog/threat-research/2014/01/tracking-malware-import-hashing.html.', + 'process.parent.pe.import_hash': + 'A hash of the imports in a PE file. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a synonym for imphash.', + 'process.parent.pe.imports': 'List of imported element names and types.', + 'process.parent.pe.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'process.parent.pe.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'process.parent.pe.original_file_name': 'Internal name of the file, provided at compile-time.', + 'process.parent.pe.pehash': + 'A hash of the PE header and data from one or more PE sections. 
An pehash can be used to cluster files by transforming structural information about a file into a hash value.\nLearn more at https://www.usenix.org/legacy/events/leet09/tech/full_papers/wicherski/wicherski_html/index.html.', + 'process.parent.pe.product': 'Internal product name of the file, provided at compile-time.', + 'process.parent.pe.sections': + 'An array containing an object for each section of the PE file.\nThe keys that should be present in these objects are defined by sub-fields underneath `pe.sections.*`.', + 'process.parent.pe.sections.entropy': 'Shannon entropy calculation from the section.', + 'process.parent.pe.sections.name': 'PE Section List name.', + 'process.parent.pe.sections.physical_size': 'PE Section List physical size.', + 'process.parent.pe.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'process.parent.pe.sections.virtual_size': + 'PE Section List virtual size. This is always the same as `physical_size`.', + 'process.parent.pgid': + 'Deprecated for removal in next major version release. 
This field is superseded by `process.group_leader.pid`.\nIdentifier of the group of processes the process belongs to.', + 'process.parent.pid': 'Process id.', + 'process.parent.real_group.id': 'Unique identifier for the group on the system/platform.', + 'process.parent.real_group.name': 'Name of the group.', + 'process.parent.real_user.id': 'Unique identifier of the user.', + 'process.parent.real_user.name': 'Short name or login of the user.', + 'process.parent.saved_group.id': 'Unique identifier for the group on the system/platform.', + 'process.parent.saved_group.name': 'Name of the group.', + 'process.parent.saved_user.id': 'Unique identifier of the user.', + 'process.parent.saved_user.name': 'Short name or login of the user.', + 'process.parent.start': 'The time the process started.', + 'process.parent.supplemental_groups.id': 'Unique identifier for the group on the system/platform.', + 'process.parent.supplemental_groups.name': 'Name of the group.', + 'process.parent.thread.id': 'Thread ID.', + 'process.parent.thread.name': 'Thread name.', + 'process.parent.title': + 'Process title.\nThe proctitle, some times the same as process name. Can also be different: for example a browser setting its title to the web page currently opened.', + 'process.parent.tty': + 'Information about the controlling TTY device. If set, the process belongs to an interactive session.', + 'process.parent.tty.char_device.major': + 'The major number identifies the driver associated with the device. The character device\'s major and minor numbers can be algorithmically combined to produce the more familiar terminal identifiers such as "ttyS0" and "pts/0". For more details, please refer to the Linux kernel documentation.', + 'process.parent.tty.char_device.minor': + 'The minor number is used only by the driver specified by the major number; other parts of the kernel don\u2019t use it, and merely pass it along to the driver. 
It is common for a driver to control several devices; the minor number provides a way for the driver to differentiate among them.', + 'process.parent.uptime': 'Seconds the process has been up.', + 'process.parent.user.id': 'Unique identifier of the user.', + 'process.parent.user.name': 'Short name or login of the user.', + 'process.parent.working_directory': 'The working directory of the process.', + 'process.pe.architecture': 'CPU architecture target for the file.', + 'process.pe.company': 'Internal company name of the file, provided at compile-time.', + 'process.pe.description': 'Internal description of the file, provided at compile-time.', + 'process.pe.file_version': 'Internal version of the file, provided at compile-time.', + 'process.pe.go_import_hash': + 'A hash of the Go language imports in a PE file excluding standard library imports. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', + 'process.pe.go_imports': 'List of imported Go language element names and types.', + 'process.pe.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'process.pe.go_imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of Go imports.', + 'process.pe.go_stripped': + 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', + 'process.pe.imphash': + 'A hash of the imports in a PE file. 
An imphash -- or import hash -- can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nLearn more at https://www.fireeye.com/blog/threat-research/2014/01/tracking-malware-import-hashing.html.', + 'process.pe.import_hash': + 'A hash of the imports in a PE file. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a synonym for imphash.', + 'process.pe.imports': 'List of imported element names and types.', + 'process.pe.imports_names_entropy': 'Shannon entropy calculation from the list of imported element names and types.', + 'process.pe.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'process.pe.original_file_name': 'Internal name of the file, provided at compile-time.', + 'process.pe.pehash': + 'A hash of the PE header and data from one or more PE sections. An pehash can be used to cluster files by transforming structural information about a file into a hash value.\nLearn more at https://www.usenix.org/legacy/events/leet09/tech/full_papers/wicherski/wicherski_html/index.html.', + 'process.pe.product': 'Internal product name of the file, provided at compile-time.', + 'process.pe.sections': + 'An array containing an object for each section of the PE file.\nThe keys that should be present in these objects are defined by sub-fields underneath `pe.sections.*`.', + 'process.pe.sections.entropy': 'Shannon entropy calculation from the section.', + 'process.pe.sections.name': 'PE Section List name.', + 'process.pe.sections.physical_size': 'PE Section List physical size.', + 'process.pe.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'process.pe.sections.virtual_size': 'PE Section List virtual size. 
This is always the same as `physical_size`.', + 'process.pgid': + 'Deprecated for removal in next major version release. This field is superseded by `process.group_leader.pid`.\nIdentifier of the group of processes the process belongs to.', + 'process.pid': 'Process id.', + 'process.previous.args': + 'Array of process arguments, starting with the absolute path to the executable.\nMay be filtered to protect sensitive information.', + 'process.previous.args_count': + 'Length of the process.args array.\nThis field can be useful for querying or performing bucket analysis on how many arguments were provided to start a process. More arguments may be an indication of suspicious activity.', + 'process.previous.executable': 'Absolute path to the process executable.', + 'process.real_group.id': 'Unique identifier for the group on the system/platform.', + 'process.real_group.name': 'Name of the group.', + 'process.real_user.id': 'Unique identifier of the user.', + 'process.real_user.name': 'Short name or login of the user.', + 'process.saved_group.id': 'Unique identifier for the group on the system/platform.', + 'process.saved_group.name': 'Name of the group.', + 'process.saved_user.id': 'Unique identifier of the user.', + 'process.saved_user.name': 'Short name or login of the user.', + 'process.session_leader.args': + 'Array of process arguments, starting with the absolute path to the executable.\nMay be filtered to protect sensitive information.', + 'process.session_leader.args_count': + 'Length of the process.args array.\nThis field can be useful for querying or performing bucket analysis on how many arguments were provided to start a process. 
More arguments may be an indication of suspicious activity.', + 'process.session_leader.command_line': + 'Full command line that started the process, including the absolute path to the executable, and all arguments.\nSome arguments may be filtered to protect sensitive information.', + 'process.session_leader.entity_id': + 'Unique identifier for the process.\nThe implementation of this is specified by the data source, but some examples of what could be used here are a process-generated UUID, Sysmon Process GUIDs, or a hash of some uniquely identifying components of a process.\nConstructing a globally unique identifier is a common practice to mitigate PID reuse as well as to identify a specific process over time, across multiple monitored hosts.', + 'process.session_leader.executable': 'Absolute path to the process executable.', + 'process.session_leader.group.id': 'Unique identifier for the group on the system/platform.', + 'process.session_leader.group.name': 'Name of the group.', + 'process.session_leader.interactive': + 'Whether the process is connected to an interactive shell.\nProcess interactivity is inferred from the processes file descriptors. If the character device for the controlling tty is the same as stdin and stderr for the process, the process is considered interactive.\nNote: A non-interactive process can belong to an interactive session and is simply one that does not have open file descriptors reading the controlling TTY on FD 0 (stdin) or writing to the controlling TTY on FD 2 (stderr). 
A backgrounded process is still considered interactive if stdin and stderr are connected to the controlling TTY.', + 'process.session_leader.name': 'Process name.\nSometimes called program name or similar.', + 'process.session_leader.parent.entity_id': + 'Unique identifier for the process.\nThe implementation of this is specified by the data source, but some examples of what could be used here are a process-generated UUID, Sysmon Process GUIDs, or a hash of some uniquely identifying components of a process.\nConstructing a globally unique identifier is a common practice to mitigate PID reuse as well as to identify a specific process over time, across multiple monitored hosts.', + 'process.session_leader.parent.pid': 'Process id.', + 'process.session_leader.parent.session_leader.entity_id': + 'Unique identifier for the process.\nThe implementation of this is specified by the data source, but some examples of what could be used here are a process-generated UUID, Sysmon Process GUIDs, or a hash of some uniquely identifying components of a process.\nConstructing a globally unique identifier is a common practice to mitigate PID reuse as well as to identify a specific process over time, across multiple monitored hosts.', + 'process.session_leader.parent.session_leader.pid': 'Process id.', + 'process.session_leader.parent.session_leader.start': 'The time the process started.', + 'process.session_leader.parent.start': 'The time the process started.', + 'process.session_leader.pid': 'Process id.', + 'process.session_leader.real_group.id': 'Unique identifier for the group on the system/platform.', + 'process.session_leader.real_group.name': 'Name of the group.', + 'process.session_leader.real_user.id': 'Unique identifier of the user.', + 'process.session_leader.real_user.name': 'Short name or login of the user.', + 'process.session_leader.same_as_process': + "This boolean is used to identify if a leader process is the same as the top level process.\nFor example, if 
`process.group_leader.same_as_process = true`, it means the process event in question is the leader of its process group. Details under `process.*` like `pid` would be the same under `process.group_leader.*` The same applies for both `process.session_leader` and `process.entry_leader`.\nThis field exists to the benefit of EQL and other rule engines since it's not possible to compare equality between two fields in a single document. e.g `process.entity_id` = `process.group_leader.entity_id` (top level process is the process group leader) OR `process.entity_id` = `process.entry_leader.entity_id` (top level process is the entry session leader)\nInstead these rules could be written like: `process.group_leader.same_as_process: true` OR `process.entry_leader.same_as_process: true`\nNote: This field is only set on `process.entry_leader`, `process.session_leader` and `process.group_leader`.", + 'process.session_leader.saved_group.id': 'Unique identifier for the group on the system/platform.', + 'process.session_leader.saved_group.name': 'Name of the group.', + 'process.session_leader.saved_user.id': 'Unique identifier of the user.', + 'process.session_leader.saved_user.name': 'Short name or login of the user.', + 'process.session_leader.start': 'The time the process started.', + 'process.session_leader.supplemental_groups.id': 'Unique identifier for the group on the system/platform.', + 'process.session_leader.supplemental_groups.name': 'Name of the group.', + 'process.session_leader.tty': + 'Information about the controlling TTY device. If set, the process belongs to an interactive session.', + 'process.session_leader.tty.char_device.major': + 'The major number identifies the driver associated with the device. The character device\'s major and minor numbers can be algorithmically combined to produce the more familiar terminal identifiers such as "ttyS0" and "pts/0". 
For more details, please refer to the Linux kernel documentation.', + 'process.session_leader.tty.char_device.minor': + 'The minor number is used only by the driver specified by the major number; other parts of the kernel don\u2019t use it, and merely pass it along to the driver. It is common for a driver to control several devices; the minor number provides a way for the driver to differentiate among them.', + 'process.session_leader.user.id': 'Unique identifier of the user.', + 'process.session_leader.user.name': 'Short name or login of the user.', + 'process.session_leader.working_directory': 'The working directory of the process.', + 'process.start': 'The time the process started.', + 'process.supplemental_groups.id': 'Unique identifier for the group on the system/platform.', + 'process.supplemental_groups.name': 'Name of the group.', + 'process.thread.id': 'Thread ID.', + 'process.thread.name': 'Thread name.', + 'process.title': + 'Process title.\nThe proctitle, some times the same as process name. Can also be different: for example a browser setting its title to the web page currently opened.', + 'process.tty': 'Information about the controlling TTY device. If set, the process belongs to an interactive session.', + 'process.tty.char_device.major': + 'The major number identifies the driver associated with the device. The character device\'s major and minor numbers can be algorithmically combined to produce the more familiar terminal identifiers such as "ttyS0" and "pts/0". For more details, please refer to the Linux kernel documentation.', + 'process.tty.char_device.minor': + 'The minor number is used only by the driver specified by the major number; other parts of the kernel don\u2019t use it, and merely pass it along to the driver. It is common for a driver to control several devices; the minor number provides a way for the driver to differentiate among them.', + 'process.tty.columns': + "The number of character columns per line. 
e.g terminal width\nTerminal sizes can change, so this value reflects the maximum value for a given IO event. i.e. where event.action = 'text_output'", + 'process.tty.rows': + "The number of character rows in the terminal. e.g terminal height\nTerminal sizes can change, so this value reflects the maximum value for a given IO event. i.e. where event.action = 'text_output'", + 'process.uptime': 'Seconds the process has been up.', + 'process.user.id': 'Unique identifier of the user.', + 'process.user.name': 'Short name or login of the user.', + 'process.working_directory': 'The working directory of the process.', + 'registry.data.bytes': + 'Original bytes written with base64 encoding.\nFor Windows registry operations, such as SetValueEx and RegQueryValueEx, this corresponds to the data pointed by `lp_data`. This is optional but provides better recoverability and should be populated for REG_BINARY encoded values.', + 'registry.data.strings': + 'Content when writing string types.\nPopulated as an array when writing string data to the registry. For single string registry types (REG_SZ, REG_EXPAND_SZ), this should be an array with one string. For sequences of string with REG_MULTI_SZ, this array will be variable length. For numeric data, such as REG_DWORD and REG_QWORD, this should be populated with the decimal representation (e.g `"1"`).', + 'registry.data.type': 'Standard registry type for encoding contents', + 'registry.hive': 'Abbreviated name for the hive.', + 'registry.key': 'Hive-relative path of keys.', + 'registry.path': 'Full path, including hive, key and value', + 'registry.value': 'Name of the value written.', + 'related.hash': + "All the hashes seen on your event. Populating this field, then using it to search for hashes can help in situations where you're unsure what the hash algorithm is (and therefore which key name to search).", + 'related.hosts': + 'All hostnames or other host identifiers seen on your event. 
Example identifiers include FQDNs, domain names, workstation names, or aliases.', + 'related.ip': 'All of the IPs seen on your event.', + 'related.user': 'All the user names or other user identifiers seen on the event.', + 'rule.author': + 'Name, organization, or pseudonym of the author or authors who created the rule used to generate this event.', + 'rule.category': 'A categorization value keyword used by the entity using the rule for detection of this event.', + 'rule.description': 'The description of the rule generating the event.', + 'rule.id': + 'A rule ID that is unique within the scope of an agent, observer, or other entity using the rule for detection of this event.', + 'rule.license': 'Name of the license under which the rule used to generate this event is made available.', + 'rule.name': 'The name of the rule or signature generating the event.', + 'rule.reference': + "Reference URL to additional information about the rule used to generate this event.\nThe URL can point to the vendor's documentation about the rule. If that's not available, it can also be a link to a more general page describing this type of alert.", + 'rule.ruleset': + 'Name of the ruleset, policy, group, or parent category in which the rule used to generate this event is a member.', + 'rule.uuid': + 'A rule ID that is unique within the scope of a set or group of agents, observers, or other entities using the rule for detection of this event.', + 'rule.version': 'The version / revision of the rule being used for analysis.', + 'server.address': + 'Some event server addresses are defined ambiguously. The event will sometimes list an IP, a domain or a unix socket. You should always store the raw address in the `.address` field.\nThen it should be duplicated to `.ip` or `.domain`, depending on which one it is.', + 'server.as.number': + 'Unique number allocated to the autonomous system. 
The autonomous system number (ASN) uniquely identifies each network on the Internet.', + 'server.as.organization.name': 'Organization name.', + 'server.bytes': 'Bytes sent from the server to the client.', + 'server.domain': + 'The domain name of the server system.\nThis value may be a host name, a fully qualified domain name, or another host naming format. The value may derive from the original event or be added from enrichment.', + 'server.geo.city_name': 'City name.', + 'server.geo.continent_code': "Two-letter code representing continent's name.", + 'server.geo.continent_name': 'Name of the continent.', + 'server.geo.country_iso_code': 'Country ISO code.', + 'server.geo.country_name': 'Country name.', + 'server.geo.location': 'Longitude and latitude.', + 'server.geo.name': + 'User-defined description of a location, at the level of granularity they care about.\nCould be the name of their data centers, the floor number, if this describes a local physical entity, city names.\nNot typically used in automated geolocation.', + 'server.geo.postal_code': + 'Postal code associated with the location.\nValues appropriate for this field may also be known as a postcode or ZIP code and will vary widely from country to country.', + 'server.geo.region_iso_code': 'Region ISO code.', + 'server.geo.region_name': 'Region name.', + 'server.geo.timezone': 'The time zone of the location, such as IANA time zone name.', + 'server.ip': 'IP address of the server (IPv4 or IPv6).', + 'server.mac': + 'MAC address of the server.\nThe notation format from RFC 7042 is suggested: Each octet (that is, 8-bit byte) is represented by two [uppercase] hexadecimal digits giving the value of the octet as an unsigned integer. Successive octets are separated by a hyphen.', + 'server.nat.ip': + 'Translated ip of destination based NAT sessions (e.g. 
internet to private DMZ)\nTypically used with load balancers, firewalls, or routers.', + 'server.nat.port': + 'Translated port of destination based NAT sessions (e.g. internet to private DMZ)\nTypically used with load balancers, firewalls, or routers.', + 'server.packets': 'Packets sent from the server to the client.', + 'server.port': 'Port of the server.', + 'server.registered_domain': + 'The highest registered server domain, stripped of the subdomain.\nFor example, the registered domain for "foo.example.com" is "example.com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). Trying to approximate this by simply taking the last two labels will not work well for TLDs such as "co.uk".', + 'server.subdomain': + 'The subdomain portion of a fully qualified domain name includes all of the names except the host name under the registered_domain. In a partially qualified domain, or if the the qualification level of the full name cannot be determined, subdomain contains all of the names below the registered domain.\nFor example the subdomain portion of "www.east.mydomain.co.uk" is "east". If the domain has multiple levels of subdomain, such as "sub2.sub1.example.com", the subdomain field should contain "sub2.sub1", with no trailing period.', + 'server.top_level_domain': + 'The effective top level domain (eTLD), also known as the domain suffix, is the last part of the domain name. For example, the top level domain for example.com is "com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). 
Trying to approximate this by simply taking the last label will not work well for effective TLDs such as "co.uk".', + 'server.user.domain': + 'Name of the directory the user is a member of.\nFor example, an LDAP or Active Directory domain name.', + 'server.user.email': 'User email address.', + 'server.user.full_name': "User's full name, if available.", + 'server.user.group.domain': + 'Name of the directory the group is a member of.\nFor example, an LDAP or Active Directory domain name.', + 'server.user.group.id': 'Unique identifier for the group on the system/platform.', + 'server.user.group.name': 'Name of the group.', + 'server.user.hash': + 'Unique user hash to correlate information for a user in anonymized form.\nUseful if `user.id` or `user.name` contain confidential information and cannot be used.', + 'server.user.id': 'Unique identifier of the user.', + 'server.user.name': 'Short name or login of the user.', + 'server.user.roles': 'Array of user roles at the time of the event.', + 'service.address': + 'Address where data about this service was collected from.\nThis should be a URI, network address (ipv4:port or [ipv6]:port) or a resource path (sockets).', + 'service.environment': + 'Identifies the environment where the service is running.\nIf the same service runs in different environments (production, staging, QA, development, etc.), the environment can identify other instances of the same service. Can also group services and applications from the same environment.', + 'service.ephemeral_id': + 'Ephemeral identifier of this service (if one exists).\nThis id normally changes across restarts, but `service.id` does not.', + 'service.id': + 'Unique identifier of the running service. If the service is comprised of many nodes, the `service.id` should be the same for all nodes.\nThis id should uniquely identify the service. 
This makes it possible to correlate logs and metrics for one specific service, no matter which particular node emitted the event.\nNote that if you need to see the events from one specific host of the service, you should filter on that `host.name` or `host.id` instead.', + 'service.name': + 'Name of the service data is collected from.\nThe name of the service is normally user given. This allows for distributed services that run on multiple hosts to correlate the related instances based on the name.\nIn the case of Elasticsearch the `service.name` could contain the cluster name. For Beats the `service.name` is by default a copy of the `service.type` field if no name is specified.', + 'service.node.name': + "Name of a service node.\nThis allows for two nodes of the same service running on the same host to be differentiated. Therefore, `service.node.name` should typically be unique across nodes of a given service.\nIn the case of Elasticsearch, the `service.node.name` could contain the unique node name within the Elasticsearch cluster. In cases where the service doesn't have the concept of a node name, the host name or container name can be used to distinguish running instances that make up this service. If those do not provide uniqueness (e.g. multiple instances of the service running on the same host) - the node name can be manually set.", + 'service.node.role': + 'Deprecated for removal in next major version release. 
This field will be superseded by `node.roles`.\nRole of a service node.\nThis allows for distinction between different running roles of the same service.\nIn the case of Kibana, the `service.node.role` could be `ui` or `background_tasks`.\nIn the case of Elasticsearch, the `service.node.role` could be `master` or `data`.\nOther services could use this to distinguish between a `web` and `worker` role running as part of the service.', + 'service.node.roles': + 'Roles of a service node.\nThis allows for distinction between different running roles of the same service.\nIn the case of Kibana, the `service.node.role` could be `ui` or `background_tasks` or both.\nIn the case of Elasticsearch, the `service.node.role` could be `master` or `data` or both.\nOther services could use this to distinguish between a `web` and `worker` role running as part of the service.', + 'service.origin.address': + 'Address where data about this service was collected from.\nThis should be a URI, network address (ipv4:port or [ipv6]:port) or a resource path (sockets).', + 'service.origin.environment': + 'Identifies the environment where the service is running.\nIf the same service runs in different environments (production, staging, QA, development, etc.), the environment can identify other instances of the same service. Can also group services and applications from the same environment.', + 'service.origin.ephemeral_id': + 'Ephemeral identifier of this service (if one exists).\nThis id normally changes across restarts, but `service.id` does not.', + 'service.origin.id': + 'Unique identifier of the running service. If the service is comprised of many nodes, the `service.id` should be the same for all nodes.\nThis id should uniquely identify the service. 
This makes it possible to correlate logs and metrics for one specific service, no matter which particular node emitted the event.\nNote that if you need to see the events from one specific host of the service, you should filter on that `host.name` or `host.id` instead.', + 'service.origin.name': + 'Name of the service data is collected from.\nThe name of the service is normally user given. This allows for distributed services that run on multiple hosts to correlate the related instances based on the name.\nIn the case of Elasticsearch the `service.name` could contain the cluster name. For Beats the `service.name` is by default a copy of the `service.type` field if no name is specified.', + 'service.origin.node.name': + "Name of a service node.\nThis allows for two nodes of the same service running on the same host to be differentiated. Therefore, `service.node.name` should typically be unique across nodes of a given service.\nIn the case of Elasticsearch, the `service.node.name` could contain the unique node name within the Elasticsearch cluster. In cases where the service doesn't have the concept of a node name, the host name or container name can be used to distinguish running instances that make up this service. If those do not provide uniqueness (e.g. multiple instances of the service running on the same host) - the node name can be manually set.", + 'service.origin.node.role': + 'Deprecated for removal in next major version release. 
This field will be superseded by `node.roles`.\nRole of a service node.\nThis allows for distinction between different running roles of the same service.\nIn the case of Kibana, the `service.node.role` could be `ui` or `background_tasks`.\nIn the case of Elasticsearch, the `service.node.role` could be `master` or `data`.\nOther services could use this to distinguish between a `web` and `worker` role running as part of the service.', + 'service.origin.node.roles': + 'Roles of a service node.\nThis allows for distinction between different running roles of the same service.\nIn the case of Kibana, the `service.node.role` could be `ui` or `background_tasks` or both.\nIn the case of Elasticsearch, the `service.node.role` could be `master` or `data` or both.\nOther services could use this to distinguish between a `web` and `worker` role running as part of the service.', + 'service.origin.state': 'Current state of the service.', + 'service.origin.type': + 'The type of the service data is collected from.\nThe type can be used to group and correlate logs and metrics from one service type.\nExample: If logs or metrics are collected from Elasticsearch, `service.type` would be `elasticsearch`.', + 'service.origin.version': + 'Version of the service the data was collected from.\nThis allows to look at a data set only for a specific version of a service.', + 'service.state': 'Current state of the service.', + 'service.target.address': + 'Address where data about this service was collected from.\nThis should be a URI, network address (ipv4:port or [ipv6]:port) or a resource path (sockets).', + 'service.target.environment': + 'Identifies the environment where the service is running.\nIf the same service runs in different environments (production, staging, QA, development, etc.), the environment can identify other instances of the same service. 
Can also group services and applications from the same environment.', + 'service.target.ephemeral_id': + 'Ephemeral identifier of this service (if one exists).\nThis id normally changes across restarts, but `service.id` does not.', + 'service.target.id': + 'Unique identifier of the running service. If the service is comprised of many nodes, the `service.id` should be the same for all nodes.\nThis id should uniquely identify the service. This makes it possible to correlate logs and metrics for one specific service, no matter which particular node emitted the event.\nNote that if you need to see the events from one specific host of the service, you should filter on that `host.name` or `host.id` instead.', + 'service.target.name': + 'Name of the service data is collected from.\nThe name of the service is normally user given. This allows for distributed services that run on multiple hosts to correlate the related instances based on the name.\nIn the case of Elasticsearch the `service.name` could contain the cluster name. For Beats the `service.name` is by default a copy of the `service.type` field if no name is specified.', + 'service.target.node.name': + "Name of a service node.\nThis allows for two nodes of the same service running on the same host to be differentiated. Therefore, `service.node.name` should typically be unique across nodes of a given service.\nIn the case of Elasticsearch, the `service.node.name` could contain the unique node name within the Elasticsearch cluster. In cases where the service doesn't have the concept of a node name, the host name or container name can be used to distinguish running instances that make up this service. If those do not provide uniqueness (e.g. multiple instances of the service running on the same host) - the node name can be manually set.", + 'service.target.node.role': + 'Deprecated for removal in next major version release. 
This field will be superseded by `node.roles`.\nRole of a service node.\nThis allows for distinction between different running roles of the same service.\nIn the case of Kibana, the `service.node.role` could be `ui` or `background_tasks`.\nIn the case of Elasticsearch, the `service.node.role` could be `master` or `data`.\nOther services could use this to distinguish between a `web` and `worker` role running as part of the service.', + 'service.target.node.roles': + 'Roles of a service node.\nThis allows for distinction between different running roles of the same service.\nIn the case of Kibana, the `service.node.role` could be `ui` or `background_tasks` or both.\nIn the case of Elasticsearch, the `service.node.role` could be `master` or `data` or both.\nOther services could use this to distinguish between a `web` and `worker` role running as part of the service.', + 'service.target.state': 'Current state of the service.', + 'service.target.type': + 'The type of the service data is collected from.\nThe type can be used to group and correlate logs and metrics from one service type.\nExample: If logs or metrics are collected from Elasticsearch, `service.type` would be `elasticsearch`.', + 'service.target.version': + 'Version of the service the data was collected from.\nThis allows to look at a data set only for a specific version of a service.', + 'service.type': + 'The type of the service data is collected from.\nThe type can be used to group and correlate logs and metrics from one service type.\nExample: If logs or metrics are collected from Elasticsearch, `service.type` would be `elasticsearch`.', + 'service.version': + 'Version of the service the data was collected from.\nThis allows to look at a data set only for a specific version of a service.', + 'source.address': + 'Some event source addresses are defined ambiguously. The event will sometimes list an IP, a domain or a unix socket. 
You should always store the raw address in the `.address` field.\nThen it should be duplicated to `.ip` or `.domain`, depending on which one it is.', + 'source.as.number': + 'Unique number allocated to the autonomous system. The autonomous system number (ASN) uniquely identifies each network on the Internet.', + 'source.as.organization.name': 'Organization name.', + 'source.bytes': 'Bytes sent from the source to the destination.', + 'source.domain': + 'The domain name of the source system.\nThis value may be a host name, a fully qualified domain name, or another host naming format. The value may derive from the original event or be added from enrichment.', + 'source.geo.city_name': 'City name.', + 'source.geo.continent_code': "Two-letter code representing continent's name.", + 'source.geo.continent_name': 'Name of the continent.', + 'source.geo.country_iso_code': 'Country ISO code.', + 'source.geo.country_name': 'Country name.', + 'source.geo.location': 'Longitude and latitude.', + 'source.geo.name': + 'User-defined description of a location, at the level of granularity they care about.\nCould be the name of their data centers, the floor number, if this describes a local physical entity, city names.\nNot typically used in automated geolocation.', + 'source.geo.postal_code': + 'Postal code associated with the location.\nValues appropriate for this field may also be known as a postcode or ZIP code and will vary widely from country to country.', + 'source.geo.region_iso_code': 'Region ISO code.', + 'source.geo.region_name': 'Region name.', + 'source.geo.timezone': 'The time zone of the location, such as IANA time zone name.', + 'source.ip': 'IP address of the source (IPv4 or IPv6).', + 'source.mac': + 'MAC address of the source.\nThe notation format from RFC 7042 is suggested: Each octet (that is, 8-bit byte) is represented by two [uppercase] hexadecimal digits giving the value of the octet as an unsigned integer. 
Successive octets are separated by a hyphen.', + 'source.nat.ip': + 'Translated ip of source based NAT sessions (e.g. internal client to internet)\nTypically connections traversing load balancers, firewalls, or routers.', + 'source.nat.port': + 'Translated port of source based NAT sessions. (e.g. internal client to internet)\nTypically used with load balancers, firewalls, or routers.', + 'source.packets': 'Packets sent from the source to the destination.', + 'source.port': 'Port of the source.', + 'source.registered_domain': + 'The highest registered source domain, stripped of the subdomain.\nFor example, the registered domain for "foo.example.com" is "example.com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). Trying to approximate this by simply taking the last two labels will not work well for TLDs such as "co.uk".', + 'source.subdomain': + 'The subdomain portion of a fully qualified domain name includes all of the names except the host name under the registered_domain. In a partially qualified domain, or if the the qualification level of the full name cannot be determined, subdomain contains all of the names below the registered domain.\nFor example the subdomain portion of "www.east.mydomain.co.uk" is "east". If the domain has multiple levels of subdomain, such as "sub2.sub1.example.com", the subdomain field should contain "sub2.sub1", with no trailing period.', + 'source.top_level_domain': + 'The effective top level domain (eTLD), also known as the domain suffix, is the last part of the domain name. For example, the top level domain for example.com is "com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). 
Trying to approximate this by simply taking the last label will not work well for effective TLDs such as "co.uk".', + 'source.user.domain': + 'Name of the directory the user is a member of.\nFor example, an LDAP or Active Directory domain name.', + 'source.user.email': 'User email address.', + 'source.user.full_name': "User's full name, if available.", + 'source.user.group.domain': + 'Name of the directory the group is a member of.\nFor example, an LDAP or Active Directory domain name.', + 'source.user.group.id': 'Unique identifier for the group on the system/platform.', + 'source.user.group.name': 'Name of the group.', + 'source.user.hash': + 'Unique user hash to correlate information for a user in anonymized form.\nUseful if `user.id` or `user.name` contain confidential information and cannot be used.', + 'source.user.id': 'Unique identifier of the user.', + 'source.user.name': 'Short name or login of the user.', + 'source.user.roles': 'Array of user roles at the time of the event.', + 'span.id': + 'Unique identifier of the span within the scope of its trace.\nA span represents an operation within a transaction, such as a request to another service, or a database query.', + tags: 'List of keywords used to tag each event.', + 'threat.enrichments': + 'A list of associated indicators objects enriching the event, and the context of that association/enrichment.', + 'threat.enrichments.indicator': 'Object containing associated indicators enriching the event.', + 'threat.enrichments.indicator.as.number': + 'Unique number allocated to the autonomous system. The autonomous system number (ASN) uniquely identifies each network on the Internet.', + 'threat.enrichments.indicator.as.organization.name': 'Organization name.', + 'threat.enrichments.indicator.confidence': + 'Identifies the vendor-neutral confidence rating using the None/Low/Medium/High scale defined in Appendix A of the STIX 2.1 framework. 
Vendor-specific confidence scales may be added as custom fields.', + 'threat.enrichments.indicator.description': 'Describes the type of action conducted by the threat.', + 'threat.enrichments.indicator.email.address': + 'Identifies a threat indicator as an email address (irrespective of direction).', + 'threat.enrichments.indicator.file.accessed': + 'Last time the file was accessed.\nNote that not all filesystems keep track of access time.', + 'threat.enrichments.indicator.file.attributes': + "Array of file attributes.\nAttributes names will vary by platform. Here's a non-exhaustive list of values that are expected in this field: archive, compressed, directory, encrypted, execute, hidden, read, readonly, system, write.", + 'threat.enrichments.indicator.file.code_signature.digest_algorithm': + 'The hashing algorithm used to sign the process.\nThis value can distinguish signatures when a file is signed multiple times by the same signer but with a different digest algorithm.', + 'threat.enrichments.indicator.file.code_signature.exists': 'Boolean to capture if a signature is present.', + 'threat.enrichments.indicator.file.code_signature.signing_id': + 'The identifier used to sign the process.\nThis is used to identify the application manufactured by a software vendor. The field is relevant to Apple *OS only.', + 'threat.enrichments.indicator.file.code_signature.status': + 'Additional information about the certificate status.\nThis is useful for logging cryptographic errors with the certificate validity or trust status. Leave unpopulated if the validity or trust of the certificate was unchecked.', + 'threat.enrichments.indicator.file.code_signature.subject_name': 'Subject name of the code signer', + 'threat.enrichments.indicator.file.code_signature.team_id': + 'The team identifier used to sign the process.\nThis is used to identify the team or vendor of a software product. 
The field is relevant to Apple *OS only.', + 'threat.enrichments.indicator.file.code_signature.timestamp': + 'Date and time when the code signature was generated and signed.', + 'threat.enrichments.indicator.file.code_signature.trusted': + 'Stores the trust status of the certificate chain.\nValidating the trust of the certificate chain may be complicated, and this field should only be populated by tools that actively check the status.', + 'threat.enrichments.indicator.file.code_signature.valid': + 'Boolean to capture if the digital signature is verified against the binary content.\nLeave unpopulated if a certificate was unchecked.', + 'threat.enrichments.indicator.file.created': + 'File creation time.\nNote that not all filesystems store the creation time.', + 'threat.enrichments.indicator.file.ctime': + 'Last time the file attributes or metadata changed.\nNote that changes to the file content will update `mtime`. This implies `ctime` will be adjusted at the same time, since `mtime` is an attribute of the file.', + 'threat.enrichments.indicator.file.device': 'Device that is the source of the file.', + 'threat.enrichments.indicator.file.directory': + 'Directory where the file is located. It should include the drive letter, when appropriate.', + 'threat.enrichments.indicator.file.drive_letter': + 'Drive letter where the file is located. This field is only relevant on Windows.\nThe value should be uppercase, and not include the colon.', + 'threat.enrichments.indicator.file.elf.architecture': 'Machine architecture of the ELF file.', + 'threat.enrichments.indicator.file.elf.byte_order': 'Byte sequence of ELF file.', + 'threat.enrichments.indicator.file.elf.cpu_type': 'CPU type of the ELF file.', + 'threat.enrichments.indicator.file.elf.creation_date': + "Extracted when possible from the file's metadata. Indicates when it was built or compiled. 
It can also be faked by malware creators.", + 'threat.enrichments.indicator.file.elf.exports': 'List of exported element names and types.', + 'threat.enrichments.indicator.file.elf.go_import_hash': + 'A hash of the Go language imports in an ELF file excluding standard library imports. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', + 'threat.enrichments.indicator.file.elf.go_imports': 'List of imported Go language element names and types.', + 'threat.enrichments.indicator.file.elf.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', + 'threat.enrichments.indicator.file.elf.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'threat.enrichments.indicator.file.elf.go_stripped': + 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', + 'threat.enrichments.indicator.file.elf.header.abi_version': 'Version of the ELF Application Binary Interface (ABI).', + 'threat.enrichments.indicator.file.elf.header.class': 'Header class of the ELF file.', + 'threat.enrichments.indicator.file.elf.header.data': 'Data table of the ELF header.', + 'threat.enrichments.indicator.file.elf.header.entrypoint': 'Header entrypoint of the ELF file.', + 'threat.enrichments.indicator.file.elf.header.object_version': '"0x1" for original ELF files.', + 'threat.enrichments.indicator.file.elf.header.os_abi': 'Application Binary Interface (ABI) of the Linux OS.', + 'threat.enrichments.indicator.file.elf.header.type': 'Header type of the ELF file.', + 'threat.enrichments.indicator.file.elf.header.version': 'Version of the ELF header.', + 
'threat.enrichments.indicator.file.elf.import_hash': + 'A hash of the imports in an ELF file. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is an ELF implementation of the Windows PE imphash.', + 'threat.enrichments.indicator.file.elf.imports': 'List of imported element names and types.', + 'threat.enrichments.indicator.file.elf.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'threat.enrichments.indicator.file.elf.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'threat.enrichments.indicator.file.elf.sections': + 'An array containing an object for each section of the ELF file.\nThe keys that should be present in these objects are defined by sub-fields underneath `elf.sections.*`.', + 'threat.enrichments.indicator.file.elf.sections.chi2': 'Chi-square probability distribution of the section.', + 'threat.enrichments.indicator.file.elf.sections.entropy': 'Shannon entropy calculation from the section.', + 'threat.enrichments.indicator.file.elf.sections.flags': 'ELF Section List flags.', + 'threat.enrichments.indicator.file.elf.sections.name': 'ELF Section List name.', + 'threat.enrichments.indicator.file.elf.sections.physical_offset': 'ELF Section List offset.', + 'threat.enrichments.indicator.file.elf.sections.physical_size': 'ELF Section List physical size.', + 'threat.enrichments.indicator.file.elf.sections.type': 'ELF Section List type.', + 'threat.enrichments.indicator.file.elf.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', + 'threat.enrichments.indicator.file.elf.sections.virtual_address': 'ELF Section List virtual address.', + 'threat.enrichments.indicator.file.elf.sections.virtual_size': 'ELF Section List virtual size.', + 
'threat.enrichments.indicator.file.elf.segments': + 'An array containing an object for each segment of the ELF file.\nThe keys that should be present in these objects are defined by sub-fields underneath `elf.segments.*`.', + 'threat.enrichments.indicator.file.elf.segments.sections': 'ELF object segment sections.', + 'threat.enrichments.indicator.file.elf.segments.type': 'ELF object segment type.', + 'threat.enrichments.indicator.file.elf.shared_libraries': 'List of shared libraries used by this ELF object.', + 'threat.enrichments.indicator.file.elf.telfhash': 'telfhash symbol hash for ELF file.', + 'threat.enrichments.indicator.file.extension': + 'File extension, excluding the leading dot.\nNote that when the file name has multiple extensions (example.tar.gz), only the last one should be captured ("gz", not "tar.gz").', + 'threat.enrichments.indicator.file.fork_name': + 'A fork is additional data associated with a filesystem object.\nOn Linux, a resource fork is used to store additional data with a filesystem object. A file always has at least one fork for the data portion, and additional forks may exist.\nOn NTFS, this is analogous to an Alternate Data Stream (ADS), and the default data stream for a file is just called $DATA. Zone.Identifier is commonly used by Windows to track contents downloaded from the Internet. An ADS is typically of the form: `C:\\path\\to\\filename.extension:some_fork_name`, and `some_fork_name` is the value that should populate `fork_name`. `filename.extension` should populate `file.name`, and `extension` should populate `file.extension`. 
The full path, `file.path`, will include the fork name.', + 'threat.enrichments.indicator.file.gid': 'Primary group ID (GID) of the file.', + 'threat.enrichments.indicator.file.group': 'Primary group name of the file.', + 'threat.enrichments.indicator.file.hash.md5': 'MD5 hash.', + 'threat.enrichments.indicator.file.hash.sha1': 'SHA1 hash.', + 'threat.enrichments.indicator.file.hash.sha256': 'SHA256 hash.', + 'threat.enrichments.indicator.file.hash.sha384': 'SHA384 hash.', + 'threat.enrichments.indicator.file.hash.sha512': 'SHA512 hash.', + 'threat.enrichments.indicator.file.hash.ssdeep': 'SSDEEP hash.', + 'threat.enrichments.indicator.file.hash.tlsh': 'TLSH hash.', + 'threat.enrichments.indicator.file.inode': 'Inode representing the file in the filesystem.', + 'threat.enrichments.indicator.file.mime_type': + 'MIME type should identify the format of the file or stream of bytes using https://www.iana.org/assignments/media-types/media-types.xhtml[IANA official types], where possible. When more than one type is applicable, the most specific type should be used.', + 'threat.enrichments.indicator.file.mode': 'Mode of the file in octal representation.', + 'threat.enrichments.indicator.file.mtime': 'Last time the file content was modified.', + 'threat.enrichments.indicator.file.name': 'Name of the file including the extension, without the directory.', + 'threat.enrichments.indicator.file.owner': "File owner's username.", + 'threat.enrichments.indicator.file.path': + 'Full path to the file, including the file name. 
It should include the drive letter, when appropriate.', + 'threat.enrichments.indicator.file.pe.architecture': 'CPU architecture target for the file.', + 'threat.enrichments.indicator.file.pe.company': 'Internal company name of the file, provided at compile-time.', + 'threat.enrichments.indicator.file.pe.description': 'Internal description of the file, provided at compile-time.', + 'threat.enrichments.indicator.file.pe.file_version': 'Internal version of the file, provided at compile-time.', + 'threat.enrichments.indicator.file.pe.go_import_hash': + 'A hash of the Go language imports in a PE file excluding standard library imports. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', + 'threat.enrichments.indicator.file.pe.go_imports': 'List of imported Go language element names and types.', + 'threat.enrichments.indicator.file.pe.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', + 'threat.enrichments.indicator.file.pe.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'threat.enrichments.indicator.file.pe.go_stripped': + 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', + 'threat.enrichments.indicator.file.pe.imphash': + 'A hash of the imports in a PE file. 
An imphash -- or import hash -- can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nLearn more at https://www.fireeye.com/blog/threat-research/2014/01/tracking-malware-import-hashing.html.', + 'threat.enrichments.indicator.file.pe.import_hash': + 'A hash of the imports in a PE file. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a synonym for imphash.', + 'threat.enrichments.indicator.file.pe.imports': 'List of imported element names and types.', + 'threat.enrichments.indicator.file.pe.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'threat.enrichments.indicator.file.pe.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'threat.enrichments.indicator.file.pe.original_file_name': 'Internal name of the file, provided at compile-time.', + 'threat.enrichments.indicator.file.pe.pehash': + 'A hash of the PE header and data from one or more PE sections. 
An pehash can be used to cluster files by transforming structural information about a file into a hash value.\nLearn more at https://www.usenix.org/legacy/events/leet09/tech/full_papers/wicherski/wicherski_html/index.html.', + 'threat.enrichments.indicator.file.pe.product': 'Internal product name of the file, provided at compile-time.', + 'threat.enrichments.indicator.file.pe.sections': + 'An array containing an object for each section of the PE file.\nThe keys that should be present in these objects are defined by sub-fields underneath `pe.sections.*`.', + 'threat.enrichments.indicator.file.pe.sections.entropy': 'Shannon entropy calculation from the section.', + 'threat.enrichments.indicator.file.pe.sections.name': 'PE Section List name.', + 'threat.enrichments.indicator.file.pe.sections.physical_size': 'PE Section List physical size.', + 'threat.enrichments.indicator.file.pe.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', + 'threat.enrichments.indicator.file.pe.sections.virtual_size': + 'PE Section List virtual size. This is always the same as `physical_size`.', + 'threat.enrichments.indicator.file.size': 'File size in bytes.\nOnly relevant when `file.type` is "file".', + 'threat.enrichments.indicator.file.target_path': 'Target path for symlinks.', + 'threat.enrichments.indicator.file.type': 'File type (file, dir, or symlink).', + 'threat.enrichments.indicator.file.uid': 'The user ID (UID) or security identifier (SID) of the file owner.', + 'threat.enrichments.indicator.file.x509.alternative_names': + 'List of subject alternative names (SAN). 
Name types vary by certificate authority and certificate type but commonly contain IP addresses, DNS names (and wildcards), and email addresses.', + 'threat.enrichments.indicator.file.x509.issuer.common_name': + 'List of common name (CN) of issuing certificate authority.', + 'threat.enrichments.indicator.file.x509.issuer.country': 'List of country \\(C) codes', + 'threat.enrichments.indicator.file.x509.issuer.distinguished_name': + 'Distinguished name (DN) of issuing certificate authority.', + 'threat.enrichments.indicator.file.x509.issuer.locality': 'List of locality names (L)', + 'threat.enrichments.indicator.file.x509.issuer.organization': + 'List of organizations (O) of issuing certificate authority.', + 'threat.enrichments.indicator.file.x509.issuer.organizational_unit': + 'List of organizational units (OU) of issuing certificate authority.', + 'threat.enrichments.indicator.file.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', + 'threat.enrichments.indicator.file.x509.not_after': 'Time at which the certificate is no longer considered valid.', + 'threat.enrichments.indicator.file.x509.not_before': 'Time at which the certificate is first considered valid.', + 'threat.enrichments.indicator.file.x509.public_key_algorithm': 'Algorithm used to generate the public key.', + 'threat.enrichments.indicator.file.x509.public_key_curve': + 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', + 'threat.enrichments.indicator.file.x509.public_key_exponent': + 'Exponent used to derive the public key. This is algorithm specific.', + 'threat.enrichments.indicator.file.x509.public_key_size': 'The size of the public key space in bits.', + 'threat.enrichments.indicator.file.x509.serial_number': + 'Unique serial number issued by the certificate authority. 
For consistency, if this value is alphanumeric, it should be formatted without colons and uppercase characters.', + 'threat.enrichments.indicator.file.x509.signature_algorithm': + 'Identifier for certificate signature algorithm. We recommend using names found in Go Lang Crypto library. See https://github.com/golang/go/blob/go1.14/src/crypto/x509/x509.go#L337-L353.', + 'threat.enrichments.indicator.file.x509.subject.common_name': 'List of common names (CN) of subject.', + 'threat.enrichments.indicator.file.x509.subject.country': 'List of country \\(C) code', + 'threat.enrichments.indicator.file.x509.subject.distinguished_name': + 'Distinguished name (DN) of the certificate subject entity.', + 'threat.enrichments.indicator.file.x509.subject.locality': 'List of locality names (L)', + 'threat.enrichments.indicator.file.x509.subject.organization': 'List of organizations (O) of subject.', + 'threat.enrichments.indicator.file.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', + 'threat.enrichments.indicator.file.x509.subject.state_or_province': 'List of state or province names (ST, S, or P)', + 'threat.enrichments.indicator.file.x509.version_number': 'Version of x509 format.', + 'threat.enrichments.indicator.first_seen': + 'The date and time when intelligence source first reported sighting this indicator.', + 'threat.enrichments.indicator.geo.city_name': 'City name.', + 'threat.enrichments.indicator.geo.continent_code': "Two-letter code representing continent's name.", + 'threat.enrichments.indicator.geo.continent_name': 'Name of the continent.', + 'threat.enrichments.indicator.geo.country_iso_code': 'Country ISO code.', + 'threat.enrichments.indicator.geo.country_name': 'Country name.', + 'threat.enrichments.indicator.geo.location': 'Longitude and latitude.', + 'threat.enrichments.indicator.geo.name': + 'User-defined description of a location, at the level of granularity they care about.\nCould be the name of their data centers, 
the floor number, if this describes a local physical entity, city names.\nNot typically used in automated geolocation.', + 'threat.enrichments.indicator.geo.postal_code': + 'Postal code associated with the location.\nValues appropriate for this field may also be known as a postcode or ZIP code and will vary widely from country to country.', + 'threat.enrichments.indicator.geo.region_iso_code': 'Region ISO code.', + 'threat.enrichments.indicator.geo.region_name': 'Region name.', + 'threat.enrichments.indicator.geo.timezone': 'The time zone of the location, such as IANA time zone name.', + 'threat.enrichments.indicator.ip': 'Identifies a threat indicator as an IP address (irrespective of direction).', + 'threat.enrichments.indicator.last_seen': + 'The date and time when intelligence source last reported sighting this indicator.', + 'threat.enrichments.indicator.marking.tlp': 'Traffic Light Protocol sharing markings.', + 'threat.enrichments.indicator.marking.tlp_version': 'Traffic Light Protocol version.', + 'threat.enrichments.indicator.modified_at': + 'The date and time when intelligence source last modified information for this indicator.', + 'threat.enrichments.indicator.name': 'The display name indicator in an UI friendly format', + 'threat.enrichments.indicator.port': 'Identifies a threat indicator as a port number (irrespective of direction).', + 'threat.enrichments.indicator.provider': "The name of the indicator's provider.", + 'threat.enrichments.indicator.reference': 'Reference URL linking to additional information about this indicator.', + 'threat.enrichments.indicator.registry.data.bytes': + 'Original bytes written with base64 encoding.\nFor Windows registry operations, such as SetValueEx and RegQueryValueEx, this corresponds to the data pointed by `lp_data`. 
This is optional but provides better recoverability and should be populated for REG_BINARY encoded values.', + 'threat.enrichments.indicator.registry.data.strings': + 'Content when writing string types.\nPopulated as an array when writing string data to the registry. For single string registry types (REG_SZ, REG_EXPAND_SZ), this should be an array with one string. For sequences of string with REG_MULTI_SZ, this array will be variable length. For numeric data, such as REG_DWORD and REG_QWORD, this should be populated with the decimal representation (e.g `"1"`).', + 'threat.enrichments.indicator.registry.data.type': 'Standard registry type for encoding contents', + 'threat.enrichments.indicator.registry.hive': 'Abbreviated name for the hive.', + 'threat.enrichments.indicator.registry.key': 'Hive-relative path of keys.', + 'threat.enrichments.indicator.registry.path': 'Full path, including hive, key and value', + 'threat.enrichments.indicator.registry.value': 'Name of the value written.', + 'threat.enrichments.indicator.scanner_stats': + 'Count of AV/EDR vendors that successfully detected malicious file or URL.', + 'threat.enrichments.indicator.sightings': 'Number of times this indicator was observed conducting threat activity.', + 'threat.enrichments.indicator.type': 'Type of indicator as represented by Cyber Observable in STIX 2.0.', + 'threat.enrichments.indicator.url.domain': + 'Domain of the url, such as "www.elastic.co".\nIn some cases a URL may refer to an IP and/or port directly, without a domain name. 
In this case, the IP address would go to the `domain` field.\nIf the URL contains a literal IPv6 address enclosed by `[` and `]` (IETF RFC 2732), the `[` and `]` characters should also be captured in the `domain` field.', + 'threat.enrichments.indicator.url.extension': + 'The field contains the file extension from the original request url, excluding the leading dot.\nThe file extension is only set if it exists, as not every url has a file extension.\nThe leading period must not be included. For example, the value must be "png", not ".png".\nNote that when the file name has multiple extensions (example.tar.gz), only the last one should be captured ("gz", not "tar.gz").', + 'threat.enrichments.indicator.url.fragment': + 'Portion of the url after the `#`, such as "top".\nThe `#` is not part of the fragment.', + 'threat.enrichments.indicator.url.full': + 'If full URLs are important to your use case, they should be stored in `url.full`, whether this field is reconstructed or present in the event source.', + 'threat.enrichments.indicator.url.original': + 'Unmodified original url as seen in the event source.\nNote that in network monitoring, the observed URL may be a full URL, whereas in access logs, the URL is often just represented as a path.\nThis field is meant to represent the URL as it was observed, complete or not.', + 'threat.enrichments.indicator.url.password': 'Password of the request.', + 'threat.enrichments.indicator.url.path': 'Path of the request, such as "/search".', + 'threat.enrichments.indicator.url.port': 'Port of the request, such as 443.', + 'threat.enrichments.indicator.url.query': + 'The query field describes the query string of the request, such as "q=elasticsearch".\nThe `?` is excluded from the query string. If a URL contains no `?`, there is no query field. If there is a `?` but no query, the query field exists with an empty string. 
The `exists` query can be used to differentiate between the two cases.', + 'threat.enrichments.indicator.url.registered_domain': + 'The highest registered url domain, stripped of the subdomain.\nFor example, the registered domain for "foo.example.com" is "example.com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). Trying to approximate this by simply taking the last two labels will not work well for TLDs such as "co.uk".', + 'threat.enrichments.indicator.url.scheme': + 'Scheme of the request, such as "https".\nNote: The `:` is not part of the scheme.', + 'threat.enrichments.indicator.url.subdomain': + 'The subdomain portion of a fully qualified domain name includes all of the names except the host name under the registered_domain. In a partially qualified domain, or if the the qualification level of the full name cannot be determined, subdomain contains all of the names below the registered domain.\nFor example the subdomain portion of "www.east.mydomain.co.uk" is "east". If the domain has multiple levels of subdomain, such as "sub2.sub1.example.com", the subdomain field should contain "sub2.sub1", with no trailing period.', + 'threat.enrichments.indicator.url.top_level_domain': + 'The effective top level domain (eTLD), also known as the domain suffix, is the last part of the domain name. For example, the top level domain for example.com is "com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). Trying to approximate this by simply taking the last label will not work well for effective TLDs such as "co.uk".', + 'threat.enrichments.indicator.url.username': 'Username of the request.', + 'threat.enrichments.indicator.x509.alternative_names': + 'List of subject alternative names (SAN). 
Name types vary by certificate authority and certificate type but commonly contain IP addresses, DNS names (and wildcards), and email addresses.', + 'threat.enrichments.indicator.x509.issuer.common_name': 'List of common name (CN) of issuing certificate authority.', + 'threat.enrichments.indicator.x509.issuer.country': 'List of country \\(C) codes', + 'threat.enrichments.indicator.x509.issuer.distinguished_name': + 'Distinguished name (DN) of issuing certificate authority.', + 'threat.enrichments.indicator.x509.issuer.locality': 'List of locality names (L)', + 'threat.enrichments.indicator.x509.issuer.organization': + 'List of organizations (O) of issuing certificate authority.', + 'threat.enrichments.indicator.x509.issuer.organizational_unit': + 'List of organizational units (OU) of issuing certificate authority.', + 'threat.enrichments.indicator.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', + 'threat.enrichments.indicator.x509.not_after': 'Time at which the certificate is no longer considered valid.', + 'threat.enrichments.indicator.x509.not_before': 'Time at which the certificate is first considered valid.', + 'threat.enrichments.indicator.x509.public_key_algorithm': 'Algorithm used to generate the public key.', + 'threat.enrichments.indicator.x509.public_key_curve': + 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', + 'threat.enrichments.indicator.x509.public_key_exponent': + 'Exponent used to derive the public key. This is algorithm specific.', + 'threat.enrichments.indicator.x509.public_key_size': 'The size of the public key space in bits.', + 'threat.enrichments.indicator.x509.serial_number': + 'Unique serial number issued by the certificate authority. For consistency, if this value is alphanumeric, it should be formatted without colons and uppercase characters.', + 'threat.enrichments.indicator.x509.signature_algorithm': + 'Identifier for certificate signature algorithm. 
We recommend using names found in Go Lang Crypto library. See https://github.com/golang/go/blob/go1.14/src/crypto/x509/x509.go#L337-L353.', + 'threat.enrichments.indicator.x509.subject.common_name': 'List of common names (CN) of subject.', + 'threat.enrichments.indicator.x509.subject.country': 'List of country \\(C) code', + 'threat.enrichments.indicator.x509.subject.distinguished_name': + 'Distinguished name (DN) of the certificate subject entity.', + 'threat.enrichments.indicator.x509.subject.locality': 'List of locality names (L)', + 'threat.enrichments.indicator.x509.subject.organization': 'List of organizations (O) of subject.', + 'threat.enrichments.indicator.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', + 'threat.enrichments.indicator.x509.subject.state_or_province': 'List of state or province names (ST, S, or P)', + 'threat.enrichments.indicator.x509.version_number': 'Version of x509 format.', + 'threat.enrichments.matched.atomic': + 'Identifies the atomic indicator value that matched a local environment endpoint or network event.', + 'threat.enrichments.matched.field': + 'Identifies the field of the atomic indicator that matched a local environment endpoint or network event.', + 'threat.enrichments.matched.id': 'Identifies the _id of the indicator document enriching the event.', + 'threat.enrichments.matched.index': 'Identifies the _index of the indicator document enriching the event.', + 'threat.enrichments.matched.occurred': 'Indicates when the indicator match was generated', + 'threat.enrichments.matched.type': + 'Identifies the type of match that caused the event to be enriched with the given indicator', + 'threat.feed.dashboard_id': + 'The saved object ID of the dashboard belonging to the threat feed for displaying dashboard links to threat feeds in Kibana.', + 'threat.feed.description': 'Description of the threat feed in a UI friendly format.', + 'threat.feed.name': 'The name of the threat feed in UI 
friendly format.', + 'threat.feed.reference': 'Reference information for the threat feed in a UI friendly format.', + 'threat.framework': + 'Name of the threat framework used to further categorize and classify the tactic and technique of the reported threat. Framework classification can be provided by detecting systems, evaluated at ingest time, or retrospectively tagged to events.', + 'threat.group.alias': + 'The alias(es) of the group for a set of related intrusion activity that are tracked by a common name in the security community.\nWhile not required, you can use a MITRE ATT&CK\u00ae group alias(es).', + 'threat.group.id': + 'The id of the group for a set of related intrusion activity that are tracked by a common name in the security community.\nWhile not required, you can use a MITRE ATT&CK\u00ae group id.', + 'threat.group.name': + 'The name of the group for a set of related intrusion activity that are tracked by a common name in the security community.\nWhile not required, you can use a MITRE ATT&CK\u00ae group name.', + 'threat.group.reference': + 'The reference URL of the group for a set of related intrusion activity that are tracked by a common name in the security community.\nWhile not required, you can use a MITRE ATT&CK\u00ae group reference URL.', + 'threat.indicator.as.number': + 'Unique number allocated to the autonomous system. The autonomous system number (ASN) uniquely identifies each network on the Internet.', + 'threat.indicator.as.organization.name': 'Organization name.', + 'threat.indicator.confidence': + 'Identifies the vendor-neutral confidence rating using the None/Low/Medium/High scale defined in Appendix A of the STIX 2.1 framework. 
Vendor-specific confidence scales may be added as custom fields.', + 'threat.indicator.description': 'Describes the type of action conducted by the threat.', + 'threat.indicator.email.address': 'Identifies a threat indicator as an email address (irrespective of direction).', + 'threat.indicator.file.accessed': + 'Last time the file was accessed.\nNote that not all filesystems keep track of access time.', + 'threat.indicator.file.attributes': + "Array of file attributes.\nAttributes names will vary by platform. Here's a non-exhaustive list of values that are expected in this field: archive, compressed, directory, encrypted, execute, hidden, read, readonly, system, write.", + 'threat.indicator.file.code_signature.digest_algorithm': + 'The hashing algorithm used to sign the process.\nThis value can distinguish signatures when a file is signed multiple times by the same signer but with a different digest algorithm.', + 'threat.indicator.file.code_signature.exists': 'Boolean to capture if a signature is present.', + 'threat.indicator.file.code_signature.signing_id': + 'The identifier used to sign the process.\nThis is used to identify the application manufactured by a software vendor. The field is relevant to Apple *OS only.', + 'threat.indicator.file.code_signature.status': + 'Additional information about the certificate status.\nThis is useful for logging cryptographic errors with the certificate validity or trust status. Leave unpopulated if the validity or trust of the certificate was unchecked.', + 'threat.indicator.file.code_signature.subject_name': 'Subject name of the code signer', + 'threat.indicator.file.code_signature.team_id': + 'The team identifier used to sign the process.\nThis is used to identify the team or vendor of a software product. 
The field is relevant to Apple *OS only.', + 'threat.indicator.file.code_signature.timestamp': 'Date and time when the code signature was generated and signed.', + 'threat.indicator.file.code_signature.trusted': + 'Stores the trust status of the certificate chain.\nValidating the trust of the certificate chain may be complicated, and this field should only be populated by tools that actively check the status.', + 'threat.indicator.file.code_signature.valid': + 'Boolean to capture if the digital signature is verified against the binary content.\nLeave unpopulated if a certificate was unchecked.', + 'threat.indicator.file.created': 'File creation time.\nNote that not all filesystems store the creation time.', + 'threat.indicator.file.ctime': + 'Last time the file attributes or metadata changed.\nNote that changes to the file content will update `mtime`. This implies `ctime` will be adjusted at the same time, since `mtime` is an attribute of the file.', + 'threat.indicator.file.device': 'Device that is the source of the file.', + 'threat.indicator.file.directory': + 'Directory where the file is located. It should include the drive letter, when appropriate.', + 'threat.indicator.file.drive_letter': + 'Drive letter where the file is located. This field is only relevant on Windows.\nThe value should be uppercase, and not include the colon.', + 'threat.indicator.file.elf.architecture': 'Machine architecture of the ELF file.', + 'threat.indicator.file.elf.byte_order': 'Byte sequence of ELF file.', + 'threat.indicator.file.elf.cpu_type': 'CPU type of the ELF file.', + 'threat.indicator.file.elf.creation_date': + "Extracted when possible from the file's metadata. Indicates when it was built or compiled. It can also be faked by malware creators.", + 'threat.indicator.file.elf.exports': 'List of exported element names and types.', + 'threat.indicator.file.elf.go_import_hash': + 'A hash of the Go language imports in an ELF file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', + 'threat.indicator.file.elf.go_imports': 'List of imported Go language element names and types.', + 'threat.indicator.file.elf.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'threat.indicator.file.elf.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'threat.indicator.file.elf.go_stripped': + 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', + 'threat.indicator.file.elf.header.abi_version': 'Version of the ELF Application Binary Interface (ABI).', + 'threat.indicator.file.elf.header.class': 'Header class of the ELF file.', + 'threat.indicator.file.elf.header.data': 'Data table of the ELF header.', + 'threat.indicator.file.elf.header.entrypoint': 'Header entrypoint of the ELF file.', + 'threat.indicator.file.elf.header.object_version': '"0x1" for original ELF files.', + 'threat.indicator.file.elf.header.os_abi': 'Application Binary Interface (ABI) of the Linux OS.', + 'threat.indicator.file.elf.header.type': 'Header type of the ELF file.', + 'threat.indicator.file.elf.header.version': 'Version of the ELF header.', + 'threat.indicator.file.elf.import_hash': + 'A hash of the imports in an ELF file. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is an ELF implementation of the Windows PE imphash.', + 'threat.indicator.file.elf.imports': 'List of imported element names and types.', + 'threat.indicator.file.elf.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'threat.indicator.file.elf.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'threat.indicator.file.elf.sections': + 'An array containing an object for each section of the ELF file.\nThe keys that should be present in these objects are defined by sub-fields underneath `elf.sections.*`.', + 'threat.indicator.file.elf.sections.chi2': 'Chi-square probability distribution of the section.', + 'threat.indicator.file.elf.sections.entropy': 'Shannon entropy calculation from the section.', + 'threat.indicator.file.elf.sections.flags': 'ELF Section List flags.', + 'threat.indicator.file.elf.sections.name': 'ELF Section List name.', + 'threat.indicator.file.elf.sections.physical_offset': 'ELF Section List offset.', + 'threat.indicator.file.elf.sections.physical_size': 'ELF Section List physical size.', + 'threat.indicator.file.elf.sections.type': 'ELF Section List type.', + 'threat.indicator.file.elf.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'threat.indicator.file.elf.sections.virtual_address': 'ELF Section List virtual address.', + 'threat.indicator.file.elf.sections.virtual_size': 'ELF Section List virtual size.', + 'threat.indicator.file.elf.segments': + 'An array containing an object for each segment of the ELF file.\nThe keys that should be present in these objects are defined by sub-fields underneath `elf.segments.*`.', + 'threat.indicator.file.elf.segments.sections': 'ELF object segment sections.', 
+ 'threat.indicator.file.elf.segments.type': 'ELF object segment type.', + 'threat.indicator.file.elf.shared_libraries': 'List of shared libraries used by this ELF object.', + 'threat.indicator.file.elf.telfhash': 'telfhash symbol hash for ELF file.', + 'threat.indicator.file.extension': + 'File extension, excluding the leading dot.\nNote that when the file name has multiple extensions (example.tar.gz), only the last one should be captured ("gz", not "tar.gz").', + 'threat.indicator.file.fork_name': + 'A fork is additional data associated with a filesystem object.\nOn Linux, a resource fork is used to store additional data with a filesystem object. A file always has at least one fork for the data portion, and additional forks may exist.\nOn NTFS, this is analogous to an Alternate Data Stream (ADS), and the default data stream for a file is just called $DATA. Zone.Identifier is commonly used by Windows to track contents downloaded from the Internet. An ADS is typically of the form: `C:\\path\\to\\filename.extension:some_fork_name`, and `some_fork_name` is the value that should populate `fork_name`. `filename.extension` should populate `file.name`, and `extension` should populate `file.extension`. 
The full path, `file.path`, will include the fork name.', + 'threat.indicator.file.gid': 'Primary group ID (GID) of the file.', + 'threat.indicator.file.group': 'Primary group name of the file.', + 'threat.indicator.file.hash.md5': 'MD5 hash.', + 'threat.indicator.file.hash.sha1': 'SHA1 hash.', + 'threat.indicator.file.hash.sha256': 'SHA256 hash.', + 'threat.indicator.file.hash.sha384': 'SHA384 hash.', + 'threat.indicator.file.hash.sha512': 'SHA512 hash.', + 'threat.indicator.file.hash.ssdeep': 'SSDEEP hash.', + 'threat.indicator.file.hash.tlsh': 'TLSH hash.', + 'threat.indicator.file.inode': 'Inode representing the file in the filesystem.', + 'threat.indicator.file.mime_type': + 'MIME type should identify the format of the file or stream of bytes using https://www.iana.org/assignments/media-types/media-types.xhtml[IANA official types], where possible. When more than one type is applicable, the most specific type should be used.', + 'threat.indicator.file.mode': 'Mode of the file in octal representation.', + 'threat.indicator.file.mtime': 'Last time the file content was modified.', + 'threat.indicator.file.name': 'Name of the file including the extension, without the directory.', + 'threat.indicator.file.owner': "File owner's username.", + 'threat.indicator.file.path': + 'Full path to the file, including the file name. It should include the drive letter, when appropriate.', + 'threat.indicator.file.pe.architecture': 'CPU architecture target for the file.', + 'threat.indicator.file.pe.company': 'Internal company name of the file, provided at compile-time.', + 'threat.indicator.file.pe.description': 'Internal description of the file, provided at compile-time.', + 'threat.indicator.file.pe.file_version': 'Internal version of the file, provided at compile-time.', + 'threat.indicator.file.pe.go_import_hash': + 'A hash of the Go language imports in a PE file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', + 'threat.indicator.file.pe.go_imports': 'List of imported Go language element names and types.', + 'threat.indicator.file.pe.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'threat.indicator.file.pe.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'threat.indicator.file.pe.go_stripped': + 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', + 'threat.indicator.file.pe.imphash': + 'A hash of the imports in a PE file. An imphash -- or import hash -- can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nLearn more at https://www.fireeye.com/blog/threat-research/2014/01/tracking-malware-import-hashing.html.', + 'threat.indicator.file.pe.import_hash': + 'A hash of the imports in a PE file. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a synonym for imphash.', + 'threat.indicator.file.pe.imports': 'List of imported element names and types.', + 'threat.indicator.file.pe.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'threat.indicator.file.pe.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'threat.indicator.file.pe.original_file_name': 'Internal name of the file, provided at compile-time.', + 'threat.indicator.file.pe.pehash': + 'A hash of the PE header and data from one or more PE sections. An pehash can be used to cluster files by transforming structural information about a file into a hash value.\nLearn more at https://www.usenix.org/legacy/events/leet09/tech/full_papers/wicherski/wicherski_html/index.html.', + 'threat.indicator.file.pe.product': 'Internal product name of the file, provided at compile-time.', + 'threat.indicator.file.pe.sections': + 'An array containing an object for each section of the PE file.\nThe keys that should be present in these objects are defined by sub-fields underneath `pe.sections.*`.', + 'threat.indicator.file.pe.sections.entropy': 'Shannon entropy calculation from the section.', + 'threat.indicator.file.pe.sections.name': 'PE Section List name.', + 'threat.indicator.file.pe.sections.physical_size': 'PE Section List physical size.', + 'threat.indicator.file.pe.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'threat.indicator.file.pe.sections.virtual_size': + 'PE Section List virtual size. 
This is always the same as `physical_size`.', + 'threat.indicator.file.size': 'File size in bytes.\nOnly relevant when `file.type` is "file".', + 'threat.indicator.file.target_path': 'Target path for symlinks.', + 'threat.indicator.file.type': 'File type (file, dir, or symlink).', + 'threat.indicator.file.uid': 'The user ID (UID) or security identifier (SID) of the file owner.', + 'threat.indicator.file.x509.alternative_names': + 'List of subject alternative names (SAN). Name types vary by certificate authority and certificate type but commonly contain IP addresses, DNS names (and wildcards), and email addresses.', + 'threat.indicator.file.x509.issuer.common_name': 'List of common name (CN) of issuing certificate authority.', + 'threat.indicator.file.x509.issuer.country': 'List of country \\(C) codes', + 'threat.indicator.file.x509.issuer.distinguished_name': 'Distinguished name (DN) of issuing certificate authority.', + 'threat.indicator.file.x509.issuer.locality': 'List of locality names (L)', + 'threat.indicator.file.x509.issuer.organization': 'List of organizations (O) of issuing certificate authority.', + 'threat.indicator.file.x509.issuer.organizational_unit': + 'List of organizational units (OU) of issuing certificate authority.', + 'threat.indicator.file.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', + 'threat.indicator.file.x509.not_after': 'Time at which the certificate is no longer considered valid.', + 'threat.indicator.file.x509.not_before': 'Time at which the certificate is first considered valid.', + 'threat.indicator.file.x509.public_key_algorithm': 'Algorithm used to generate the public key.', + 'threat.indicator.file.x509.public_key_curve': + 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', + 'threat.indicator.file.x509.public_key_exponent': + 'Exponent used to derive the public key. 
This is algorithm specific.', + 'threat.indicator.file.x509.public_key_size': 'The size of the public key space in bits.', + 'threat.indicator.file.x509.serial_number': + 'Unique serial number issued by the certificate authority. For consistency, if this value is alphanumeric, it should be formatted without colons and uppercase characters.', + 'threat.indicator.file.x509.signature_algorithm': + 'Identifier for certificate signature algorithm. We recommend using names found in Go Lang Crypto library. See https://github.com/golang/go/blob/go1.14/src/crypto/x509/x509.go#L337-L353.', + 'threat.indicator.file.x509.subject.common_name': 'List of common names (CN) of subject.', + 'threat.indicator.file.x509.subject.country': 'List of country \\(C) code', + 'threat.indicator.file.x509.subject.distinguished_name': 'Distinguished name (DN) of the certificate subject entity.', + 'threat.indicator.file.x509.subject.locality': 'List of locality names (L)', + 'threat.indicator.file.x509.subject.organization': 'List of organizations (O) of subject.', + 'threat.indicator.file.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', + 'threat.indicator.file.x509.subject.state_or_province': 'List of state or province names (ST, S, or P)', + 'threat.indicator.file.x509.version_number': 'Version of x509 format.', + 'threat.indicator.first_seen': 'The date and time when intelligence source first reported sighting this indicator.', + 'threat.indicator.geo.city_name': 'City name.', + 'threat.indicator.geo.continent_code': "Two-letter code representing continent's name.", + 'threat.indicator.geo.continent_name': 'Name of the continent.', + 'threat.indicator.geo.country_iso_code': 'Country ISO code.', + 'threat.indicator.geo.country_name': 'Country name.', + 'threat.indicator.geo.location': 'Longitude and latitude.', + 'threat.indicator.geo.name': + 'User-defined description of a location, at the level of granularity they care about.\nCould be the name of 
their data centers, the floor number, if this describes a local physical entity, city names.\nNot typically used in automated geolocation.', + 'threat.indicator.geo.postal_code': + 'Postal code associated with the location.\nValues appropriate for this field may also be known as a postcode or ZIP code and will vary widely from country to country.', + 'threat.indicator.geo.region_iso_code': 'Region ISO code.', + 'threat.indicator.geo.region_name': 'Region name.', + 'threat.indicator.geo.timezone': 'The time zone of the location, such as IANA time zone name.', + 'threat.indicator.ip': 'Identifies a threat indicator as an IP address (irrespective of direction).', + 'threat.indicator.last_seen': 'The date and time when intelligence source last reported sighting this indicator.', + 'threat.indicator.marking.tlp': 'Traffic Light Protocol sharing markings.', + 'threat.indicator.marking.tlp_version': 'Traffic Light Protocol version.', + 'threat.indicator.modified_at': + 'The date and time when intelligence source last modified information for this indicator.', + 'threat.indicator.name': 'The display name indicator in an UI friendly format', + 'threat.indicator.port': 'Identifies a threat indicator as a port number (irrespective of direction).', + 'threat.indicator.provider': "The name of the indicator's provider.", + 'threat.indicator.reference': 'Reference URL linking to additional information about this indicator.', + 'threat.indicator.registry.data.bytes': + 'Original bytes written with base64 encoding.\nFor Windows registry operations, such as SetValueEx and RegQueryValueEx, this corresponds to the data pointed by `lp_data`. This is optional but provides better recoverability and should be populated for REG_BINARY encoded values.', + 'threat.indicator.registry.data.strings': + 'Content when writing string types.\nPopulated as an array when writing string data to the registry. 
For single string registry types (REG_SZ, REG_EXPAND_SZ), this should be an array with one string. For sequences of string with REG_MULTI_SZ, this array will be variable length. For numeric data, such as REG_DWORD and REG_QWORD, this should be populated with the decimal representation (e.g `"1"`).', + 'threat.indicator.registry.data.type': 'Standard registry type for encoding contents', + 'threat.indicator.registry.hive': 'Abbreviated name for the hive.', + 'threat.indicator.registry.key': 'Hive-relative path of keys.', + 'threat.indicator.registry.path': 'Full path, including hive, key and value', + 'threat.indicator.registry.value': 'Name of the value written.', + 'threat.indicator.scanner_stats': 'Count of AV/EDR vendors that successfully detected malicious file or URL.', + 'threat.indicator.sightings': 'Number of times this indicator was observed conducting threat activity.', + 'threat.indicator.type': 'Type of indicator as represented by Cyber Observable in STIX 2.0.', + 'threat.indicator.url.domain': + 'Domain of the url, such as "www.elastic.co".\nIn some cases a URL may refer to an IP and/or port directly, without a domain name. In this case, the IP address would go to the `domain` field.\nIf the URL contains a literal IPv6 address enclosed by `[` and `]` (IETF RFC 2732), the `[` and `]` characters should also be captured in the `domain` field.', + 'threat.indicator.url.extension': + 'The field contains the file extension from the original request url, excluding the leading dot.\nThe file extension is only set if it exists, as not every url has a file extension.\nThe leading period must not be included. 
For example, the value must be "png", not ".png".\nNote that when the file name has multiple extensions (example.tar.gz), only the last one should be captured ("gz", not "tar.gz").', + 'threat.indicator.url.fragment': + 'Portion of the url after the `#`, such as "top".\nThe `#` is not part of the fragment.', + 'threat.indicator.url.full': + 'If full URLs are important to your use case, they should be stored in `url.full`, whether this field is reconstructed or present in the event source.', + 'threat.indicator.url.original': + 'Unmodified original url as seen in the event source.\nNote that in network monitoring, the observed URL may be a full URL, whereas in access logs, the URL is often just represented as a path.\nThis field is meant to represent the URL as it was observed, complete or not.', + 'threat.indicator.url.password': 'Password of the request.', + 'threat.indicator.url.path': 'Path of the request, such as "/search".', + 'threat.indicator.url.port': 'Port of the request, such as 443.', + 'threat.indicator.url.query': + 'The query field describes the query string of the request, such as "q=elasticsearch".\nThe `?` is excluded from the query string. If a URL contains no `?`, there is no query field. If there is a `?` but no query, the query field exists with an empty string. The `exists` query can be used to differentiate between the two cases.', + 'threat.indicator.url.registered_domain': + 'The highest registered url domain, stripped of the subdomain.\nFor example, the registered domain for "foo.example.com" is "example.com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). 
Trying to approximate this by simply taking the last two labels will not work well for TLDs such as "co.uk".', + 'threat.indicator.url.scheme': 'Scheme of the request, such as "https".\nNote: The `:` is not part of the scheme.', + 'threat.indicator.url.subdomain': + 'The subdomain portion of a fully qualified domain name includes all of the names except the host name under the registered_domain. In a partially qualified domain, or if the the qualification level of the full name cannot be determined, subdomain contains all of the names below the registered domain.\nFor example the subdomain portion of "www.east.mydomain.co.uk" is "east". If the domain has multiple levels of subdomain, such as "sub2.sub1.example.com", the subdomain field should contain "sub2.sub1", with no trailing period.', + 'threat.indicator.url.top_level_domain': + 'The effective top level domain (eTLD), also known as the domain suffix, is the last part of the domain name. For example, the top level domain for example.com is "com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). Trying to approximate this by simply taking the last label will not work well for effective TLDs such as "co.uk".', + 'threat.indicator.url.username': 'Username of the request.', + 'threat.indicator.x509.alternative_names': + 'List of subject alternative names (SAN). 
Name types vary by certificate authority and certificate type but commonly contain IP addresses, DNS names (and wildcards), and email addresses.', + 'threat.indicator.x509.issuer.common_name': 'List of common name (CN) of issuing certificate authority.', + 'threat.indicator.x509.issuer.country': 'List of country \\(C) codes', + 'threat.indicator.x509.issuer.distinguished_name': 'Distinguished name (DN) of issuing certificate authority.', + 'threat.indicator.x509.issuer.locality': 'List of locality names (L)', + 'threat.indicator.x509.issuer.organization': 'List of organizations (O) of issuing certificate authority.', + 'threat.indicator.x509.issuer.organizational_unit': + 'List of organizational units (OU) of issuing certificate authority.', + 'threat.indicator.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', + 'threat.indicator.x509.not_after': 'Time at which the certificate is no longer considered valid.', + 'threat.indicator.x509.not_before': 'Time at which the certificate is first considered valid.', + 'threat.indicator.x509.public_key_algorithm': 'Algorithm used to generate the public key.', + 'threat.indicator.x509.public_key_curve': + 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', + 'threat.indicator.x509.public_key_exponent': 'Exponent used to derive the public key. This is algorithm specific.', + 'threat.indicator.x509.public_key_size': 'The size of the public key space in bits.', + 'threat.indicator.x509.serial_number': + 'Unique serial number issued by the certificate authority. For consistency, if this value is alphanumeric, it should be formatted without colons and uppercase characters.', + 'threat.indicator.x509.signature_algorithm': + 'Identifier for certificate signature algorithm. We recommend using names found in Go Lang Crypto library. 
See https://github.com/golang/go/blob/go1.14/src/crypto/x509/x509.go#L337-L353.', + 'threat.indicator.x509.subject.common_name': 'List of common names (CN) of subject.', + 'threat.indicator.x509.subject.country': 'List of country \\(C) code', + 'threat.indicator.x509.subject.distinguished_name': 'Distinguished name (DN) of the certificate subject entity.', + 'threat.indicator.x509.subject.locality': 'List of locality names (L)', + 'threat.indicator.x509.subject.organization': 'List of organizations (O) of subject.', + 'threat.indicator.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', + 'threat.indicator.x509.subject.state_or_province': 'List of state or province names (ST, S, or P)', + 'threat.indicator.x509.version_number': 'Version of x509 format.', + 'threat.software.alias': + 'The alias(es) of the software for a set of related intrusion activity that are tracked by a common name in the security community.\nWhile not required, you can use a MITRE ATT&CK\u00ae associated software description.', + 'threat.software.id': + 'The id of the software used by this threat to conduct behavior commonly modeled using MITRE ATT&CK\u00ae.\nWhile not required, you can use a MITRE ATT&CK\u00ae software id.', + 'threat.software.name': + 'The name of the software used by this threat to conduct behavior commonly modeled using MITRE ATT&CK\u00ae.\nWhile not required, you can use a MITRE ATT&CK\u00ae software name.', + 'threat.software.platforms': + 'The platforms of the software used by this threat to conduct behavior commonly modeled using MITRE ATT&CK\u00ae.\nWhile not required, you can use MITRE ATT&CK\u00ae software platform values.', + 'threat.software.reference': + 'The reference URL of the software used by this threat to conduct behavior commonly modeled using MITRE ATT&CK\u00ae.\nWhile not required, you can use a MITRE ATT&CK\u00ae software reference URL.', + 'threat.software.type': + 'The type of software used by this threat to 
conduct behavior commonly modeled using MITRE ATT&CK\u00ae.\nWhile not required, you can use a MITRE ATT&CK\u00ae software type.', + 'threat.tactic.id': + 'The id of tactic used by this threat. You can use a MITRE ATT&CK\u00ae tactic, for example. (ex. https://attack.mitre.org/tactics/TA0002/ )', + 'threat.tactic.name': + 'Name of the type of tactic used by this threat. You can use a MITRE ATT&CK\u00ae tactic, for example. (ex. https://attack.mitre.org/tactics/TA0002/)', + 'threat.tactic.reference': + 'The reference url of tactic used by this threat. You can use a MITRE ATT&CK\u00ae tactic, for example. (ex. https://attack.mitre.org/tactics/TA0002/ )', + 'threat.technique.id': + 'The id of technique used by this threat. You can use a MITRE ATT&CK\u00ae technique, for example. (ex. https://attack.mitre.org/techniques/T1059/)', + 'threat.technique.name': + 'The name of technique used by this threat. You can use a MITRE ATT&CK\u00ae technique, for example. (ex. https://attack.mitre.org/techniques/T1059/)', + 'threat.technique.reference': + 'The reference url of technique used by this threat. You can use a MITRE ATT&CK\u00ae technique, for example. (ex. https://attack.mitre.org/techniques/T1059/)', + 'threat.technique.subtechnique.id': + 'The full id of subtechnique used by this threat. You can use a MITRE ATT&CK\u00ae subtechnique, for example. (ex. https://attack.mitre.org/techniques/T1059/001/)', + 'threat.technique.subtechnique.name': + 'The name of subtechnique used by this threat. You can use a MITRE ATT&CK\u00ae subtechnique, for example. (ex. https://attack.mitre.org/techniques/T1059/001/)', + 'threat.technique.subtechnique.reference': + 'The reference url of subtechnique used by this threat. You can use a MITRE ATT&CK\u00ae subtechnique, for example. (ex. 
https://attack.mitre.org/techniques/T1059/001/)', + 'tls.cipher': 'String indicating the cipher used during the current connection.', + 'tls.client.certificate': + 'PEM-encoded stand-alone certificate offered by the client. This is usually mutually-exclusive of `client.certificate_chain` since this value also exists in that list.', + 'tls.client.certificate_chain': + 'Array of PEM-encoded certificates that make up the certificate chain offered by the client. This is usually mutually-exclusive of `client.certificate` since that value should be the first certificate in the chain.', + 'tls.client.hash.md5': + 'Certificate fingerprint using the MD5 digest of DER-encoded version of certificate offered by the client. For consistency with other hash values, this value should be formatted as an uppercase hash.', + 'tls.client.hash.sha1': + 'Certificate fingerprint using the SHA1 digest of DER-encoded version of certificate offered by the client. For consistency with other hash values, this value should be formatted as an uppercase hash.', + 'tls.client.hash.sha256': + 'Certificate fingerprint using the SHA256 digest of DER-encoded version of certificate offered by the client. For consistency with other hash values, this value should be formatted as an uppercase hash.', + 'tls.client.issuer': 'Distinguished name of subject of the issuer of the x.509 certificate presented by the client.', + 'tls.client.ja3': 'A hash that identifies clients based on how they perform an SSL/TLS handshake.', + 'tls.client.not_after': 'Date/Time indicating when client certificate is no longer considered valid.', + 'tls.client.not_before': 'Date/Time indicating when client certificate is first considered valid.', + 'tls.client.server_name': + 'Also called an SNI, this tells the server which hostname to which the client is attempting to connect to. 
When this value is available, it should get copied to `destination.domain`.', + 'tls.client.subject': 'Distinguished name of subject of the x.509 certificate presented by the client.', + 'tls.client.supported_ciphers': 'Array of ciphers offered by the client during the client hello.', + 'tls.client.x509.alternative_names': + 'List of subject alternative names (SAN). Name types vary by certificate authority and certificate type but commonly contain IP addresses, DNS names (and wildcards), and email addresses.', + 'tls.client.x509.issuer.common_name': 'List of common name (CN) of issuing certificate authority.', + 'tls.client.x509.issuer.country': 'List of country \\(C) codes', + 'tls.client.x509.issuer.distinguished_name': 'Distinguished name (DN) of issuing certificate authority.', + 'tls.client.x509.issuer.locality': 'List of locality names (L)', + 'tls.client.x509.issuer.organization': 'List of organizations (O) of issuing certificate authority.', + 'tls.client.x509.issuer.organizational_unit': 'List of organizational units (OU) of issuing certificate authority.', + 'tls.client.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', + 'tls.client.x509.not_after': 'Time at which the certificate is no longer considered valid.', + 'tls.client.x509.not_before': 'Time at which the certificate is first considered valid.', + 'tls.client.x509.public_key_algorithm': 'Algorithm used to generate the public key.', + 'tls.client.x509.public_key_curve': + 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', + 'tls.client.x509.public_key_exponent': 'Exponent used to derive the public key. This is algorithm specific.', + 'tls.client.x509.public_key_size': 'The size of the public key space in bits.', + 'tls.client.x509.serial_number': + 'Unique serial number issued by the certificate authority. 
For consistency, if this value is alphanumeric, it should be formatted without colons and uppercase characters.', + 'tls.client.x509.signature_algorithm': + 'Identifier for certificate signature algorithm. We recommend using names found in Go Lang Crypto library. See https://github.com/golang/go/blob/go1.14/src/crypto/x509/x509.go#L337-L353.', + 'tls.client.x509.subject.common_name': 'List of common names (CN) of subject.', + 'tls.client.x509.subject.country': 'List of country \\(C) code', + 'tls.client.x509.subject.distinguished_name': 'Distinguished name (DN) of the certificate subject entity.', + 'tls.client.x509.subject.locality': 'List of locality names (L)', + 'tls.client.x509.subject.organization': 'List of organizations (O) of subject.', + 'tls.client.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', + 'tls.client.x509.subject.state_or_province': 'List of state or province names (ST, S, or P)', + 'tls.client.x509.version_number': 'Version of x509 format.', + 'tls.curve': 'String indicating the curve used for the given cipher, when applicable.', + 'tls.established': + 'Boolean flag indicating if the TLS negotiation was successful and transitioned to an encrypted tunnel.', + 'tls.next_protocol': + 'String indicating the protocol being tunneled. Per the values in the IANA registry (https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids), this string should be lower case.', + 'tls.resumed': 'Boolean flag indicating if this TLS connection was resumed from an existing TLS negotiation.', + 'tls.server.certificate': + 'PEM-encoded stand-alone certificate offered by the server. This is usually mutually-exclusive of `server.certificate_chain` since this value also exists in that list.', + 'tls.server.certificate_chain': + 'Array of PEM-encoded certificates that make up the certificate chain offered by the server. 
This is usually mutually-exclusive of `server.certificate` since that value should be the first certificate in the chain.', + 'tls.server.hash.md5': + 'Certificate fingerprint using the MD5 digest of DER-encoded version of certificate offered by the server. For consistency with other hash values, this value should be formatted as an uppercase hash.', + 'tls.server.hash.sha1': + 'Certificate fingerprint using the SHA1 digest of DER-encoded version of certificate offered by the server. For consistency with other hash values, this value should be formatted as an uppercase hash.', + 'tls.server.hash.sha256': + 'Certificate fingerprint using the SHA256 digest of DER-encoded version of certificate offered by the server. For consistency with other hash values, this value should be formatted as an uppercase hash.', + 'tls.server.issuer': 'Subject of the issuer of the x.509 certificate presented by the server.', + 'tls.server.ja3s': 'A hash that identifies servers based on how they perform an SSL/TLS handshake.', + 'tls.server.not_after': 'Timestamp indicating when server certificate is no longer considered valid.', + 'tls.server.not_before': 'Timestamp indicating when server certificate is first considered valid.', + 'tls.server.subject': 'Subject of the x.509 certificate presented by the server.', + 'tls.server.x509.alternative_names': + 'List of subject alternative names (SAN). 
Name types vary by certificate authority and certificate type but commonly contain IP addresses, DNS names (and wildcards), and email addresses.', + 'tls.server.x509.issuer.common_name': 'List of common name (CN) of issuing certificate authority.', + 'tls.server.x509.issuer.country': 'List of country \\(C) codes', + 'tls.server.x509.issuer.distinguished_name': 'Distinguished name (DN) of issuing certificate authority.', + 'tls.server.x509.issuer.locality': 'List of locality names (L)', + 'tls.server.x509.issuer.organization': 'List of organizations (O) of issuing certificate authority.', + 'tls.server.x509.issuer.organizational_unit': 'List of organizational units (OU) of issuing certificate authority.', + 'tls.server.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', + 'tls.server.x509.not_after': 'Time at which the certificate is no longer considered valid.', + 'tls.server.x509.not_before': 'Time at which the certificate is first considered valid.', + 'tls.server.x509.public_key_algorithm': 'Algorithm used to generate the public key.', + 'tls.server.x509.public_key_curve': + 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', + 'tls.server.x509.public_key_exponent': 'Exponent used to derive the public key. This is algorithm specific.', + 'tls.server.x509.public_key_size': 'The size of the public key space in bits.', + 'tls.server.x509.serial_number': + 'Unique serial number issued by the certificate authority. For consistency, if this value is alphanumeric, it should be formatted without colons and uppercase characters.', + 'tls.server.x509.signature_algorithm': + 'Identifier for certificate signature algorithm. We recommend using names found in Go Lang Crypto library. 
See https://github.com/golang/go/blob/go1.14/src/crypto/x509/x509.go#L337-L353.', + 'tls.server.x509.subject.common_name': 'List of common names (CN) of subject.', + 'tls.server.x509.subject.country': 'List of country \\(C) code', + 'tls.server.x509.subject.distinguished_name': 'Distinguished name (DN) of the certificate subject entity.', + 'tls.server.x509.subject.locality': 'List of locality names (L)', + 'tls.server.x509.subject.organization': 'List of organizations (O) of subject.', + 'tls.server.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', + 'tls.server.x509.subject.state_or_province': 'List of state or province names (ST, S, or P)', + 'tls.server.x509.version_number': 'Version of x509 format.', + 'tls.version': 'Numeric part of the version parsed from the original string.', + 'tls.version_protocol': 'Normalized lowercase protocol name parsed from original string.', + 'trace.id': + 'Unique identifier of the trace.\nA trace groups multiple events like transactions that belong together. For example, a user request handled by multiple inter-connected services.', + 'transaction.id': + 'Unique identifier of the transaction within the scope of its trace.\nA transaction is the highest level of work measured within a service, such as a request to a server.', + 'url.domain': + 'Domain of the url, such as "www.elastic.co".\nIn some cases a URL may refer to an IP and/or port directly, without a domain name. In this case, the IP address would go to the `domain` field.\nIf the URL contains a literal IPv6 address enclosed by `[` and `]` (IETF RFC 2732), the `[` and `]` characters should also be captured in the `domain` field.', + 'url.extension': + 'The field contains the file extension from the original request url, excluding the leading dot.\nThe file extension is only set if it exists, as not every url has a file extension.\nThe leading period must not be included. 
For example, the value must be "png", not ".png".\nNote that when the file name has multiple extensions (example.tar.gz), only the last one should be captured ("gz", not "tar.gz").', + 'url.fragment': 'Portion of the url after the `#`, such as "top".\nThe `#` is not part of the fragment.', + 'url.full': + 'If full URLs are important to your use case, they should be stored in `url.full`, whether this field is reconstructed or present in the event source.', + 'url.original': + 'Unmodified original url as seen in the event source.\nNote that in network monitoring, the observed URL may be a full URL, whereas in access logs, the URL is often just represented as a path.\nThis field is meant to represent the URL as it was observed, complete or not.', + 'url.password': 'Password of the request.', + 'url.path': 'Path of the request, such as "/search".', + 'url.port': 'Port of the request, such as 443.', + 'url.query': + 'The query field describes the query string of the request, such as "q=elasticsearch".\nThe `?` is excluded from the query string. If a URL contains no `?`, there is no query field. If there is a `?` but no query, the query field exists with an empty string. The `exists` query can be used to differentiate between the two cases.', + 'url.registered_domain': + 'The highest registered url domain, stripped of the subdomain.\nFor example, the registered domain for "foo.example.com" is "example.com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). Trying to approximate this by simply taking the last two labels will not work well for TLDs such as "co.uk".', + 'url.scheme': 'Scheme of the request, such as "https".\nNote: The `:` is not part of the scheme.', + 'url.subdomain': + 'The subdomain portion of a fully qualified domain name includes all of the names except the host name under the registered_domain. 
In a partially qualified domain, or if the the qualification level of the full name cannot be determined, subdomain contains all of the names below the registered domain.\nFor example the subdomain portion of "www.east.mydomain.co.uk" is "east". If the domain has multiple levels of subdomain, such as "sub2.sub1.example.com", the subdomain field should contain "sub2.sub1", with no trailing period.', + 'url.top_level_domain': + 'The effective top level domain (eTLD), also known as the domain suffix, is the last part of the domain name. For example, the top level domain for example.com is "com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). Trying to approximate this by simply taking the last label will not work well for effective TLDs such as "co.uk".', + 'url.username': 'Username of the request.', + 'user.changes.domain': + 'Name of the directory the user is a member of.\nFor example, an LDAP or Active Directory domain name.', + 'user.changes.email': 'User email address.', + 'user.changes.full_name': "User's full name, if available.", + 'user.changes.group.domain': + 'Name of the directory the group is a member of.\nFor example, an LDAP or Active Directory domain name.', + 'user.changes.group.id': 'Unique identifier for the group on the system/platform.', + 'user.changes.group.name': 'Name of the group.', + 'user.changes.hash': + 'Unique user hash to correlate information for a user in anonymized form.\nUseful if `user.id` or `user.name` contain confidential information and cannot be used.', + 'user.changes.id': 'Unique identifier of the user.', + 'user.changes.name': 'Short name or login of the user.', + 'user.changes.roles': 'Array of user roles at the time of the event.', + 'user.domain': + 'Name of the directory the user is a member of.\nFor example, an LDAP or Active Directory domain name.', + 'user.effective.domain': + 'Name of the directory the user is a member of.\nFor example, an LDAP or Active 
Directory domain name.', + 'user.effective.email': 'User email address.', + 'user.effective.full_name': "User's full name, if available.", + 'user.effective.group.domain': + 'Name of the directory the group is a member of.\nFor example, an LDAP or Active Directory domain name.', + 'user.effective.group.id': 'Unique identifier for the group on the system/platform.', + 'user.effective.group.name': 'Name of the group.', + 'user.effective.hash': + 'Unique user hash to correlate information for a user in anonymized form.\nUseful if `user.id` or `user.name` contain confidential information and cannot be used.', + 'user.effective.id': 'Unique identifier of the user.', + 'user.effective.name': 'Short name or login of the user.', + 'user.effective.roles': 'Array of user roles at the time of the event.', + 'user.email': 'User email address.', + 'user.full_name': "User's full name, if available.", + 'user.group.domain': + 'Name of the directory the group is a member of.\nFor example, an LDAP or Active Directory domain name.', + 'user.group.id': 'Unique identifier for the group on the system/platform.', + 'user.group.name': 'Name of the group.', + 'user.hash': + 'Unique user hash to correlate information for a user in anonymized form.\nUseful if `user.id` or `user.name` contain confidential information and cannot be used.', + 'user.id': 'Unique identifier of the user.', + 'user.name': 'Short name or login of the user.', + 'user.risk.calculated_level': + 'A risk classification level calculated by an internal system as part of entity analytics and entity risk scoring.', + 'user.risk.calculated_score': + 'A risk classification score calculated by an internal system as part of entity analytics and entity risk scoring.', + 'user.risk.calculated_score_norm': + 'A risk classification score calculated by an internal system as part of entity analytics and entity risk scoring, and normalized to a range of 0 to 100.', + 'user.risk.static_level': + 'A risk classification level obtained 
from outside the system, such as from some external Threat Intelligence Platform.', + 'user.risk.static_score': + 'A risk classification score obtained from outside the system, such as from some external Threat Intelligence Platform.', + 'user.risk.static_score_norm': + 'A risk classification score obtained from outside the system, such as from some external Threat Intelligence Platform, and normalized to a range of 0 to 100.', + 'user.roles': 'Array of user roles at the time of the event.', + 'user.target.domain': + 'Name of the directory the user is a member of.\nFor example, an LDAP or Active Directory domain name.', + 'user.target.email': 'User email address.', + 'user.target.full_name': "User's full name, if available.", + 'user.target.group.domain': + 'Name of the directory the group is a member of.\nFor example, an LDAP or Active Directory domain name.', + 'user.target.group.id': 'Unique identifier for the group on the system/platform.', + 'user.target.group.name': 'Name of the group.', + 'user.target.hash': + 'Unique user hash to correlate information for a user in anonymized form.\nUseful if `user.id` or `user.name` contain confidential information and cannot be used.', + 'user.target.id': 'Unique identifier of the user.', + 'user.target.name': 'Short name or login of the user.', + 'user.target.roles': 'Array of user roles at the time of the event.', + 'user_agent.device.name': 'Name of the device.', + 'user_agent.name': 'Name of the user agent.', + 'user_agent.original': 'Unparsed user_agent string.', + 'user_agent.os.family': 'OS family (such as redhat, debian, freebsd, windows).', + 'user_agent.os.full': 'Operating system name, including the version or code name.', + 'user_agent.os.kernel': 'Operating system kernel version as a raw string.', + 'user_agent.os.name': 'Operating system name, without the version.', + 'user_agent.os.platform': 'Operating system platform (such centos, ubuntu, windows).', + 'user_agent.os.type': + "Use the `os.type` field to 
categorize the operating system into one of the broad commercial families.\nIf the OS you're dealing with is not listed as an expected value, the field should not be populated. Please let us know by opening an issue with ECS, to propose its addition.", + 'user_agent.os.version': 'Operating system version as a raw string.', + 'user_agent.version': 'Version of the user agent.', + 'vulnerability.category': + 'The type of system or architecture that the vulnerability affects. These may be platform-specific (for example, Debian or SUSE) or general (for example, Database or Firewall). For example (https://qualysguard.qualys.com/qwebhelp/fo_portal/knowledgebase/vulnerability_categories.htm[Qualys vulnerability categories])\nThis field must be an array.', + 'vulnerability.classification': + 'The classification of the vulnerability scoring system. For example (https://www.first.org/cvss/)', + 'vulnerability.description': + 'The description of the vulnerability that provides additional context of the vulnerability. For example (https://cve.mitre.org/about/faqs.html#cve_entry_descriptions_created[Common Vulnerabilities and Exposure CVE description])', + 'vulnerability.enumeration': + 'The type of identifier used for this vulnerability. For example (https://cve.mitre.org/about/)', + 'vulnerability.id': + 'The identification (ID) is the number portion of a vulnerability entry. It includes a unique identification number for the vulnerability. 
For example (https://cve.mitre.org/about/faqs.html#what_is_cve_id)[Common Vulnerabilities and Exposure CVE ID]', + 'vulnerability.reference': + 'A resource that provides additional information, context, and mitigations for the identified vulnerability.', + 'vulnerability.report_id': 'The report or scan identification number.', + 'vulnerability.scanner.vendor': 'The name of the vulnerability scanner vendor.', + 'vulnerability.score.base': + 'Scores can range from 0.0 to 10.0, with 10.0 being the most severe.\nBase scores cover an assessment for exploitability metrics (attack vector, complexity, privileges, and user interaction), impact metrics (confidentiality, integrity, and availability), and scope. For example (https://www.first.org/cvss/specification-document)', + 'vulnerability.score.environmental': + 'Scores can range from 0.0 to 10.0, with 10.0 being the most severe.\nEnvironmental scores cover an assessment for any modified Base metrics, confidentiality, integrity, and availability requirements. For example (https://www.first.org/cvss/specification-document)', + 'vulnerability.score.temporal': + 'Scores can range from 0.0 to 10.0, with 10.0 being the most severe.\nTemporal scores cover an assessment for code maturity, remediation level, and confidence. For example (https://www.first.org/cvss/specification-document)', + 'vulnerability.score.version': + 'The National Vulnerability Database (NVD) provides qualitative severity rankings of "Low", "Medium", and "High" for CVSS v2.0 base score ranges in addition to the severity ratings for CVSS v3.0 as they are defined in the CVSS v3.0 specification.\nCVSS is owned and managed by FIRST.Org, Inc. (FIRST), a US-based non-profit organization, whose mission is to help computer security incident response teams across the world. For example (https://nvd.nist.gov/vuln-metrics/cvss)', + 'vulnerability.severity': + 'The severity of the vulnerability can help with metrics and internal prioritization regarding remediation. 
For example (https://nvd.nist.gov/vuln-metrics/cvss)', +}; diff --git a/x-pack/plugins/integration_assistant/public/constants/headerTitles.tsx b/x-pack/plugins/integration_assistant/public/constants/headerTitles.tsx new file mode 100644 index 0000000000000..5c0681be954c3 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/constants/headerTitles.tsx @@ -0,0 +1,11 @@ +enum HeaderTitles { + '/'= 'Base Page', + '/integration_builder/ecs_mapping'= 'ECS Mapping', + '/integration_builder/categorization'= 'Add Categorization', + '/integration_builder/related'= 'Add Related Fields', + '/integration_builder/results'= 'View Results', + '/integration_builder/build'= 'Build & Deploy', + '/agent_analyzer'= 'Agent Analyzer', + } + +export default HeaderTitles; \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/public/constants/routePaths.tsx b/x-pack/plugins/integration_assistant/public/constants/routePaths.tsx new file mode 100644 index 0000000000000..c5f39e8b56e9b --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/constants/routePaths.tsx @@ -0,0 +1,11 @@ +enum RoutePaths { + BASE_PATH = '/', + ECS_MAPPING_PATH = '/integration_builder/ecs_mapping', + CATEGORIZATION_PATH = '/integration_builder/categorization', + RELATED_PATH = '/integration_builder/related', + INTEGRATION_BUILDER_RESULTS_PATH = '/integration_builder/results', + INTEGRATION_BUILDER_BUILD_PATH = '/integration_builder/build', + AGENT_ANALYZER_PATH = '/agent_analyzer', +} + +export default RoutePaths; diff --git a/x-pack/plugins/integration_assistant/public/get_message_example.tsx b/x-pack/plugins/integration_assistant/public/get_message_example.tsx new file mode 100644 index 0000000000000..47d12a6fed491 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/get_message_example.tsx @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import React, { useCallback } from 'react'; +import { useState } from 'react'; +import { + EuiText, + EuiButton, + EuiLoadingSpinner, + EuiFieldText, + EuiCallOut, + EuiFormRow, +} from '@elastic/eui'; +import { type IHttpFetchError, isHttpFetchError } from '@kbn/core-http-browser'; +import { Services } from './services'; + +interface Props { + getMessageById: Services['getMessageById']; +} + +export function GetMessageRouteExample({ getMessageById }: Props) { + const [error, setError] = useState(); + const [isFetching, setIsFetching] = useState(false); + const [message, setMessage] = useState(''); + const [id, setId] = useState(''); + + const doFetch = useCallback(async () => { + if (isFetching) return; + setIsFetching(true); + const response = await getMessageById(id); + + if (isHttpFetchError(response)) { + setError(response); + setMessage(''); + } else { + setError(undefined); + setMessage(response); + } + + setIsFetching(false); + }, [isFetching, getMessageById, setMessage, id]); + + return ( + + +

GET example with param

+ +

This examples uses a simple GET route that takes an id as a param in the route path.

+ + setId(e.target.value)} + data-test-subj="routingExampleGetMessageId" + /> + + + + doFetch()} + > + {isFetching ? : 'Get message'} + + + + {error !== undefined ? ( + + {error.message} + + ) : null} + {message !== '' ? ( +

+ Message is:

{message}
+

+ ) : null} +
+
+ ); +} diff --git a/x-pack/plugins/integration_assistant/public/index.ts b/x-pack/plugins/integration_assistant/public/index.ts new file mode 100644 index 0000000000000..df8a9d532f4a7 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/index.ts @@ -0,0 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { PluginInitializer } from '@kbn/core/public'; +import { RoutingExamplePlugin } from './plugin'; + +export const plugin: PluginInitializer<{}, {}> = () => new RoutingExamplePlugin(); diff --git a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/BuildIntegrationPage.tsx b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/BuildIntegrationPage.tsx new file mode 100644 index 0000000000000..31dccf6cd8fbc --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/BuildIntegrationPage.tsx @@ -0,0 +1,12 @@ +import { EuiPageTemplate } from '@elastic/eui'; +import BuildIntegrationButtons from '@components/BuildIntegration/BuildIntegrationButtons'; + +const BuildIntegration = () => { + return ( + + + + ); +}; + +export default BuildIntegration; diff --git a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/CategorizationPage.tsx b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/CategorizationPage.tsx new file mode 100644 index 0000000000000..9fd744d894b98 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/CategorizationPage.tsx @@ -0,0 +1,30 @@ +import { EuiPageTemplate, EuiSpacer } from '@elastic/eui'; +import { useGlobalStore } from '@Stores/useGlobalStore'; + +import EmptyPrompt from '@components/EmptyPrompt/EmptyPrompt'; +import CategorizationButtons from 
'@Components/Categorization/CategorizationButtons'; +import PipelineResults from '@Components/IntegrationResults/PipelineResults'; +import RoutePaths from '@Constants/routePaths'; + +const CategorizationPage = () => { + const ingestPipeline = useGlobalStore((state) => state.ingestPipeline); + + if (Object.keys(ingestPipeline).length <= 0) { + return ( + + ); + } + return ( + + + + + + ); +}; + +export default CategorizationPage; diff --git a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/EcsMapperPage.tsx b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/EcsMapperPage.tsx new file mode 100644 index 0000000000000..079bd1b28b145 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/EcsMapperPage.tsx @@ -0,0 +1,27 @@ +import { EuiPageTemplate, EuiSpacer } from '@elastic/eui'; +import { useGlobalStore } from '@Stores/useGlobalStore'; +import EcsFormStats from '@Components/Ecs/EcsFormStats'; +import EcsButtons from '@components/Ecs/EcsButtons'; +import EcsForm from '@Components/Ecs/EcsForm'; +import EcsTable from '@Components/Ecs/EcsTable'; + + +const EcsMapperPage = () => { + const ecsMappingTableState = useGlobalStore((state) => state.ecsMappingTableState); + return ( + + {ecsMappingTableState.length <= 0 && } + {ecsMappingTableState.length >= 1 && ( + <> + + + + + + + )} + + ); +}; + +export default EcsMapperPage; diff --git a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/RelatedPage.tsx b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/RelatedPage.tsx new file mode 100644 index 0000000000000..f2b20e2cf3b7f --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/RelatedPage.tsx @@ -0,0 +1,30 @@ +import { EuiPageTemplate, EuiSpacer } from '@elastic/eui'; +import { useGlobalStore } from '@Stores/useGlobalStore'; + +import EmptyPrompt from '@components/EmptyPrompt/EmptyPrompt'; +import RelatedButtons from 
'@Components/Related/RelatedButtons'; +import PipelineResults from '@Components/IntegrationResults/PipelineResults'; +import RoutePaths from '@Constants/routePaths'; + +const RelatedPage = () => { + const ingestPipeline = useGlobalStore((state) => state.ingestPipeline); + + if (Object.keys(ingestPipeline).length <= 0) { + return ( + + ); + } + return ( + + + + + + ); +}; + +export default RelatedPage; diff --git a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/ViewResultsPage.tsx b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/ViewResultsPage.tsx new file mode 100644 index 0000000000000..28a925b8b0198 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/ViewResultsPage.tsx @@ -0,0 +1,33 @@ +import { EuiPageTemplate, EuiSpacer } from '@elastic/eui'; +import { useGlobalStore } from '@Stores/useGlobalStore'; +import DocResults from '@Components/IntegrationResults/DocsResults'; +import PipelineResults from '@Components/IntegrationResults/PipelineResults'; +import EmptyPrompt from '@components/EmptyPrompt/EmptyPrompt'; +import FinalResultsButtons from '@components/ViewResults/ViewResultsButtons'; +import RoutePaths from '@Constants/routePaths'; + +const ViewResultsPage = () => { + const ingestPipeline = useGlobalStore((state) => state.ingestPipeline); + const docs = useGlobalStore((state) => state.docs); + + if (Object.keys(ingestPipeline).length <= 0) { + return ( + + ); + } + return ( + + + + + + + + ); +}; + +export default ViewResultsPage; diff --git a/x-pack/plugins/integration_assistant/public/pages/Main/MainPage.tsx b/x-pack/plugins/integration_assistant/public/pages/Main/MainPage.tsx new file mode 100644 index 0000000000000..5fda4e9a684eb --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/pages/Main/MainPage.tsx @@ -0,0 +1,11 @@ +import { EuiPageSection } from '@elastic/eui'; + +const MainPage = () => { + return ( + +

Mainpage Test

+
+ ); +}; + +export default MainPage; diff --git a/x-pack/plugins/integration_assistant/public/plugin.tsx b/x-pack/plugins/integration_assistant/public/plugin.tsx new file mode 100644 index 0000000000000..dbfa414e48681 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/plugin.tsx @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { CoreStart, Plugin, CoreSetup, AppMountParameters } from '@kbn/core/public'; +import { getServices } from './services'; + +import { IntegrationAssistantPluginSetup, IntegrationAssistantPluginStart } from './types'; + +export class IntegrationAssistantPlugin + implements Plugin +{ + public setup(core: CoreSetup): IntegrationAssistantPluginSetup { + core.application.register({ + id: 'integrationAssistant', + title: 'Integration Assistant', + async mount(params: AppMountParameters) { + const [coreStart] = await core.getStartServices(); + const startServices = getServices(coreStart); + const { renderApp } = await import('./app'); + return renderApp(startServices, params.element); + }, + }); + return {}; + } + + public start(core: CoreStart) { + return {}; + } + + public stop() { } +} diff --git a/x-pack/plugins/integration_assistant/public/post_message_example.tsx b/x-pack/plugins/integration_assistant/public/post_message_example.tsx new file mode 100644 index 0000000000000..c60a41ca6fe81 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/post_message_example.tsx @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import React, { useCallback } from 'react'; +import { useState } from 'react'; +import { + EuiText, + EuiButton, + EuiLoadingSpinner, + EuiFieldText, + EuiCallOut, + EuiFormRow, + EuiTextArea, +} from '@elastic/eui'; +import { type IHttpFetchError, isHttpFetchError } from '@kbn/core-http-browser'; +import { Services } from './services'; + +interface Props { + postMessage: Services['postMessage']; + addSuccessToast: Services['addSuccessToast']; +} + +export function PostMessageRouteExample({ postMessage, addSuccessToast }: Props) { + const [error, setError] = useState(); + const [isPosting, setIsPosting] = useState(false); + const [message, setMessage] = useState(''); + const [id, setId] = useState(''); + + const doFetch = useCallback(async () => { + if (isPosting) return; + setIsPosting(true); + const response = await postMessage(message, id); + + if (response && isHttpFetchError(response)) { + setError(response); + } else { + setError(undefined); + addSuccessToast('Message was added!'); + setMessage(''); + setId(''); + } + + setIsPosting(false); + }, [isPosting, postMessage, addSuccessToast, setMessage, message, id]); + + return ( + + +

POST example with body

+

+ This examples uses a simple POST route that takes a body parameter and an id as a param in + the route path. +

+ + setId(e.target.value)} + data-test-subj="routingExampleSetMessageId" + /> + + + setMessage(e.target.value)} + /> + + + + doFetch()} + > + {isPosting ? : 'Post message'} + + + + {error !== undefined ? ( + + {error.message} + + ) : null} +
+
+ ); +} diff --git a/x-pack/plugins/integration_assistant/public/random_number_between_example.tsx b/x-pack/plugins/integration_assistant/public/random_number_between_example.tsx new file mode 100644 index 0000000000000..68bea90fd88ff --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/random_number_between_example.tsx @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import React, { useCallback } from 'react'; +import { useState } from 'react'; +import { + EuiText, + EuiButton, + EuiLoadingSpinner, + EuiFieldText, + EuiCallOut, + EuiFormRow, +} from '@elastic/eui'; +import { type IHttpFetchError, isHttpFetchError } from '@kbn/core-http-browser'; +import { Services } from './services'; + +interface Props { + fetchRandomNumberBetween: Services['fetchRandomNumberBetween']; +} + +export function RandomNumberBetweenRouteExample({ fetchRandomNumberBetween }: Props) { + const [error, setError] = useState(); + const [randomNumber, setRandomNumber] = useState(0); + const [isFetching, setIsFetching] = useState(false); + const [maxInput, setMaxInput] = useState('10'); + + const doFetch = useCallback(async () => { + if (isFetching) return; + setIsFetching(true); + const response = await fetchRandomNumberBetween(Number.parseInt(maxInput, 10)); + + if (isHttpFetchError(response)) { + setError(response); + } else { + setRandomNumber(response); + } + + setIsFetching(false); + }, [isFetching, maxInput, fetchRandomNumberBetween]); + + return ( + + +

GET example with query

+

+ This examples uses a simple GET route that takes a query parameter in the request and + returns a single number. +

+ + setMaxInput(e.target.value)} + isInvalid={isNaN(Number(maxInput))} + /> + + + + doFetch()} + > + {isFetching ? : 'Generate random number'} + + + + {error !== undefined ? ( + + {error.message} + + ) : null} + {randomNumber > -1 ? ( +

+ Random number is +
{randomNumber}
+

+ ) : null} +
+
+ ); +} diff --git a/x-pack/plugins/integration_assistant/public/random_number_example.tsx b/x-pack/plugins/integration_assistant/public/random_number_example.tsx new file mode 100644 index 0000000000000..8ae118a722b99 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/random_number_example.tsx @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import React, { useCallback } from 'react'; +import { useState } from 'react'; +import { EuiText, EuiButton, EuiLoadingSpinner, EuiCallOut } from '@elastic/eui'; +import { type IHttpFetchError, isHttpFetchError } from '@kbn/core-http-browser'; +import { Services } from './services'; + +interface Props { + fetchRandomNumber: Services['fetchRandomNumber']; +} + +export function RandomNumberRouteExample({ fetchRandomNumber }: Props) { + const [error, setError] = useState(undefined); + const [randomNumber, setRandomNumber] = useState(0); + const [isFetching, setIsFetching] = useState(false); + + const doFetch = useCallback(async () => { + if (isFetching) return; + setIsFetching(true); + const response = await fetchRandomNumber(); + + if (isHttpFetchError(response)) { + setError(response); + } else { + setRandomNumber(response); + } + + setIsFetching(false); + }, [isFetching, fetchRandomNumber]); + + return ( + + +

GET example

+

+ This example uses a simple GET route that takes no parameters or body in the request and + returns a single number. +

+ doFetch()} + > + {isFetching ? : 'Generate a random number'} + + + {error !== undefined ? ( + + {error} + + ) : null} + {randomNumber > -1 ? ( +

+ Random number is
{randomNumber}
+

+ ) : null} +
+
+ ); +} diff --git a/x-pack/plugins/integration_assistant/public/services.ts b/x-pack/plugins/integration_assistant/public/services.ts new file mode 100644 index 0000000000000..d98791ff5c240 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/services.ts @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +import type { CoreStart } from '@kbn/core/public'; +import type { IHttpFetchError } from '@kbn/core-http-browser'; +import { + RANDOM_NUMBER_ROUTE_PATH, + RANDOM_NUMBER_BETWEEN_ROUTE_PATH, + POST_MESSAGE_ROUTE_PATH, + INTERNAL_GET_MESSAGE_BY_ID_ROUTE, +} from '../common'; + +export interface Services { + fetchRandomNumber: () => Promise; + fetchRandomNumberBetween: (max: number) => Promise; + postMessage: (message: string, id: string) => Promise; + getMessageById: (id: string) => Promise; + addSuccessToast: (message: string) => void; +} + +export function getServices(core: CoreStart): Services { + return { + addSuccessToast: (message: string) => core.notifications.toasts.addSuccess(message), + fetchRandomNumber: async () => { + try { + const response = await core.http.fetch<{ randomNumber: number }>(RANDOM_NUMBER_ROUTE_PATH); + return response.randomNumber; + } catch (e) { + return e; + } + }, + fetchRandomNumberBetween: async (max: number) => { + try { + const response = await core.http.fetch<{ randomNumber: number }>( + RANDOM_NUMBER_BETWEEN_ROUTE_PATH, + { query: { max } } + ); + return response.randomNumber; + } catch (e) { + return e; + } + }, + postMessage: async (message: string, id: string) => { + try { + await core.http.post(`${POST_MESSAGE_ROUTE_PATH}/${id}`, { + body: JSON.stringify({ message }), + }); + } catch (e) { 
+ return e; + } + }, + getMessageById: async (id: string) => { + try { + const response = await core.http.get<{ message: string }>( + `${INTERNAL_GET_MESSAGE_BY_ID_ROUTE}/${id}` + ); + return response.message; + } catch (e) { + return e; + } + }, + }; +} diff --git a/x-pack/plugins/integration_assistant/public/stores/integrationBuilderStore.tsx b/x-pack/plugins/integration_assistant/public/stores/integrationBuilderStore.tsx new file mode 100644 index 0000000000000..909ef95706dd0 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/stores/integrationBuilderStore.tsx @@ -0,0 +1,184 @@ +import { StateCreator } from 'zustand'; +import { mergeDeeply } from '@Utils/samples'; + +const initialIntegrationBuilderStepsState = { + integrationBuilderStep1: 'current', + integrationBuilderStep2: 'incomplete', + integrationBuilderStep3: 'incomplete', + integrationBuilderStep4: 'incomplete', + integrationBuilderStep5: 'incomplete', +}; + +export const integrationBuilderStepsState: StateCreator< + IntegrationBuilderStepsState, + [['zustand/devtools', never]], + [], + IntegrationBuilderStepsState +> = (set): IntegrationBuilderStepsState => ({ + ...initialIntegrationBuilderStepsState, + setIntegrationBuilderStepsState: (key, value) => set(() => ({ [key]: value })), + resetIntegrationBuilderStepsState: () => set(() => ({ ...initialIntegrationBuilderStepsState })), +}); + +const initialEcsMappingFormState = { + packageName: '', + packageTitle: '', + packageVersion: '0.1.0', + dataStreamName: '', + dataStreamTitle: '', + logFormat: 'json', + inputTypes: [], + formSamples: [], + sampleCount: 0, + uniqueKeysCount: 0, +}; + +export const ecsMappingFormState: StateCreator< + EcsMappingFormState, + [['zustand/devtools', never]], + [], + EcsMappingFormState +> = (set): EcsMappingFormState => ({ + ...initialEcsMappingFormState, + addFormSamples: (value) => + set((state) => { + // New formSamples after adding the new valid samples + const newFormSamples = [...state.formSamples, 
...value]; + + // Calculate sampleCount as the length of newFormSamples + const newSampleCount = newFormSamples.length; + const rawObjects = newFormSamples.map((line) => JSON.parse(line)); + // Calculate uniqueKeysCount by merging all objects and counting the keys + const mergedObject = mergeDeeply(rawObjects); + const newUniqueKeysCount = Object.keys(mergedObject).length; + + return { + formSamples: newFormSamples, + sampleCount: newSampleCount, + uniqueKeysCount: newUniqueKeysCount, + }; + }), + setSampleCount: (value) => set(() => ({ sampleCount: value })), + setUniqueKeysCount: (value) => set(() => ({ uniqueKeysCount: value })), + setEcsMappingFormValue: (key, value) => set(() => ({ [key]: value })), + setEcsMappingFormArrayValue: (key, value) => set(() => ({ [key]: value })), + resetEcsMappingFormState: () => set(() => ({ ...initialEcsMappingFormState })), +}); + +const initialIntegrationBuilderChainItemsState = { + mapping: {}, + ingestPipeline: {}, + docs: [], + integrationBuilderZipFile: null, +}; + +export const integrationBuilderChainItemsState: StateCreator< + IntegrationBuilderChainItemsState, + [['zustand/devtools', never]], + [], + IntegrationBuilderChainItemsState +> = (set): IntegrationBuilderChainItemsState => ({ + ...initialIntegrationBuilderChainItemsState, + setIntegrationBuilderZipFile: (file) => set(() => ({ integrationBuilderZipFile: file })), + setIntegrationBuilderChainItemsState(key, value) { + set(() => ({ [key]: value })); + }, + updateChainItem: (path, newValue, itemType) => + set((state) => { + const keys = path.split('.'); + const lastKey = keys.pop(); + const lastObj = keys.reduce((acc, key) => (acc[key] = acc[key] || {}), state[itemType]); + if (lastKey) { + lastObj[lastKey] = newValue; + } + return { [itemType]: { ...state[itemType] } }; + }), + resetChainItemsState: () => set(() => ({ ...initialIntegrationBuilderChainItemsState })), +}); + +const initialEcsMappingTableState = { + ecsMappingTablePopoverState: {}, + 
ecsMappingTableState: [], + ecsMappingTableItemsWithEcs: 0, +}; + +export const ecsMappingTableState: StateCreator< + EcsMappingTableState, + [['zustand/devtools', never]], + [], + EcsMappingTableState +> = (set): EcsMappingTableState => ({ + ...initialEcsMappingTableState, + setEcsMappingTableItemsWithEcs: (value) => set(() => ({ ecsMappingTableItemsWithEcs: value })), + setEcsMappingTablePopoverState: (identifier) => + set((state) => ({ + ecsMappingTablePopoverState: { + ...state.ecsMappingTablePopoverState, + [identifier]: !state.ecsMappingTablePopoverState[identifier], + }, + })), + setEcsMappingTableState: (value) => set(() => ({ ecsMappingTableState: value })), + updateEcsMappingTableItem: (id, newDestinationField) => + set((state) => { + const updatedTableState = state.ecsMappingTableState.map((item) => { + if (item.id === id) { + return { ...item, destinationField: newDestinationField }; + } + return item; + }); + + return { ecsMappingTableState: updatedTableState }; + }), + resetEcsMappingTableState: () => set(() => ({ ...initialEcsMappingTableState })), +}); + +const initialIntegrationBuilderContinueState = { + ecsButtonContinue: false, + categorizationButtonContinue: false, + relatedButtonContinue: false, +}; + +export const integrationBuilderContinueState: StateCreator< + IntegrationBuilderContinueState, + [['zustand/devtools', never]], + [], + IntegrationBuilderContinueState +> = (set): IntegrationBuilderContinueState => ({ + ...initialIntegrationBuilderContinueState, + setContinueButtonState: (key, value) => set(() => ({ [key]: value })), + resetContinueButtonState: () => set(() => ({ ...initialIntegrationBuilderContinueState })), +}); + +const initialIntegrationBuilderIsLoadingState = { + relatedIsLoading: false, + categorizationIsLoading: false, + ecsMappingIsLoading: false, +}; + +export const integrationBuilderIsLoadingState: StateCreator< + IntegrationBuilderIsLoadingState, + [['zustand/devtools', never]], + [], + IntegrationBuilderIsLoadingState 
+> = (set): IntegrationBuilderIsLoadingState => ({ + ...initialIntegrationBuilderIsLoadingState, + setIsLoadingState: (key, value) => set(() => ({ [key]: value })), + resetIsLoadingState: () => set(() => ({ ...initialIntegrationBuilderIsLoadingState })), +}); + +const initialIntegrationBuilderHeaderState = { + isPortalLoading: false, + integrationBuilderHeaderTitle: "", +}; + +export const integrationBuilderHeaderState: StateCreator< + IntegrationBuilderHeaderState, + [['zustand/devtools', never]], + [], + IntegrationBuilderHeaderState +> = (set): IntegrationBuilderHeaderState => ({ + ...initialIntegrationBuilderHeaderState, + setIsPortalLoadingState: (value) => set(() => ({ isPortalLoading: value })), + setIntegrationBuilderHeaderTitle: (value) => set(() => ({ integrationBuilderHeaderTitle: value })), + resetIntegrationBuilderHeaderState: () => set(() => ({ ...initialIntegrationBuilderHeaderState })), +}); \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/public/stores/sideNavStore.tsx b/x-pack/plugins/integration_assistant/public/stores/sideNavStore.tsx new file mode 100644 index 0000000000000..f2b8d78f5c2ce --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/stores/sideNavStore.tsx @@ -0,0 +1,10 @@ +import { StateCreator } from 'zustand'; + +export const sideNavState: StateCreator = ( + set, +): SideNavState => ({ + selected: '', + setSelected: (value) => { + set(() => ({ selected: value })); + }, +}); diff --git a/x-pack/plugins/integration_assistant/public/stores/useGlobalStore.tsx b/x-pack/plugins/integration_assistant/public/stores/useGlobalStore.tsx new file mode 100644 index 0000000000000..086c42a94133f --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/stores/useGlobalStore.tsx @@ -0,0 +1,35 @@ +import { devtools } from 'zustand/middleware'; +import { create } from 'zustand'; +import { + integrationBuilderStepsState, + ecsMappingFormState, + integrationBuilderChainItemsState, + ecsMappingTableState, + 
integrationBuilderContinueState, + integrationBuilderIsLoadingState, + integrationBuilderHeaderState, +} from '@Stores/integrationBuilderStore'; + +import { sideNavState } from '@Stores/sideNavStore'; + +export const useGlobalStore = create< + IntegrationBuilderStepsState & + EcsMappingFormState & + IntegrationBuilderChainItemsState & + EcsMappingTableState & + IntegrationBuilderContinueState & + IntegrationBuilderIsLoadingState & + IntegrationBuilderHeaderState & + SideNavState +>()( + devtools((...a) => ({ + ...integrationBuilderStepsState(...a), + ...ecsMappingFormState(...a), + ...integrationBuilderChainItemsState(...a), + ...ecsMappingTableState(...a), + ...integrationBuilderContinueState(...a), + ...integrationBuilderIsLoadingState(...a), + ...integrationBuilderHeaderState(...a), + ...sideNavState(...a), + })), +); diff --git a/x-pack/plugins/integration_assistant/public/types.ts b/x-pack/plugins/integration_assistant/public/types.ts new file mode 100644 index 0000000000000..8c984dee240bb --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/types.ts @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { NavigationPublicPluginStart } from '@kbn/navigation-plugin/public'; +export interface IntegrationAssistantPluginSetup { + getGreeting: () => string; +} +// eslint-disable-next-line @typescript-eslint/no-empty-interface +export interface IntegrationAssistantPluginStart {} + +export interface AppPluginStartDependencies { + navigation: NavigationPublicPluginStart; +} diff --git a/x-pack/plugins/integration_assistant/public/types/ApiRequests.tsx b/x-pack/plugins/integration_assistant/public/types/ApiRequests.tsx new file mode 100644 index 0000000000000..71052ae5f92ab --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/types/ApiRequests.tsx @@ -0,0 +1,37 @@ +interface BuildIntegrationAPIRequest { + packageName: string; + packageTitle: string; + packageVersion: string; + dataStreamName: string; + inputTypes: string[]; + formSamples: string[]; + ingestPipeline: object; + docs: Array; +} + +interface EcsMappingAPIRequest { + packageName: string; + dataStreamName: string; + formSamples: string[]; +} + +interface EcsMappingNewPipelineAPIRequest { + packageName: string; + dataStreamName: string; + formSamples: string[]; + mapping: object; +} + +interface CategorizationAPIRequest { + packageName: string; + dataStreamName: string; + formSamples: string[]; + ingestPipeline: object; +} + +interface RelatedAPIRequest { + packageName: string; + dataStreamName: string; + formSamples: string[]; + ingestPipeline: object; +} \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/public/types/ApiResponses.tsx b/x-pack/plugins/integration_assistant/public/types/ApiResponses.tsx new file mode 100644 index 0000000000000..f3b0c02395a3a --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/types/ApiResponses.tsx @@ -0,0 +1,20 @@ +type CategorizationApiResponse = { + results: { + pipeline: object; + docs: Array; + }; +}; + +type RelatedApiResponse = { + results: { + pipeline: object; + docs: Array; + }; +}; + +type 
EcsMappingApiResponse = { + results: { + mapping: object; + current_pipeline: object; + }; +}; \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/public/types/IntegrationBuilder.tsx b/x-pack/plugins/integration_assistant/public/types/IntegrationBuilder.tsx new file mode 100644 index 0000000000000..34e70fbdbbeee --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/types/IntegrationBuilder.tsx @@ -0,0 +1,83 @@ +type EcsMappingTableItem = { + sourceField: string; + destinationField: string; + isEcs: boolean; + description: string; + id: string; + exampleValue: any; +}; + +interface EcsMappingFormState { + packageName: string; + packageTitle: string; + packageVersion: string; + dataStreamName: string; + dataStreamTitle: string; + logFormat: string; + inputTypes: string[]; + formSamples: string[]; + sampleCount: number; + uniqueKeysCount: number; + addFormSamples: (samples: string[]) => void; + setSampleCount: (value: number) => void; + setUniqueKeysCount: (value: number) => void; + setEcsMappingFormValue: (key: string, value: string) => void; + setEcsMappingFormArrayValue: (key: string, value: string[]) => void; + resetEcsMappingFormState: () => void; +} + +interface EcsMappingTableState { + ecsMappingTableState: EcsMappingTableItem[]; + ecsMappingTablePopoverState: {}; + ecsMappingTableItemsWithEcs: number; + setEcsMappingTableItemsWithEcs: (value: number) => void; + setEcsMappingTablePopoverState: (identifier: string) => void; + setEcsMappingTableState: (value: EcsMappingTableItem[]) => void; + updateEcsMappingTableItem: (id: string, newDestinationField: string) => void; + resetEcsMappingTableState: () => void; +} + +interface IntegrationBuilderStepsState { + integrationBuilderStep1: string; + integrationBuilderStep2: string; + integrationBuilderStep3: string; + integrationBuilderStep4: string; + integrationBuilderStep5: string; + setIntegrationBuilderStepsState: (key: string, value: string) => void; + 
resetIntegrationBuilderStepsState: () => void; +} + +interface IntegrationBuilderContinueState { + ecsButtonContinue: boolean, + relatedButtonContinue: boolean, + categorizationButtonContinue: boolean, + setContinueButtonState: (key: string, value: boolean) => void; + resetContinueButtonState: () => void; +} + +interface IntegrationBuilderIsLoadingState { + relatedIsLoading: boolean, + ecsMappingIsLoading: boolean, + categorizationIsLoading: boolean, + setIsLoadingState: (key: string, value: boolean) => void; + resetIsLoadingState: () => void; +} + +interface IntegrationBuilderChainItemsState { + ingestPipeline: object; + docs: Array; + mapping: object; + integrationBuilderZipFile: File | null; + setIntegrationBuilderZipFile: (file: File) => void; + setIntegrationBuilderChainItemsState: (key: string, value: object) => void; + updateChainItem: (path: string, newValue: object, itemType: string) => void; + resetChainItemsState: () => void; +} + +interface IntegrationBuilderHeaderState { + integrationBuilderHeaderTitle: string; + isPortalLoading: boolean; + setIsPortalLoadingState: (value: boolean) => void; + setIntegrationBuilderHeaderTitle: (value: string) => void; + resetIntegrationBuilderHeaderState: () => void; +} \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/public/types/SideNav.tsx b/x-pack/plugins/integration_assistant/public/types/SideNav.tsx new file mode 100644 index 0000000000000..cbca821ad1be6 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/types/SideNav.tsx @@ -0,0 +1,4 @@ +interface SideNavState { + selected: string; + setSelected: (value: string) => void; +} diff --git a/x-pack/plugins/integration_assistant/public/utils/samples.tsx b/x-pack/plugins/integration_assistant/public/utils/samples.tsx new file mode 100644 index 0000000000000..5b65be632b924 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/utils/samples.tsx @@ -0,0 +1,78 @@ +import { htmlIdGenerator } from '@elastic/eui'; +import { 
ECSFIELDS } from '@Constants/ecsFields'; + +function isEmptyValue(value): boolean { + return value === null || value === ''; +} + +function getExampleValueByPath(obj: object, path: string): object { + return path.split('.').reduce((acc, part) => acc && acc[part], obj); +} + +export function mergeDeeply(objects: object[], base?: object): object { + const result: object = base ?? {}; + + const merge = (target: object, source: object): object => { + Object.keys(source).forEach((key) => { + const targetValue = target[key]; + const sourceValue = source[key]; + + if (typeof sourceValue === 'object' && sourceValue !== null && !Array.isArray(sourceValue)) { + if (typeof targetValue !== 'object' || targetValue === null || isEmptyValue(targetValue)) { + target[key] = merge({}, sourceValue); + } else { + target[key] = merge(targetValue, sourceValue); + } + } else if ( + !Object.prototype.hasOwnProperty.call(target, key) || + (isEmptyValue(targetValue) && !isEmptyValue(sourceValue)) + ) { + target[key] = sourceValue; + } + }); + + return target; + }; + + objects.forEach((obj) => { + merge(result, obj); + }); + + return result; +} + +export function traverseAndMatchFields( + mapping: object, + mergedObject: object, + packageName: string, + dataStreamName: string, + path: string[] = [], +): EcsMappingTableItem[] { + const makeId = htmlIdGenerator(); + let matches: EcsMappingTableItem[] = []; + + Object.entries(mapping).forEach(([key, value]) => { + if (typeof value === 'object' && value !== null) { + matches = matches.concat( + traverseAndMatchFields(value, mergedObject, packageName, dataStreamName, path.concat(key)), + ); + } else { + const matchKey = value; + const isECS = ECSFIELDS.hasOwnProperty(matchKey); // eslint-disable-line no-prototype-builtins + const fullPath = path.concat(key).join('.'); + const exampleValue = getExampleValueByPath(mergedObject, fullPath); + const destinationField = isECS ? 
matchKey : `${packageName}.${dataStreamName}.${fullPath}`; + + matches.push({ + sourceField: fullPath, + destinationField: destinationField, + isEcs: isECS, + description: isECS ? ECSFIELDS[matchKey] : '', + id: makeId(), + exampleValue: exampleValue, + }); + } + }); + + return matches; +} diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts new file mode 100644 index 0000000000000..327759b26bd90 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts @@ -0,0 +1,30 @@ +import { JsonOutputParser } from '@langchain/core/output_parsers'; +import { getModel } from '../../providers/bedrock'; +import { CategorizationState } from '../../types'; +import { combineProcessors } from '../../util/pipeline'; +import { Pipeline } from '../../../common/types'; +import { CATEGORIZATION_MAIN_PROMPT } from './prompts'; + +export async function handleCategorization(state: CategorizationState) { + const categorizationMainPrompt = CATEGORIZATION_MAIN_PROMPT; + const model = getModel(); + console.log('testing cat main'); + + const outputParser = new JsonOutputParser(); + const categorizationMainGraph = categorizationMainPrompt.pipe(model).pipe(outputParser); + + const currentProcessors = (await categorizationMainGraph.invoke({ + pipeline_results: JSON.stringify(state.pipelineResults, null, 2), + ex_answer: state?.exAnswer, + ecs_categories: state?.ecsCategories, + ecs_types: state?.ecsTypes, + })) as any[]; + + const currentPipeline = combineProcessors(state.initialPipeline as Pipeline, currentProcessors); + + return { + currentPipeline, + currentProcessors, + lastExecutedChain: 'categorization', + }; +} diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/constants.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/constants.ts new file mode 100644 index 
0000000000000..2e679ab136dcb --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/constants.ts @@ -0,0 +1,236 @@ +export const ECS_CATEGORIES = { + api: 'Covers events from API calls, including those from OS and network protocols. Allowed event.type combinations: access, admin, allowed, change, creation, deletion, denied, end, info, start, user', + authentication: + 'Focuses on login and credential verification processes. Allowed event.type combinations: start, end, info', + configuration: + 'Deals with application, process, or system settings changes. Allowed event.type combinations: access, change, creation, deletion, info', + database: + 'Relates to data storage systems, such as SQL or Elasticsearch. Allowed event.type combinations: access, change, info, error', + driver: + 'Involves OS device driver activities. Allowed event.type combinations: change, end, info, start', + email: 'Covers events from email messages and protocols. Allowed event.type combinations: info', + file: 'Related to file creation, access, and deletion. Allowed event.type combinations: access, change, creation, deletion, info', + host: 'Provides information about hosts, excluding activity on them. Allowed event.type combinations: access, change, end, info, start', + iam: 'Concerns users, groups, and administration events. Allowed event.type combinations: admin, change, creation, deletion, group, info, user', + intrusion_detection: + 'Detects intrusions from IDS/IPS systems. Allowed event.type combinations: allowed, denied, info', + library: + 'Refers to the loading of libraries into processes. Allowed event.type combinations: start', + malware: 'Focuses on malware detection events and alerts. Allowed event.type combinations: info', + network: + 'Captures all network-related activities. Allowed event.type combinations: access, allowed, connection, denied, end, info, protocol, start', + package: + 'Concerns software packages on hosts. 
Allowed event.type combinations: access, change, deletion, info, installation, start', + process: + 'Addresses process-specific details. Allowed event.type combinations: access, change, end, info, start', + registry: + 'Focuses on Windows registry settings. Allowed event.type combinations: access, change, creation, deletion', + session: + 'Relates to persistent connections to hosts/services. Allowed event.type combinations: start, end, info', + threat: + "Describes threat actors' intentions and behaviors. Allowed event.type combinations: indicator", + vulnerability: 'Pertain to vulnerability scan outcomes. Allowed event.type combinations: info', + web: 'Concerns web server access events. access, error, Allowed event.type combinations: info', +}; + +export const ECS_TYPES = { + access: 'Used to indicate something was accessed. Examples include accessing databases or files.', + admin: + 'Pertains to events related to admin objects, like administrative changes in IAM not tied to specific users or groups.', + allowed: + 'Indicates that a certain action or event was permitted, like firewall connections that were permitted.', + change: + 'Used for events indicating that something has changed, such as modifications in files or processes.', + connection: + 'Mainly for network-related events, capturing details sufficient for flow or connection analysis, like Netflow or IPFIX events.', + creation: 'Denotes that something was created. A typical example is file creation.', + deletion: 'Indicates that something was removed or deleted, for instance, file deletions.', + denied: + 'Refers to events where something was denied or blocked, such as a network connection that was blocked by a firewall.', + end: 'Suggests that something has concluded or ended, like a process.', + error: + 'Used for events that describe errors, but not errors during event ingestion. 
For instance, database errors.', + group: + 'Pertains to group-related events within categories, like creation or modification of user groups in IAM.', + indicator: + 'Represents events that contain indicators of compromise (IOCs), commonly associated with threat detection.', + info: "Denotes purely informational events that don't imply a state change or an action. For example, system information logs.", + installation: 'Indicates that something was installed, typically software or packages.', + protocol: + 'Used for events containing detailed protocol analysis, beyond just naming the protocol, especially in network events.', + start: 'Signals the commencement of something, such as a process.', + user: 'Relates to user-centric events within categories, like user creation or deletion in IAM.', +}; + +export const EVENT_TYPES = [ + 'access', + 'admin', + 'allowed', + 'change', + 'connection', + 'creation', + 'deletion', + 'denied', + 'end', + 'error', + 'group', + 'indicator', + 'info', + 'installation', + 'protocol', + 'start', + 'user', +]; + +export const EVENT_CATEGORIES = [ + 'api', + 'authentication', + 'configuration', + 'database', + 'driver', + 'email', + 'file', + 'host', + 'iam', + 'intrusion_detection', + 'library', + 'malware', + 'network', + 'package', + 'process', + 'registry', + 'session', + 'threat', + 'vulnerability', + 'web', +]; + +type EventCategories = + | 'api' + | 'authentication' + | 'configuration' + | 'database' + | 'driver' + | 'email' + | 'file' + | 'host' + | 'iam' + | 'intrusion_detection' + | 'library' + | 'network' + | 'package' + | 'process' + | 'registry' + | 'session' + | 'threat' + | 'user' + | 'vulnerability' + | 'web'; + +export const ECS_EVENT_TYPES_PER_CATEGORY: { + [key in EventCategories]: string[]; +} = { + api: [ + 'access', + 'admin', + 'allowed', + 'change', + 'creation', + 'deletion', + 'denied', + 'end', + 'info', + 'start', + 'user', + ], + authentication: ['start', 'end', 'info'], + configuration: ['access', 
'change', 'creation', 'deletion', 'info'], + database: ['access', 'change', 'info', 'error'], + driver: ['change', 'end', 'info', 'start'], + email: ['access', 'change', 'creation', 'deletion', 'info', 'start'], + file: ['access', 'change', 'creation', 'deletion', 'info', 'start'], + host: ['access', 'change', 'creation', 'deletion', 'info', 'start'], + iam: ['access', 'change', 'creation', 'deletion', 'info', 'start'], + intrusion_detection: ['access', 'change', 'creation', 'deletion', 'info', 'start'], + library: ['access', 'change', 'creation', 'deletion', 'info', 'start'], + network: ['access', 'change', 'creation', 'deletion', 'info', 'start'], + package: ['access', 'change', 'creation', 'deletion', 'info', 'start'], + process: ['access', 'change', 'creation', 'deletion', 'info', 'start'], + registry: ['access', 'change', 'creation', 'deletion', 'info', 'start'], + session: ['access', 'change', 'creation', 'deletion', 'info', 'start'], + threat: ['access', 'change', 'creation', 'deletion', 'info', 'start'], + user: ['access', 'change', 'creation', 'deletion', 'info', 'start'], + vulnerability: ['access', 'change', 'creation', 'deletion', 'info', 'start'], + web: ['access', 'change', 'creation', 'deletion', 'info', 'start'], +}; + +export const CATEGORIZATION_EXAMPLE_PROCESSORS = ` +If condition that determines if ctx.checkpoint?.operation is not of a specific value: +{ + "append": { + "field": "event.category", + "value": "network", + "allow_duplicates": false, + "if": "ctx.checkpoint?.operation != 'Log In'" + } +} + +If condition that determines if ctx.checkpoint?.operation is of a specific value: +{ + "append": { + "field": "event.category", + "value": "authentication", + "allow_duplicates": false, + "if": "ctx.checkpoint?.operation == 'Log In'" + } +} + +Appending multiple values when either the value Accept or Allow is found in ctx.checkpoint?.rule_action: +{ + "append": { + "field": "event.type", + "value": [ + "allowed", + "connection" + ], + 
"allow_duplicates": false, + "if": "['Accept', 'Allow'].contains(ctx.checkpoint?.rule_action)" + } +} +`; + +export const CATEGORIZATION_EXAMPLE_ANSWER = [ + { append: { field: 'event.type', value: ['access'] } }, + { + append: { + field: 'event.type', + value: ['allowed', 'connection'], + allow_duplicates: false, + if: "['Accept', 'Allow'].contains(ctx.checkpoint?.rule_action)", + }, + }, + { + append: { + field: 'event.category', + value: ['network'], + allow_duplicates: false, + if: "['Accept', 'Allow'].contains(ctx.checkpoint?.rule_action)", + }, + }, + { + append: { + field: 'event.type', + value: ['start'], + allow_duplicates: false, + if: "ctx.checkpoint?.operation == 'Log In'", + }, + }, + { + append: { + field: 'event.category', + value: ['authentication'], + allow_duplicates: false, + if: "ctx.checkpoint?.operation == 'Log In'", + }, + }, +]; diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts new file mode 100644 index 0000000000000..225cc43edf884 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts @@ -0,0 +1,32 @@ +import { JsonOutputParser } from '@langchain/core/output_parsers'; +import { CATEGORIZATION_ERROR_PROMPT } from './prompts'; +import { getModel } from '../../providers/bedrock'; +import { CategorizationState } from '../../types'; +import { combineProcessors } from '../../util/pipeline'; +import { Pipeline } from '../../../common/types'; + +export async function handleErrors(state: CategorizationState) { + const categorizationErrorPrompt = CATEGORIZATION_ERROR_PROMPT; + const model = getModel(); + console.log('testing cat error'); + + const outputParser = new JsonOutputParser(); + const categorizationErrorGraph = categorizationErrorPrompt.pipe(model).pipe(outputParser); + + const currentProcessors = (await categorizationErrorGraph.invoke({ + current_processors: 
JSON.stringify(state.currentProcessors, null, 2), + ex_answer: state.exAnswer, + errors: JSON.stringify(state.errors, null, 2), + package_name: state.packageName, + data_stream_name: state.dataStreamName, + })) as any[]; + + const currentPipeline = combineProcessors(state.initialPipeline as Pipeline, currentProcessors); + + return { + currentPipeline, + currentProcessors, + reviewed: false, + lastExecutedChain: 'error', + }; +} diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts new file mode 100644 index 0000000000000..b2e174f0f9ffa --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts @@ -0,0 +1,175 @@ +import { StateGraph, StateGraphArgs, END, START } from '@langchain/langgraph'; +import { CategorizationState } from '../../types'; +import { modifySamples, formatSamples } from '../../util/samples'; +import { handleCategorization } from './categorization'; +import { handleValidatePipeline } from '../../util/es'; +import { handleCategorizationValidation } from './validate'; +import { handleInvalidCategorization } from './invalid'; +import { handleErrors } from './errors'; +import { handleReview } from './review'; +import { CATEGORIZATION_EXAMPLE_ANSWER, ECS_CATEGORIES, ECS_TYPES } from './constants'; + +const graphState: StateGraphArgs['channels'] = { + lastExecutedChain: { + value: (x: string, y?: string) => y ?? x, + default: () => '', + }, + rawSamples: { + value: (x: string[], y?: string[]) => y ?? x, + default: () => [], + }, + samples: { + value: (x: string[], y?: string[]) => y ?? x, + default: () => [], + }, + formattedSamples: { + value: (x: string, y?: string) => y ?? x, + default: () => '', + }, + ecsTypes: { + value: (x: string, y?: string) => y ?? x, + default: () => '', + }, + ecsCategories: { + value: (x: string, y?: string) => y ?? 
x, + default: () => '', + }, + exAnswer: { + value: (x: string, y?: string) => y ?? x, + default: () => '', + }, + packageName: { + value: (x: string, y?: string) => y ?? x, + default: () => '', + }, + dataStreamName: { + value: (x: string, y?: string) => y ?? x, + default: () => '', + }, + finalized: { + value: (x: boolean, y?: boolean) => y ?? x, + default: () => false, + }, + reviewed: { + value: (x: boolean, y?: boolean) => y ?? x, + default: () => false, + }, + errors: { + value: (x: object, y?: object) => y ?? x, + default: () => ({}), + }, + pipelineResults: { + value: (x: object[], y?: object[]) => y ?? x, + default: () => [{}], + }, + currentMapping: { + value: (x: object, y?: object) => y ?? x, + default: () => ({}), + }, + currentPipeline: { + value: (x: object, y?: object) => y ?? x, + default: () => ({}), + }, + currentProcessors: { + value: (x: object[], y?: object[]) => y ?? x, + default: () => [], + }, + invalidCategorization: { + value: (x: object, y?: object) => y ?? x, + default: () => ({}), + }, + initialPipeline: { + value: (x: object, y?: object) => y ?? x, + default: () => ({}), + }, + results: { + value: (x: object, y?: object) => y ?? 
x, + default: () => ({}), + }, +}; + +function modelInput(state: CategorizationState): Partial { + const samples = modifySamples(state); + const formattedSamples = formatSamples(samples); + const initialPipeline = JSON.parse(JSON.stringify(state.currentPipeline)); + return { + exAnswer: JSON.stringify(CATEGORIZATION_EXAMPLE_ANSWER, null, 2), + ecsCategories: JSON.stringify(ECS_CATEGORIES, null, 2), + ecsTypes: JSON.stringify(ECS_TYPES, null, 2), + samples, + formattedSamples, + initialPipeline, + finalized: false, + reviewed: false, + lastExecutedChain: 'modelInput', + }; +} + +function modelOutput(state: CategorizationState): Partial { + return { + finalized: true, + lastExecutedChain: 'modelOutput', + results: { + docs: state.pipelineResults, + pipeline: state.currentPipeline, + }, + }; +} + +function validationRouter(state: CategorizationState): string { + if (Object.keys(state.currentProcessors).length === 0) { + console.log('No current processors found'); + return 'categorization'; + } + return 'validateCategorization'; +} + +function chainRouter(state: CategorizationState): string { + if (Object.keys(state.errors).length > 0) { + return 'errors'; + } + if (Object.keys(state.invalidCategorization).length > 0) { + return 'invalidCategorization'; + } + if (!state.reviewed) { + return 'review'; + } + if (!state.finalized) { + return 'modelOutput'; + } + return END; +} + +export function getCategorizationGraph() { + const workflow = new StateGraph({ + channels: graphState, + }) + .addNode('modelInput', modelInput) + .addNode('modelOutput', modelOutput) + .addNode('handleCategorization', handleCategorization) + .addNode('handleValidatePipeline', handleValidatePipeline) + .addNode('handleCategorizationValidation', handleCategorizationValidation) + .addNode('handleInvalidCategorization', handleInvalidCategorization) + .addNode('handleErrors', handleErrors) + .addNode('handleReview', handleReview) + .addEdge(START, 'modelInput') + .addEdge('modelOutput', END) + 
.addEdge('modelInput', 'handleValidatePipeline') + .addEdge('handleCategorization', 'handleValidatePipeline') + .addEdge('handleInvalidCategorization', 'handleValidatePipeline') + .addEdge('handleErrors', 'handleValidatePipeline') + .addEdge('handleReview', 'handleValidatePipeline') + .addConditionalEdges('handleValidatePipeline', validationRouter, { + categorization: 'handleCategorization', + validateCategorization: 'handleCategorizationValidation', + }) + .addConditionalEdges('handleCategorizationValidation', chainRouter, { + modelOutput: 'modelOutput', + errors: 'handleErrors', + invalidCategorization: 'handleInvalidCategorization', + review: 'handleReview', + }); + + const compiledCategorizationGraph = workflow.compile(); + return compiledCategorizationGraph; +} diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/index.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/index.ts new file mode 100644 index 0000000000000..a92a00b93ea4d --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/index.ts @@ -0,0 +1 @@ +export { getCategorizationGraph } from './graph'; diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts new file mode 100644 index 0000000000000..108e36af44d24 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts @@ -0,0 +1,32 @@ +import { JsonOutputParser } from '@langchain/core/output_parsers'; +import { CATEGORIZATION_VALIDATION_PROMPT } from './prompts'; +import { getModel } from '../../providers/bedrock'; +import { CategorizationState } from '../../types'; +import { combineProcessors } from '../../util/pipeline'; +import { ECS_EVENT_TYPES_PER_CATEGORY } from './constants'; +import { Pipeline } from '../../../common/types'; + +export async function handleInvalidCategorization(state: CategorizationState) { + const 
categorizationInvalidPrompt = CATEGORIZATION_VALIDATION_PROMPT; + const model = getModel(); + console.log('testing cat invalid'); + + const outputParser = new JsonOutputParser(); + const categorizationInvalidGraph = categorizationInvalidPrompt.pipe(model).pipe(outputParser); + + const currentProcessors = (await categorizationInvalidGraph.invoke({ + current_processors: JSON.stringify(state.currentProcessors, null, 2), + invalid_categorization: JSON.stringify(state.invalidCategorization, null, 2), + ex_answer: state.exAnswer, + compatible_types: JSON.stringify(ECS_EVENT_TYPES_PER_CATEGORY, null, 2), + })) as any[]; + + const currentPipeline = combineProcessors(state.initialPipeline as Pipeline, currentProcessors); + + return { + currentPipeline, + currentProcessors, + reviewed: false, + lastExecutedChain: 'invalidCategorization', + }; +} diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/prompts.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/prompts.ts new file mode 100644 index 0000000000000..af5066e8a0a7a --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/prompts.ts @@ -0,0 +1,195 @@ +import { ChatPromptTemplate } from '@langchain/core/prompts'; + +export const CATEGORIZATION_MAIN_PROMPT = ChatPromptTemplate.fromMessages([ + [ + 'system', + `You are a helpful, expert assistant on Elasticsearch Ingest Pipelines, focusing on providing append processors that can be used to enrich samples with all relevant event.type and event.category values. + Here are some context for you to reference for your task, read it carefully as you will get questions about it later: + + + Event Category (event.category): + Purpose: It is the second level in the ECS category hierarchy, representing the primary category or "big bucket" for event classification. + Type: It's a keyword type and can have multiple values (list). + Relationship: Works alongside event.type, which acts as a subcategory. 
+ Allowed categories and their descriptions: + {ecs_categories} + + Event Type (event.type): + Purpose: It is the third level in the ECS category hierarchy, represents a categorization "sub-bucket". + Type: It's a keyword type and can have multiple values (list). + Relationship: Works alongside event.category, which acts as a subcategory. + Allowed types and their descriptions: + {ecs_types} + + `, + ], + [ + 'human', + `Please help me by providing all relevant append processors for any detected event.category and event.type combinations that would fit the below pipeline results as an array of JSON objects. + + + {pipeline_results} + + + Go through each of the pipeline results above step by step and do the following to add all relevant event.type and event.category combinations. + 1. Try to understand what is unique about each pipeline result, and what sort of event.categorization and event.type combinations that fit best, and if there is any unique values for each result. + 2. For for each combination of event.category and event.type that you find, add a new append processor to your array of JSON objects. + 3. If only certain results are relevant to the event.category and event.type combination, add an if condition similar to the above example processors, that describes what value or field needs to be available for this categorization to take place. The if condition should be inside the processor object. + 4. Always check if the combination of event.category and event.type is common in the ecs context above. + 5. Always make sure the value for event.category and event.type is strictly from the allowed categories and allowed types in the ecs context above. + 6. The value argument for the append processor is an array of one or more types and categories. + + You ALWAYS follow these guidelines when writing your response: + + - You can add as many append processors you need to cover all the unique combinations that you detected. + - If conditions should always use a ? 
character when accessing nested fields, in case the field might not always be available, see example processors above. + - When an if condition is not needed the argument should not be used for the processor object. + - When using a range based if condition like > 0, you first need to check that the field is not null, for example: ctx.somefield?.production != null && ctx.somefield?.production > 0 + - Do not respond with anything except the array of processors as a valid JSON objects enclosed with 3 backticks (\`), see example response below. + + + Example response format: + + A: Please find the Categorization processors below: + \`\`\`json + {ex_answer} + \`\`\` + `, + ], + ['ai', 'Please find the Categorization processors below:'], +]); + +export const CATEGORIZATION_REVIEW_PROMPT = ChatPromptTemplate.fromMessages([ + [ + 'system', + `You are a helpful, expert assistant on Elasticsearch Ingest Pipelines, focusing on adding improvements to the provided array of processors and reviewing the current results. + + Here is some context that you can reference for your task, read it carefully as you will get questions about it later: + + + {current_processors} + + + {compatibility_matrix} + + `, + ], + [ + 'human', + `Testing my current pipeline returned me with the results: + + {pipeline_results} + + + Please review the pipeline results and the array of current processors, ensuring to identify all the possible event.type and event.category combinatinations that would match each pipeline result document. If any event.type or event.category is missing from any of the pipeline results, add them by updating the array of current processors and return the whole updated array of processors. + + For each pipeline result you review step by step, remember the below steps: + 1. Check if each of the pipeline results have at least one event.category and event.type added to them. 
If not then try to correlate the results with the current processors and see if either a new append processor should be added to the list with a matching if condition, or if any of the if conditions should be modified as they are not matching that is in the results. + 2. If the results have at least one event.category and event.type value, see if more of them could match, if so it could be added to the relevant append processor which added the initial values. + 3. When adding more values to event.type and event.category please keep in mind the compatibility_matrix in the context to make sure only compatible event.type , event.category pairs that are compatible are created. + 4. Ensure that all append processors has allow_duplicates: false, as seen in the example response. + + You ALWAYS follow these guidelines when writing your response: + + - You can use as many append processors as you need to add all relevant ECS categories and types combinations. + - If conditions should always use a ? character when accessing nested fields, in case the field might not always be available, see example processors above. + - When an if condition is not needed the argument should not be used for the processor object. + - If not updates are needed you respond with the initially provided current processors. + - Each append processor needs to have the allow_duplicates: false argument, as shown in the below example response. + - Do not respond with anything except updated array of processors as a valid JSON object enclosed with 3 backticks (\`), see example response below. 
+ + + Example response format: + + A: Please find the updated ECS categorization append processors below: + \`\`\` + {ex_answer} + \`\`\` + `, + ], + ['ai', 'Please find the updated ECS categorization append processors below:'], +]); + +export const CATEGORIZATION_VALIDATION_PROMPT = ChatPromptTemplate.fromMessages([ + [ + 'system', + `You are a helpful, expert assistant on Elasticsearch Ingest Pipelines, focusing on resolving errors and issues with append processors used for categorization. + Here is some context that you can reference for your task, read it carefully as you will get questions about it later: + + + {current_processors} + + + {compatible_types} + + + {invalid_categorization} + + "], + ["human", "Please go through each error above, carefully review the provided current processors, and resolve the most likely cause to the supplied error by returning an updated version of the current_processors. + + Follow these steps to help resolve the current ingest pipeline issues: + 1. Try to fix all related errors before responding. + 2. Apply all fixes to the provided array of current append processors. + 3. If you do not know how to fix an error, then continue to the next and return the complete updated array of current append processors. + + You ALWAYS follow these guidelines when writing your response: + + - If the error complains about having event.type or event.category not in the allowed values , fix the corresponding append processors to use the allowed values mentioned in the error. + - If the error is about event.type not compatible with any event.category, please refer to the 'compatible_types' in the context to fix the corresponding append processors to use valid combination of event.type and event.category + - Do not respond with anything except the complete updated array of processors as a valid JSON object enclosed with 3 backticks (\`), see example response below. 
+ + + Example response format: + + A: Please find the updated ECS categorization append processors below: + \`\`\`json + {ex_answer} + \`\`\` + `, + ], + ['ai', 'Please find the updated ECS categorization append processors below:'], +]); + +export const CATEGORIZATION_ERROR_PROMPT = ChatPromptTemplate.fromMessages([ + [ + 'system', + `You are a helpful, expert assistant on Elasticsearch Ingest Pipelines, focusing on resolving errors and issues with append processors used for categorization. + Here is some context that you can reference for your task, read it carefully as you will get questions about it later: + + + {current_processors} + + + {errors} + + `, + ], + [ + 'human', + `Please go through each error above, carefully review the provided current processors, and resolve the most likely cause to the supplied error by returning an updated version of the current_processors. + + Follow these steps to help resolve the current ingest pipeline issues: + 1. Try to fix all related errors before responding. + 2. Apply all fixes to the provided array of current append processors. + 3. If you do not know how to fix an error, then continue to the next and return the complete updated array of current append processors. + + You ALWAYS follow these guidelines when writing your response: + + - When checking for the existance of multiple values in a single variable, use this format: "if": "['value1', 'value2'].contains(ctx.{package_name}?.{data_stream_name}?.field)" + - If conditions should never be in a format like "if": "true". If it exist in the current array of append processors, remove only the redundant if condition. + - If the error complains that it is a null point exception, always ensure the if conditions uses a ? when accessing nested fields. For example ctx.field1?.nestedfield1?.nestedfield2. + - If the error complains about having values not in the list of allowed values , fix the corresponding append processors to use the allowed values as mentioned in the error. 
+ - Do not respond with anything except the complete updated array of processors as a valid JSON object enclosed with 3 backticks (\`), see example response below. + + + Example response format: + + A: Please find the updated ECS categorization append processors below: + \`\`\`json + {ex_answer} + \`\`\` + `, + ], + ['ai', 'Please find the updated ECS categorization append processors below:'], +]); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts new file mode 100644 index 0000000000000..cf317043be3c0 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts @@ -0,0 +1,33 @@ +import { JsonOutputParser } from '@langchain/core/output_parsers'; +import { CATEGORIZATION_REVIEW_PROMPT } from './prompts'; +import { getModel } from '../../providers/bedrock'; +import { CategorizationState } from '../../types'; +import { combineProcessors } from '../../util/pipeline'; +import { ECS_EVENT_TYPES_PER_CATEGORY } from './constants'; +import { Pipeline } from '../../../common/types'; + +export async function handleReview(state: CategorizationState) { + const categorizationReviewPrompt = CATEGORIZATION_REVIEW_PROMPT; + const model = getModel(); + console.log('testing cat review'); + + const outputParser = new JsonOutputParser(); + const categorizationReview = categorizationReviewPrompt.pipe(model).pipe(outputParser); + + const currentProcessors = (await categorizationReview.invoke({ + current_processors: JSON.stringify(state.currentProcessors, null, 2), + pipeline_results: JSON.stringify(state.pipelineResults, null, 2), + ex_answer: state?.exAnswer, + package_name: state?.packageName, + compatibility_matrix: JSON.stringify(ECS_EVENT_TYPES_PER_CATEGORY, null, 2), + })) as any[]; + + const currentPipeline = combineProcessors(state.initialPipeline as Pipeline, currentProcessors); + + return { + currentPipeline, + 
currentProcessors, + reviewed: true, + lastExecutedChain: 'review', + }; +} diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/validate.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/validate.ts new file mode 100644 index 0000000000000..9f79fb20c393f --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/validate.ts @@ -0,0 +1,139 @@ +import { CategorizationState } from '../../types'; +import { ECS_EVENT_TYPES_PER_CATEGORY, EVENT_CATEGORIES, EVENT_TYPES } from './constants'; + +interface Event { + type?: string[]; + category?: string[]; +} + +interface PipelineResult { + event?: Event; +} + +interface ErrorMessage { + error: string; +} + +export function handleCategorizationValidation(state: CategorizationState): { + invalidCategorization: ErrorMessage[]; + lastExecutedChain: string; +} { + const errors: ErrorMessage[] = []; + const pipelineResults = state.pipelineResults as PipelineResult[]; + + // Loops through the pipeline results to find invalid categories and types + for (const doc of pipelineResults) { + let types: string[] = []; + let categories: string[] = []; + if (doc?.event?.type) { + types = doc.event.type; + } + if (doc?.event?.category) { + categories = doc.event.category; + } + + const invalidCategories = findInvalidCategories(categories); + const invalidTypes = findInvalidTypes(types); + + if (invalidCategories.length > 0) { + errors.push(createErrorMessage('event.category', invalidCategories, EVENT_CATEGORIES)); + } + + if (invalidTypes.length > 0) { + errors.push(createErrorMessage('event.type', invalidTypes, EVENT_TYPES)); + } + + // Compatibility check is done only on valid categories and types + const validCategories = categories.filter((x) => !invalidCategories.includes(x)); + const validTypes = types.filter((x) => !invalidTypes.includes(x)); + + const compatibleErrors = getTypeCategoryIncompatibleError(validCategories, validTypes); + for (const ce of 
compatibleErrors) { + errors.push(ce); + } + } + + return { + invalidCategorization: errors, + lastExecutedChain: 'handleCategorizationValidation', + }; +} + +function createErrorMessage( + field: string, + errorList: string[], + allowedValues: string[] +): ErrorMessage { + return { + error: `field ${field}'s values (${errorList.join( + ', ' + )}) is not one of the allowed values (${allowedValues.join(', ')})`, + }; +} + +function findInvalidCategories(categories: string[]): string[] { + const invalidCategories: string[] = []; + for (const c of categories) { + if (!EVENT_CATEGORIES.includes(c)) { + invalidCategories.push(c); + } + } + return invalidCategories; +} + +function findInvalidTypes(types: string[]): string[] { + const invalidTypes: string[] = []; + for (const t of types) { + if (!EVENT_TYPES.includes(t)) { + invalidTypes.push(t); + } + } + return invalidTypes; +} + +type EventCategories = + | 'api' + | 'authentication' + | 'configuration' + | 'database' + | 'driver' + | 'email' + | 'file' + | 'host' + | 'iam' + | 'intrusion_detection' + | 'library' + | 'network' + | 'package' + | 'process' + | 'registry' + | 'session' + | 'threat' + | 'user' + | 'vulnerability' + | 'web'; + +function getTypeCategoryIncompatibleError(categories: string[], types: string[]): ErrorMessage[] { + const errors: ErrorMessage[] = []; + let unmatchedTypes = new Set(types); + const matchCategories = new Set(categories); + let categoryExists = false; + + for (const c of matchCategories) { + if (c in ECS_EVENT_TYPES_PER_CATEGORY) { + categoryExists = true; + const matchTypes = new Set(ECS_EVENT_TYPES_PER_CATEGORY[c as EventCategories]); + unmatchedTypes = new Set([...unmatchedTypes].filter((x) => !matchTypes.has(x))); + } + } + + if (categoryExists && unmatchedTypes.size > 0) { + errors.push({ + error: `event.type (${[...unmatchedTypes].join( + ', ' + )}) not compatible with any of the event.category (${[...matchCategories].join(', ')})`, + }); + } + + return errors; +} diff --git 
a/x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts new file mode 100644 index 0000000000000..71c71344c0f37 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts @@ -0,0 +1,3863 @@ +interface EcsFields { + [key: string]: any; +} + +export const ECS_FULL: EcsFields = { + '@timestamp': 'Date/time when the event originated.', + 'agent.build.original': 'Extended build information for the agent.', + 'agent.ephemeral_id': 'Ephemeral identifier of this agent.', + 'agent.id': 'Unique identifier of this agent.', + 'agent.name': 'Custom name of the agent.', + 'agent.type': 'Type of the agent.', + 'agent.version': 'Version of the agent.', + 'client.address': 'Client network address.', + 'client.as.number': 'Unique number allocated to the autonomous system.', + 'client.as.organization.name': 'Organization name.', + 'client.bytes': 'Bytes sent from the client to the server.', + 'client.domain': 'The domain name of the client.', + 'client.geo.city_name': 'City name.', + 'client.geo.continent_code': 'Continent code.', + 'client.geo.continent_name': 'Name of the continent.', + 'client.geo.country_iso_code': 'Country ISO code.', + 'client.geo.country_name': 'Country name.', + 'client.geo.location': 'Longitude and latitude.', + 'client.geo.name': 'User-defined description of a location.', + 'client.geo.postal_code': 'Postal code.', + 'client.geo.region_iso_code': 'Region ISO code.', + 'client.geo.region_name': 'Region name.', + 'client.geo.timezone': 'Time zone.', + 'client.ip': 'IP address of the client.', + 'client.mac': 'MAC address of the client.', + 'client.nat.ip': 'Client NAT ip address', + 'client.nat.port': 'Client NAT port', + 'client.packets': 'Packets sent from the client to the server.', + 'client.port': 'Port of the client.', + 'client.registered_domain': 'The highest registered client domain, stripped of the subdomain.', + 'client.subdomain': 'The 
subdomain of the domain.', + 'client.top_level_domain': 'The effective top level domain (com, org, net, co.uk).', + 'client.user.domain': 'Name of the directory the user is a member of.', + 'client.user.email': 'User email address.', + 'client.user.full_name': 'Users full name, if available.', + 'client.user.group.domain': 'Name of the directory the group is a member of.', + 'client.user.group.id': 'Unique identifier for the group on the system/platform.', + 'client.user.group.name': 'Name of the group.', + 'client.user.hash': 'Unique user hash to correlate information for a user in anonymized form.', + 'client.user.id': 'Unique identifier of the user.', + 'client.user.name': 'Short name or login of the user.', + 'client.user.roles': 'Array of user roles at the time of the event.', + 'cloud.account.id': 'The cloud account or organization id.', + 'cloud.account.name': 'The cloud account name.', + 'cloud.availability_zone': + 'Availability zone in which this host, resource, or service is located.', + 'cloud.instance.id': 'Instance ID of the host machine.', + 'cloud.instance.name': 'Instance name of the host machine.', + 'cloud.machine.type': 'Machine type of the host machine.', + 'cloud.origin.account.id': 'The cloud account or organization id.', + 'cloud.origin.account.name': 'The cloud account name.', + 'cloud.origin.availability_zone': + 'Availability zone in which this host, resource, or service is located.', + 'cloud.origin.instance.id': 'Instance ID of the host machine.', + 'cloud.origin.instance.name': 'Instance name of the host machine.', + 'cloud.origin.machine.type': 'Machine type of the host machine.', + 'cloud.origin.project.id': 'The cloud project id.', + 'cloud.origin.project.name': 'The cloud project name.', + 'cloud.origin.provider': 'Name of the cloud provider.', + 'cloud.origin.region': 'Region in which this host, resource, or service is located.', + 'cloud.origin.service.name': 'The cloud service name.', + 'cloud.project.id': 'The cloud project 
id.', + 'cloud.project.name': 'The cloud project name.', + 'cloud.provider': 'Name of the cloud provider.', + 'cloud.region': 'Region in which this host, resource, or service is located.', + 'cloud.service.name': 'The cloud service name.', + 'cloud.target.account.id': 'The cloud account or organization id.', + 'cloud.target.account.name': 'The cloud account name.', + 'cloud.target.availability_zone': + 'Availability zone in which this host, resource, or service is located.', + 'cloud.target.instance.id': 'Instance ID of the host machine.', + 'cloud.target.instance.name': 'Instance name of the host machine.', + 'cloud.target.machine.type': 'Machine type of the host machine.', + 'cloud.target.project.id': 'The cloud project id.', + 'cloud.target.project.name': 'The cloud project name.', + 'cloud.target.provider': 'Name of the cloud provider.', + 'cloud.target.region': 'Region in which this host, resource, or service is located.', + 'cloud.target.service.name': 'The cloud service name.', + 'container.cpu.usage': 'Percent CPU used, between 0 and 1.', + 'container.disk.read.bytes': 'The number of bytes read by all disks.', + 'container.disk.write.bytes': 'The number of bytes written on all disks.', + 'container.id': 'Unique container id.', + 'container.image.hash.all': 'An array of digests of the image the container was built on.', + 'container.image.name': 'Name of the image the container was built on.', + 'container.image.tag': 'Container image tags.', + 'container.labels': 'Image labels.', + 'container.memory.usage': 'Percent memory used, between 0 and 1.', + 'container.name': 'Container name.', + 'container.network.egress.bytes': 'The number of bytes sent on all network interfaces.', + 'container.network.ingress.bytes': 'The number of bytes received on all network interfaces.', + 'container.runtime': 'Runtime managing this container.', + 'container.security_context.privileged': + 'Indicates whether the container is running in privileged mode.', + 
'data_stream.dataset': + 'The field can contain anything that makes sense to signify the source of the data.', + 'data_stream.namespace': + 'A user defined namespace. Namespaces are useful to allow grouping of data.', + 'data_stream.type': 'An overarching type for the data stream.', + 'destination.address': 'Destination network address.', + 'destination.as.number': 'Unique number allocated to the autonomous system.', + 'destination.as.organization.name': 'Organization name.', + 'destination.bytes': 'Bytes sent from the destination to the source.', + 'destination.domain': 'The domain name of the destination.', + 'destination.geo.city_name': 'City name.', + 'destination.geo.continent_code': 'Continent code.', + 'destination.geo.continent_name': 'Name of the continent.', + 'destination.geo.country_iso_code': 'Country ISO code.', + 'destination.geo.country_name': 'Country name.', + 'destination.geo.location': 'Longitude and latitude.', + 'destination.geo.name': 'User-defined description of a location.', + 'destination.geo.postal_code': 'Postal code.', + 'destination.geo.region_iso_code': 'Region ISO code.', + 'destination.geo.region_name': 'Region name.', + 'destination.geo.timezone': 'Time zone.', + 'destination.ip': 'IP address of the destination.', + 'destination.mac': 'MAC address of the destination.', + 'destination.nat.ip': 'Destination NAT ip', + 'destination.nat.port': 'Destination NAT Port', + 'destination.packets': 'Packets sent from the destination to the source.', + 'destination.port': 'Port of the destination.', + 'destination.registered_domain': + 'The highest registered destination domain, stripped of the subdomain.', + 'destination.subdomain': 'The subdomain of the domain.', + 'destination.top_level_domain': 'The effective top level domain (com, org, net, co.uk).', + 'destination.user.domain': 'Name of the directory the user is a member of.', + 'destination.user.email': 'User email address.', + 'destination.user.full_name': 'Users full name, if 
available.', + 'destination.user.group.domain': 'Name of the directory the group is a member of.', + 'destination.user.group.id': 'Unique identifier for the group on the system/platform.', + 'destination.user.group.name': 'Name of the group.', + 'destination.user.hash': + 'Unique user hash to correlate information for a user in anonymized form.', + 'destination.user.id': 'Unique identifier of the user.', + 'destination.user.name': 'Short name or login of the user.', + 'destination.user.roles': 'Array of user roles at the time of the event.', + 'device.id': 'The unique identifier of a device.', + 'device.manufacturer': 'The vendor name of the device manufacturer.', + 'device.model.identifier': 'The machine readable identifier of the device model.', + 'device.model.name': 'The human readable marketing name of the device model.', + 'dll.code_signature.digest_algorithm': 'Hashing algorithm used to sign the process.', + 'dll.code_signature.exists': 'Boolean to capture if a signature is present.', + 'dll.code_signature.signing_id': 'The identifier used to sign the process.', + 'dll.code_signature.status': 'Additional information about the certificate status.', + 'dll.code_signature.subject_name': 'Subject name of the code signer', + 'dll.code_signature.team_id': 'The team identifier used to sign the process.', + 'dll.code_signature.timestamp': 'When the signature was generated and signed.', + 'dll.code_signature.trusted': 'Stores the trust status of the certificate chain.', + 'dll.code_signature.valid': + 'Boolean to capture if the digital signature is verified against the binary content.', + 'dll.hash.md5': 'MD5 hash.', + 'dll.hash.sha1': 'SHA1 hash.', + 'dll.hash.sha256': 'SHA256 hash.', + 'dll.hash.sha384': 'SHA384 hash.', + 'dll.hash.sha512': 'SHA512 hash.', + 'dll.hash.ssdeep': 'SSDEEP hash.', + 'dll.hash.tlsh': 'TLSH hash.', + 'dll.name': 'Name of the library.', + 'dll.path': 'Full file path of the library.', + 'dll.pe.architecture': 'CPU architecture target for 
the file.', + 'dll.pe.company': 'Internal company name of the file, provided at compile-time.', + 'dll.pe.description': 'Internal description of the file, provided at compile-time.', + 'dll.pe.file_version': 'Process name.', + 'dll.pe.go_import_hash': 'A hash of the Go language imports in a PE file.', + 'dll.pe.go_imports': 'List of imported Go language element names and types.', + 'dll.pe.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'dll.pe.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'dll.pe.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', + 'dll.pe.imphash': 'A hash of the imports in a PE file.', + 'dll.pe.import_hash': 'A hash of the imports in a PE file.', + 'dll.pe.imports': 'List of imported element names and types.', + 'dll.pe.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'dll.pe.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'dll.pe.original_file_name': 'Internal name of the file, provided at compile-time.', + 'dll.pe.pehash': 'A hash of the PE header and data from one or more PE sections.', + 'dll.pe.product': 'Internal product name of the file, provided at compile-time.', + 'dll.pe.sections': 'Section information of the PE file.', + 'dll.pe.sections.entropy': 'Shannon entropy calculation from the section.', + 'dll.pe.sections.name': 'PE Section List name.', + 'dll.pe.sections.physical_size': 'PE Section List physical size.', + 'dll.pe.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'dll.pe.sections.virtual_size': + 'PE Section List virtual size. 
This is always the same as `physical_size`.', + 'dns.answers': 'Array of DNS answers.', + 'dns.answers.class': 'The class of DNS data contained in this resource record.', + 'dns.answers.data': 'The data describing the resource.', + 'dns.answers.name': 'The domain name to which this resource record pertains.', + 'dns.answers.ttl': + 'The time interval in seconds that this resource record may be cached before it should be discarded.', + 'dns.answers.type': 'The type of data contained in this resource record.', + 'dns.header_flags': 'Array of DNS header flags.', + 'dns.id': + 'The DNS packet identifier assigned by the program that generated the query. The identifier is copied to the response.', + 'dns.op_code': 'The DNS operation code that specifies the kind of query in the message.', + 'dns.question.class': 'The class of records being queried.', + 'dns.question.name': 'The name being queried.', + 'dns.question.registered_domain': 'The highest registered domain, stripped of the subdomain.', + 'dns.question.subdomain': 'The subdomain of the domain.', + 'dns.question.top_level_domain': 'The effective top level domain (com, org, net, co.uk).', + 'dns.question.type': 'The type of record being queried.', + 'dns.resolved_ip': 'Array containing all IPs seen in answers.data', + 'dns.response_code': 'The DNS response code.', + 'dns.type': 'The type of DNS event captured, query or answer.', + 'ecs.version': 'ECS version this event conforms to.', + 'email.attachments': 'List of objects describing the attachments.', + 'email.attachments.file.extension': 'Attachment file extension.', + 'email.attachments.file.hash.md5': 'MD5 hash.', + 'email.attachments.file.hash.sha1': 'SHA1 hash.', + 'email.attachments.file.hash.sha256': 'SHA256 hash.', + 'email.attachments.file.hash.sha384': 'SHA384 hash.', + 'email.attachments.file.hash.sha512': 'SHA512 hash.', + 'email.attachments.file.hash.ssdeep': 'SSDEEP hash.', + 'email.attachments.file.hash.tlsh': 'TLSH hash.', + 
'email.attachments.file.mime_type': 'MIME type of the attachment file.', + 'email.attachments.file.name': 'Name of the attachment file.', + 'email.attachments.file.size': 'Attachment file size.', + 'email.bcc.address': 'Email address of BCC recipient', + 'email.cc.address': 'Email address of CC recipient', + 'email.content_type': 'MIME type of the email message.', + 'email.delivery_timestamp': 'Date and time when message was delivered.', + 'email.direction': 'Direction of the message.', + 'email.from.address': 'The senders email address.', + 'email.local_id': 'Unique identifier given by the source.', + 'email.message_id': 'Value from the Message-ID header.', + 'email.origination_timestamp': 'Date and time the email was composed.', + 'email.reply_to.address': 'Address replies should be delivered to.', + 'email.sender.address': 'Address of the message sender.', + 'email.subject': 'The subject of the email message.', + 'email.to.address': 'Email address of recipient', + 'email.x_mailer': 'Application that drafted email.', + 'error.code': 'Error code describing the error.', + 'error.id': 'Unique identifier for the error.', + 'error.message': 'Error message.', + 'error.stack_trace': 'The stack trace of this error in plain text.', + 'error.type': 'The type of the error, for example the class name of the exception.', + 'event.action': 'The action captured by the event.', + 'event.agent_id_status': 'Validation status of the events agent.id field.', + 'event.category': 'Event category. 
The second categorization field in the hierarchy.', + 'event.code': 'Identification code for this event.', + 'event.created': 'Time when the event was first read by an agent or by your pipeline.', + 'event.dataset': 'Name of the dataset.', + 'event.duration': 'Duration of the event in nanoseconds.', + 'event.end': + '`event.end` contains the date when the event ended or when the activity was last observed.', + 'event.hash': + 'Hash (perhaps logstash fingerprint) of raw field to be able to demonstrate log integrity.', + 'event.id': 'Unique ID to describe the event.', + 'event.ingested': 'Timestamp when an event arrived in the central data store.', + 'event.kind': 'The kind of the event. The highest categorization field in the hierarchy.', + 'event.module': 'Name of the module this data is coming from.', + 'event.original': 'Raw text message of entire event.', + 'event.outcome': + 'The outcome of the event. The lowest level categorization field in the hierarchy.', + 'event.provider': 'Source of the event.', + 'event.reason': 'Reason why this event happened, according to the source', + 'event.reference': 'Event reference URL', + 'event.risk_score': + 'Risk score or priority of the event (e.g. security solutions). Use your systems original value here.', + 'event.risk_score_norm': 'Normalized risk score or priority of the event (0-100).', + 'event.sequence': 'Sequence number of the event.', + 'event.severity': 'Numeric severity of the event.', + 'event.start': + '`event.start` contains the date when the event started or when the activity was first observed.', + 'event.timezone': 'Event time zone.', + 'event.type': 'Event type. 
The third categorization field in the hierarchy.', + 'event.url': 'Event investigation URL', + 'faas.coldstart': 'Boolean value indicating a cold start of a function.', + 'faas.execution': 'The execution ID of the current function execution.', + 'faas.id': 'The unique identifier of a serverless function.', + 'faas.name': 'The name of a serverless function.', + 'faas.trigger.request_id': 'The ID of the trigger request , message, event, etc.', + 'faas.trigger.type': 'The trigger for the function execution.', + 'faas.version': 'The version of a serverless function.', + 'file.accessed': 'Last time the file was accessed.', + 'file.attributes': 'Array of file attributes.', + 'file.code_signature.digest_algorithm': 'Hashing algorithm used to sign the process.', + 'file.code_signature.exists': 'Boolean to capture if a signature is present.', + 'file.code_signature.signing_id': 'The identifier used to sign the process.', + 'file.code_signature.status': 'Additional information about the certificate status.', + 'file.code_signature.subject_name': 'Subject name of the code signer', + 'file.code_signature.team_id': 'The team identifier used to sign the process.', + 'file.code_signature.timestamp': 'When the signature was generated and signed.', + 'file.code_signature.trusted': 'Stores the trust status of the certificate chain.', + 'file.code_signature.valid': + 'Boolean to capture if the digital signature is verified against the binary content.', + 'file.created': 'File creation time.', + 'file.ctime': 'Last time the file attributes or metadata changed.', + 'file.device': 'Device that is the source of the file.', + 'file.directory': 'Directory where the file is located.', + 'file.drive_letter': 'Drive letter where the file is located.', + 'file.elf.architecture': 'Machine architecture of the ELF file.', + 'file.elf.byte_order': 'Byte sequence of ELF file.', + 'file.elf.cpu_type': 'CPU type of the ELF file.', + 'file.elf.creation_date': 'Build or compile date.', + 
'file.elf.exports': 'List of exported element names and types.', + 'file.elf.go_import_hash': 'A hash of the Go language imports in an ELF file.', + 'file.elf.go_imports': 'List of imported Go language element names and types.', + 'file.elf.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'file.elf.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'file.elf.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', + 'file.elf.header.abi_version': 'Version of the ELF Application Binary Interface (ABI).', + 'file.elf.header.class': 'Header class of the ELF file.', + 'file.elf.header.data': 'Data table of the ELF header.', + 'file.elf.header.entrypoint': 'Header entrypoint of the ELF file.', + 'file.elf.header.object_version': '"0x1" for original ELF files.', + 'file.elf.header.os_abi': 'Application Binary Interface (ABI) of the Linux OS.', + 'file.elf.header.type': 'Header type of the ELF file.', + 'file.elf.header.version': 'Version of the ELF header.', + 'file.elf.import_hash': 'A hash of the imports in an ELF file.', + 'file.elf.imports': 'List of imported element names and types.', + 'file.elf.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'file.elf.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'file.elf.sections': 'Section information of the ELF file.', + 'file.elf.sections.chi2': 'Chi-square probability distribution of the section.', + 'file.elf.sections.entropy': 'Shannon entropy calculation from the section.', + 'file.elf.sections.flags': 'ELF Section List flags.', + 'file.elf.sections.name': 'ELF Section List name.', + 'file.elf.sections.physical_offset': 'ELF Section List offset.', + 'file.elf.sections.physical_size': 'ELF Section List physical size.', + 'file.elf.sections.type': 'ELF Section List 
type.', + 'file.elf.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'file.elf.sections.virtual_address': 'ELF Section List virtual address.', + 'file.elf.sections.virtual_size': 'ELF Section List virtual size.', + 'file.elf.segments': 'ELF object segment list.', + 'file.elf.segments.sections': 'ELF object segment sections.', + 'file.elf.segments.type': 'ELF object segment type.', + 'file.elf.shared_libraries': 'List of shared libraries used by this ELF object.', + 'file.elf.telfhash': 'telfhash hash for ELF file.', + 'file.extension': 'File extension, excluding the leading dot.', + 'file.fork_name': 'A fork is additional data associated with a filesystem object.', + 'file.gid': 'Primary group ID (GID) of the file.', + 'file.group': 'Primary group name of the file.', + 'file.hash.md5': 'MD5 hash.', + 'file.hash.sha1': 'SHA1 hash.', + 'file.hash.sha256': 'SHA256 hash.', + 'file.hash.sha384': 'SHA384 hash.', + 'file.hash.sha512': 'SHA512 hash.', + 'file.hash.ssdeep': 'SSDEEP hash.', + 'file.hash.tlsh': 'TLSH hash.', + 'file.inode': 'Inode representing the file in the filesystem.', + 'file.macho.go_import_hash': 'A hash of the Go language imports in a Mach-O file.', + 'file.macho.go_imports': 'List of imported Go language element names and types.', + 'file.macho.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'file.macho.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'file.macho.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', + 'file.macho.import_hash': 'A hash of the imports in a Mach-O file.', + 'file.macho.imports': 'List of imported element names and types.', + 'file.macho.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'file.macho.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element 
names and types.', + 'file.macho.sections': 'Section information of the Mach-O file.', + 'file.macho.sections.entropy': 'Shannon entropy calculation from the section.', + 'file.macho.sections.name': 'Mach-O Section List name.', + 'file.macho.sections.physical_size': 'Mach-O Section List physical size.', + 'file.macho.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'file.macho.sections.virtual_size': + 'Mach-O Section List virtual size. This is always the same as `physical_size`.', + 'file.macho.symhash': 'A hash of the imports in a Mach-O file.', + 'file.mime_type': 'Media type of file, document, or arrangement of bytes.', + 'file.mode': 'Mode of the file in octal representation.', + 'file.mtime': 'Last time the file content was modified.', + 'file.name': 'Name of the file including the extension, without the directory.', + 'file.owner': 'File owners username.', + 'file.path': 'Full path to the file, including the file name.', + 'file.pe.architecture': 'CPU architecture target for the file.', + 'file.pe.company': 'Internal company name of the file, provided at compile-time.', + 'file.pe.description': 'Internal description of the file, provided at compile-time.', + 'file.pe.file_version': 'Process name.', + 'file.pe.go_import_hash': 'A hash of the Go language imports in a PE file.', + 'file.pe.go_imports': 'List of imported Go language element names and types.', + 'file.pe.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'file.pe.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'file.pe.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', + 'file.pe.imphash': 'A hash of the imports in a PE file.', + 'file.pe.import_hash': 'A hash of the imports in a PE file.', + 'file.pe.imports': 'List of imported element names and types.', + 'file.pe.imports_names_entropy': + 'Shannon entropy calculation from the list of 
imported element names and types.', + 'file.pe.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'file.pe.original_file_name': 'Internal name of the file, provided at compile-time.', + 'file.pe.pehash': 'A hash of the PE header and data from one or more PE sections.', + 'file.pe.product': 'Internal product name of the file, provided at compile-time.', + 'file.pe.sections': 'Section information of the PE file.', + 'file.pe.sections.entropy': 'Shannon entropy calculation from the section.', + 'file.pe.sections.name': 'PE Section List name.', + 'file.pe.sections.physical_size': 'PE Section List physical size.', + 'file.pe.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'file.pe.sections.virtual_size': + 'PE Section List virtual size. This is always the same as `physical_size`.', + 'file.size': 'File size in bytes.', + 'file.target_path': 'Target path for symlinks.', + 'file.type': 'File type (file, dir, or symlink).', + 'file.uid': 'The user ID (UID) or security identifier (SID) of the file owner.', + 'file.x509.alternative_names': 'List of subject alternative names (SAN).', + 'file.x509.issuer.common_name': 'List of common name (CN) of issuing certificate authority.', + 'file.x509.issuer.country': 'List of country (C) codes', + 'file.x509.issuer.distinguished_name': + 'Distinguished name (DN) of issuing certificate authority.', + 'file.x509.issuer.locality': 'List of locality names (L)', + 'file.x509.issuer.organization': 'List of organizations (O) of issuing certificate authority.', + 'file.x509.issuer.organizational_unit': + 'List of organizational units (OU) of issuing certificate authority.', + 'file.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', + 'file.x509.not_after': 'Time at which the certificate is no longer considered valid.', + 'file.x509.not_before': 'Time at which the certificate is first considered 
valid.', + 'file.x509.public_key_algorithm': 'Algorithm used to generate the public key.', + 'file.x509.public_key_curve': + 'The curve used by the elliptic curve public key algorithm. This is algorithm specific', + 'file.x509.public_key_exponent': + 'Exponent used to derive the public key. This is algorithm specific', + 'file.x509.public_key_size': 'The size of the public key space in bits.', + 'file.x509.serial_number': 'Unique serial number issued by the certificate authority.', + 'file.x509.signature_algorithm': 'Identifier for certificate signature algorithm.', + 'file.x509.subject.common_name': 'List of common names (CN) of subject.', + 'file.x509.subject.country': 'List of country (C) code', + 'file.x509.subject.distinguished_name': + 'Distinguished name (DN) of the certificate subject entity', + 'file.x509.subject.locality': 'List of locality names (L)', + 'file.x509.subject.organization': 'List of organizations (O) of subject.', + 'file.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', + 'file.x509.subject.state_or_province': 'List of state or province names (ST, S, or P)', + 'file.x509.version_number': 'Version of x509 format.', + 'group.domain': 'Name of the directory the group is a member of.', + 'group.id': 'Unique identifier for the group on the system/platform.', + 'group.name': 'Name of the group.', + 'host.architecture': 'Operating system architecture.', + 'host.boot.id': 'Linux boot uuid taken from /proc/sys/kernel/random/boot_id', + 'host.cpu.usage': 'Percent CPU used, between 0 and 1.', + 'host.disk.read.bytes': 'The number of bytes read by all disks.', + 'host.disk.write.bytes': 'The number of bytes written on all disks.', + 'host.domain': 'Name of the directory the group is a member of.', + 'host.geo.city_name': 'City name.', + 'host.geo.continent_code': 'Continent code.', + 'host.geo.continent_name': 'Name of the continent.', + 'host.geo.country_iso_code': 'Country ISO code.', + 'host.geo.country_name': 
'Country name.', + 'host.geo.location': 'Longitude and latitude.', + 'host.geo.name': 'User-defined description of a location.', + 'host.geo.postal_code': 'Postal code.', + 'host.geo.region_iso_code': 'Region ISO code.', + 'host.geo.region_name': 'Region name.', + 'host.geo.timezone': 'Time zone.', + 'host.hostname': 'Hostname of the host.', + 'host.id': 'Unique host id.', + 'host.ip': 'Host ip addresses.', + 'host.mac': 'Host MAC addresses.', + 'host.name': 'Name of the host.', + 'host.network.egress.bytes': 'The number of bytes sent on all network interfaces.', + 'host.network.egress.packets': 'The number of packets sent on all network interfaces.', + 'host.network.ingress.bytes': 'The number of bytes received on all network interfaces.', + 'host.network.ingress.packets': 'The number of packets received on all network interfaces.', + 'host.os.family': 'OS family (such as redhat, debian, freebsd, windows).', + 'host.os.full': 'Operating system name, including the version or code name.', + 'host.os.kernel': 'Operating system kernel version as a raw string.', + 'host.os.name': 'Operating system name, without the version.', + 'host.os.platform': 'Operating system platform (such centos, ubuntu, windows).', + 'host.os.type': + 'Which commercial OS family (one of: linux, macos, unix, windows, ios or android).', + 'host.os.version': 'Operating system version as a raw string.', + 'host.pid_ns_ino': 'Pid namespace inode', + 'host.risk.calculated_level': + 'A risk classification level calculated by an internal system as part of entity analytics and entity risk scoring', + 'host.risk.calculated_score': + 'A risk classification score calculated by an internal system as part of entity analytics and entity risk scoring', + 'host.risk.calculated_score_norm': 'A normalized risk score calculated by an internal system', + 'host.risk.static_level': + 'A risk classification level obtained from outside the system, such as from some external Threat Intelligence Platform', + 
'host.risk.static_score': + 'A risk classification score obtained from outside the system, such as from some external Threat Intelligence Platform', + 'host.risk.static_score_norm': 'A normalized risk score calculated by an external system.', + 'host.type': 'Type of host.', + 'host.uptime': 'Seconds the host has been up.', + 'http.request.body.bytes': 'Size in bytes of the request body.', + 'http.request.body.content': 'The full HTTP request body.', + 'http.request.bytes': 'Total size in bytes of the request (body and headers).', + 'http.request.id': 'HTTP request ID.', + 'http.request.method': 'HTTP request method.', + 'http.request.mime_type': 'Mime type of the body of the request.', + 'http.request.referrer': 'Referrer for this HTTP request.', + 'http.response.body.bytes': 'Size in bytes of the response body.', + 'http.response.body.content': 'The full HTTP response body.', + 'http.response.bytes': 'Total size in bytes of the response (body and headers).', + 'http.response.mime_type': 'Mime type of the body of the response.', + 'http.response.status_code': 'HTTP response status code.', + 'http.version': 'HTTP version.', + labels: 'Custom key/value pairs.', + 'log.file.path': 'Full path to the log file this event came from.', + 'log.level': 'Log level of the log event.', + 'log.logger': 'Name of the logger.', + 'log.origin.file.line': 'The line number of the file which originated the log event.', + 'log.origin.file.name': 'The code file which originated the log event.', + 'log.origin.function': 'The function which originated the log event.', + 'log.syslog': 'Syslog metadata', + 'log.syslog.appname': 'The device or application that originated the Syslog message.', + 'log.syslog.facility.code': 'Syslog numeric facility of the event.', + 'log.syslog.facility.name': 'Syslog text-based facility of the event.', + 'log.syslog.hostname': 'The host that originated the Syslog message.', + 'log.syslog.msgid': 'An identifier for the type of Syslog message.', + 
'log.syslog.priority': 'Syslog priority of the event.', + 'log.syslog.procid': 'The process name or ID that originated the Syslog message.', + 'log.syslog.severity.code': 'Syslog numeric severity of the event.', + 'log.syslog.severity.name': 'Syslog text-based severity of the event.', + 'log.syslog.structured_data': 'Structured data expressed in RFC 5424 messages.', + 'log.syslog.version': 'Syslog protocol version.', + message: 'Log message optimized for viewing in a log viewer.', + 'network.application': 'Application level protocol name.', + 'network.bytes': 'Total bytes transferred in both directions.', + 'network.community_id': 'A hash of source and destination IPs and ports.', + 'network.direction': 'Direction of the network traffic.', + 'network.forwarded_ip': 'Host IP address when the source IP address is the proxy.', + 'network.iana_number': 'IANA Protocol Number.', + 'network.inner': 'Inner VLAN tag information', + 'network.inner.vlan.id': 'VLAN ID as reported by the observer.', + 'network.inner.vlan.name': 'Optional VLAN name as reported by the observer.', + 'network.name': 'Name given by operators to sections of their network.', + 'network.packets': 'Total packets transferred in both directions.', + 'network.protocol': 'Application protocol name.', + 'network.transport': 'Protocol Name corresponding to the field `iana_number`.', + 'network.type': 'In the OSI Model this would be the Network Layer. 
ipv4, ipv6, ipsec, pim, etc', + 'network.vlan.id': 'VLAN ID as reported by the observer.', + 'network.vlan.name': 'Optional VLAN name as reported by the observer.', + 'observer.egress': 'Object field for egress information', + 'observer.egress.interface.alias': 'Interface alias', + 'observer.egress.interface.id': 'Interface ID', + 'observer.egress.interface.name': 'Interface name', + 'observer.egress.vlan.id': 'VLAN ID as reported by the observer.', + 'observer.egress.vlan.name': 'Optional VLAN name as reported by the observer.', + 'observer.egress.zone': 'Observer Egress zone', + 'observer.geo.city_name': 'City name.', + 'observer.geo.continent_code': 'Continent code.', + 'observer.geo.continent_name': 'Name of the continent.', + 'observer.geo.country_iso_code': 'Country ISO code.', + 'observer.geo.country_name': 'Country name.', + 'observer.geo.location': 'Longitude and latitude.', + 'observer.geo.name': 'User-defined description of a location.', + 'observer.geo.postal_code': 'Postal code.', + 'observer.geo.region_iso_code': 'Region ISO code.', + 'observer.geo.region_name': 'Region name.', + 'observer.geo.timezone': 'Time zone.', + 'observer.hostname': 'Hostname of the observer.', + 'observer.ingress': 'Object field for ingress information', + 'observer.ingress.interface.alias': 'Interface alias', + 'observer.ingress.interface.id': 'Interface ID', + 'observer.ingress.interface.name': 'Interface name', + 'observer.ingress.vlan.id': 'VLAN ID as reported by the observer.', + 'observer.ingress.vlan.name': 'Optional VLAN name as reported by the observer.', + 'observer.ingress.zone': 'Observer ingress zone', + 'observer.ip': 'IP addresses of the observer.', + 'observer.mac': 'MAC addresses of the observer.', + 'observer.name': 'Custom name of the observer.', + 'observer.os.family': 'OS family (such as redhat, debian, freebsd, windows).', + 'observer.os.full': 'Operating system name, including the version or code name.', + 'observer.os.kernel': 'Operating system kernel 
version as a raw string.', + 'observer.os.name': 'Operating system name, without the version.', + 'observer.os.platform': 'Operating system platform (such centos, ubuntu, windows).', + 'observer.os.type': + 'Which commercial OS family (one of: linux, macos, unix, windows, ios or android).', + 'observer.os.version': 'Operating system version as a raw string.', + 'observer.product': 'The product name of the observer.', + 'observer.serial_number': 'Observer serial number.', + 'observer.type': 'The type of the observer the data is coming from.', + 'observer.vendor': 'Vendor name of the observer.', + 'observer.version': 'Observer version.', + 'orchestrator.api_version': 'API version being used to carry out the action', + 'orchestrator.cluster.id': 'Unique ID of the cluster.', + 'orchestrator.cluster.name': 'Name of the cluster.', + 'orchestrator.cluster.url': 'URL of the API used to manage the cluster.', + 'orchestrator.cluster.version': 'The version of the cluster.', + 'orchestrator.namespace': 'Namespace in which the action is taking place.', + 'orchestrator.organization': + 'Organization affected by the event (for multi-tenant orchestrator setups).', + 'orchestrator.resource.annotation': 'The list of annotations added to the resource.', + 'orchestrator.resource.id': 'Unique ID of the resource being acted upon.', + 'orchestrator.resource.ip': + 'IP address assigned to the resource associated with the event being observed.', + 'orchestrator.resource.label': 'The list of labels added to the resource.', + 'orchestrator.resource.name': 'Name of the resource being acted upon.', + 'orchestrator.resource.parent.type': + 'Type or kind of the parent resource associated with the event being observed.', + 'orchestrator.resource.type': 'Type of resource being acted upon.', + 'orchestrator.type': 'Orchestrator cluster type (e.g. 
kubernetes, nomad or cloudfoundry).', + 'organization.id': 'Unique identifier for the organization.', + 'organization.name': 'Organization name.', + 'package.architecture': 'Package architecture.', + 'package.build_version': 'Build version information', + 'package.checksum': 'Checksum of the installed package for verification.', + 'package.description': 'Description of the package.', + 'package.install_scope': 'Indicating how the package was installed, e.g. user-local, global.', + 'package.installed': 'Time when package was installed.', + 'package.license': 'Package license', + 'package.name': 'Package name', + 'package.path': 'Path where the package is installed.', + 'package.reference': 'Package home page or reference URL', + 'package.size': 'Package size in bytes.', + 'package.type': 'Package type', + 'package.version': 'Package version', + 'process.args': 'Array of process arguments.', + 'process.args_count': 'Length of the process.args array.', + 'process.code_signature.digest_algorithm': 'Hashing algorithm used to sign the process.', + 'process.code_signature.exists': 'Boolean to capture if a signature is present.', + 'process.code_signature.signing_id': 'The identifier used to sign the process.', + 'process.code_signature.status': 'Additional information about the certificate status.', + 'process.code_signature.subject_name': 'Subject name of the code signer', + 'process.code_signature.team_id': 'The team identifier used to sign the process.', + 'process.code_signature.timestamp': 'When the signature was generated and signed.', + 'process.code_signature.trusted': 'Stores the trust status of the certificate chain.', + 'process.code_signature.valid': + 'Boolean to capture if the digital signature is verified against the binary content.', + 'process.command_line': 'Full command line that started the process.', + 'process.elf.architecture': 'Machine architecture of the ELF file.', + 'process.elf.byte_order': 'Byte sequence of ELF file.', + 
'process.elf.cpu_type': 'CPU type of the ELF file.', + 'process.elf.creation_date': 'Build or compile date.', + 'process.elf.exports': 'List of exported element names and types.', + 'process.elf.go_import_hash': 'A hash of the Go language imports in an ELF file.', + 'process.elf.go_imports': 'List of imported Go language element names and types.', + 'process.elf.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', + 'process.elf.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'process.elf.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', + 'process.elf.header.abi_version': 'Version of the ELF Application Binary Interface (ABI).', + 'process.elf.header.class': 'Header class of the ELF file.', + 'process.elf.header.data': 'Data table of the ELF header.', + 'process.elf.header.entrypoint': 'Header entrypoint of the ELF file.', + 'process.elf.header.object_version': '"0x1" for original ELF files.', + 'process.elf.header.os_abi': 'Application Binary Interface (ABI) of the Linux OS.', + 'process.elf.header.type': 'Header type of the ELF file.', + 'process.elf.header.version': 'Version of the ELF header.', + 'process.elf.import_hash': 'A hash of the imports in an ELF file.', + 'process.elf.imports': 'List of imported element names and types.', + 'process.elf.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'process.elf.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'process.elf.sections': 'Section information of the ELF file.', + 'process.elf.sections.chi2': 'Chi-square probability distribution of the section.', + 'process.elf.sections.entropy': 'Shannon entropy calculation from the section.', + 'process.elf.sections.flags': 'ELF Section List flags.', + 'process.elf.sections.name': 'ELF Section List name.', + 
'process.elf.sections.physical_offset': 'ELF Section List offset.', + 'process.elf.sections.physical_size': 'ELF Section List physical size.', + 'process.elf.sections.type': 'ELF Section List type.', + 'process.elf.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'process.elf.sections.virtual_address': 'ELF Section List virtual address.', + 'process.elf.sections.virtual_size': 'ELF Section List virtual size.', + 'process.elf.segments': 'ELF object segment list.', + 'process.elf.segments.sections': 'ELF object segment sections.', + 'process.elf.segments.type': 'ELF object segment type.', + 'process.elf.shared_libraries': 'List of shared libraries used by this ELF object.', + 'process.elf.telfhash': 'telfhash hash for ELF file.', + 'process.end': 'The time the process ended.', + 'process.entity_id': 'Unique identifier for the process.', + 'process.entry_leader.args': 'Array of process arguments.', + 'process.entry_leader.args_count': 'Length of the process.args array.', + 'process.entry_leader.attested_groups.name': 'Name of the group.', + 'process.entry_leader.attested_user.id': 'Unique identifier of the user.', + 'process.entry_leader.attested_user.name': 'Short name or login of the user.', + 'process.entry_leader.command_line': 'Full command line that started the process.', + 'process.entry_leader.entity_id': 'Unique identifier for the process.', + 'process.entry_leader.entry_meta.source.ip': 'IP address of the source.', + 'process.entry_leader.entry_meta.type': 'The entry type for the entry session leader.', + 'process.entry_leader.executable': 'Absolute path to the process executable.', + 'process.entry_leader.group.id': 'Unique identifier for the group on the system/platform.', + 'process.entry_leader.group.name': 'Name of the group.', + 'process.entry_leader.interactive': 'Whether the process is connected to an interactive shell.', + 'process.entry_leader.name': 'Process name.', + 'process.entry_leader.parent.entity_id': 
'Unique identifier for the process.', + 'process.entry_leader.parent.pid': 'Process id.', + 'process.entry_leader.parent.session_leader.entity_id': 'Unique identifier for the process.', + 'process.entry_leader.parent.session_leader.pid': 'Process id.', + 'process.entry_leader.parent.session_leader.start': 'The time the process started.', + 'process.entry_leader.parent.session_leader.vpid': 'Virtual process id.', + 'process.entry_leader.parent.start': 'The time the process started.', + 'process.entry_leader.parent.vpid': 'Virtual process id.', + 'process.entry_leader.pid': 'Process id.', + 'process.entry_leader.real_group.id': 'Unique identifier for the group on the system/platform.', + 'process.entry_leader.real_group.name': 'Name of the group.', + 'process.entry_leader.real_user.id': 'Unique identifier of the user.', + 'process.entry_leader.real_user.name': 'Short name or login of the user.', + 'process.entry_leader.same_as_process': + 'This boolean is used to identify if a leader process is the same as the top level process.', + 'process.entry_leader.saved_group.id': 'Unique identifier for the group on the system/platform.', + 'process.entry_leader.saved_group.name': 'Name of the group.', + 'process.entry_leader.saved_user.id': 'Unique identifier of the user.', + 'process.entry_leader.saved_user.name': 'Short name or login of the user.', + 'process.entry_leader.start': 'The time the process started.', + 'process.entry_leader.supplemental_groups.id': + 'Unique identifier for the group on the system/platform.', + 'process.entry_leader.supplemental_groups.name': 'Name of the group.', + 'process.entry_leader.tty': 'Information about the controlling TTY device.', + 'process.entry_leader.tty.char_device.major': 'The TTY character devices major number.', + 'process.entry_leader.tty.char_device.minor': 'The TTY character devices minor number.', + 'process.entry_leader.user.id': 'Unique identifier of the user.', + 'process.entry_leader.user.name': 'Short name or login of 
the user.', + 'process.entry_leader.vpid': 'Virtual process id.', + 'process.entry_leader.working_directory': 'The working directory of the process.', + 'process.env_vars': 'Array of environment variable bindings.', + 'process.executable': 'Absolute path to the process executable.', + 'process.exit_code': 'The exit code of the process.', + 'process.group_leader.args': 'Array of process arguments.', + 'process.group_leader.args_count': 'Length of the process.args array.', + 'process.group_leader.command_line': 'Full command line that started the process.', + 'process.group_leader.entity_id': 'Unique identifier for the process.', + 'process.group_leader.executable': 'Absolute path to the process executable.', + 'process.group_leader.group.id': 'Unique identifier for the group on the system/platform.', + 'process.group_leader.group.name': 'Name of the group.', + 'process.group_leader.interactive': 'Whether the process is connected to an interactive shell.', + 'process.group_leader.name': 'Process name.', + 'process.group_leader.pid': 'Process id.', + 'process.group_leader.real_group.id': 'Unique identifier for the group on the system/platform.', + 'process.group_leader.real_group.name': 'Name of the group.', + 'process.group_leader.real_user.id': 'Unique identifier of the user.', + 'process.group_leader.real_user.name': 'Short name or login of the user.', + 'process.group_leader.same_as_process': + 'This boolean is used to identify if a leader process is the same as the top level process.', + 'process.group_leader.saved_group.id': 'Unique identifier for the group on the system/platform.', + 'process.group_leader.saved_group.name': 'Name of the group.', + 'process.group_leader.saved_user.id': 'Unique identifier of the user.', + 'process.group_leader.saved_user.name': 'Short name or login of the user.', + 'process.group_leader.start': 'The time the process started.', + 'process.group_leader.supplemental_groups.id': + 'Unique identifier for the group on the 
system/platform.', + 'process.group_leader.supplemental_groups.name': 'Name of the group.', + 'process.group_leader.tty': 'Information about the controlling TTY device.', + 'process.group_leader.tty.char_device.major': 'The TTY character devices major number.', + 'process.group_leader.tty.char_device.minor': 'The TTY character devices minor number.', + 'process.group_leader.user.id': 'Unique identifier of the user.', + 'process.group_leader.user.name': 'Short name or login of the user.', + 'process.group_leader.vpid': 'Virtual process id.', + 'process.group_leader.working_directory': 'The working directory of the process.', + 'process.hash.md5': 'MD5 hash.', + 'process.hash.sha1': 'SHA1 hash.', + 'process.hash.sha256': 'SHA256 hash.', + 'process.hash.sha384': 'SHA384 hash.', + 'process.hash.sha512': 'SHA512 hash.', + 'process.hash.ssdeep': 'SSDEEP hash.', + 'process.hash.tlsh': 'TLSH hash.', + 'process.interactive': 'Whether the process is connected to an interactive shell.', + 'process.io': 'A chunk of input or output (IO) from a single process.', + 'process.io.bytes_skipped': + 'An array of byte offsets and lengths denoting where IO data has been skipped.', + 'process.io.bytes_skipped.length': 'The length of bytes skipped.', + 'process.io.bytes_skipped.offset': + 'The byte offset into this events io.text (or io.bytes in the future) where length bytes were skipped.', + 'process.io.max_bytes_per_process_exceeded': + 'If true, the process producing the output has exceeded the max_kilobytes_per_process configuration setting.', + 'process.io.text': 'A chunk of output or input sanitized to UTF-8.', + 'process.io.total_bytes_captured': 'The total number of bytes captured in this event.', + 'process.io.total_bytes_skipped': + 'The total number of bytes that were not captured due to implementation restrictions such as buffer size limits.', + 'process.io.type': 'The type of object on which the IO action (read or write) was taken.', + 'process.macho.go_import_hash': 'A hash 
of the Go language imports in a Mach-O file.', + 'process.macho.go_imports': 'List of imported Go language element names and types.', + 'process.macho.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', + 'process.macho.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'process.macho.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', + 'process.macho.import_hash': 'A hash of the imports in a Mach-O file.', + 'process.macho.imports': 'List of imported element names and types.', + 'process.macho.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'process.macho.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'process.macho.sections': 'Section information of the Mach-O file.', + 'process.macho.sections.entropy': 'Shannon entropy calculation from the section.', + 'process.macho.sections.name': 'Mach-O Section List name.', + 'process.macho.sections.physical_size': 'Mach-O Section List physical size.', + 'process.macho.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', + 'process.macho.sections.virtual_size': + 'Mach-O Section List virtual size. 
This is always the same as `physical_size`.', + 'process.macho.symhash': 'A hash of the imports in a Mach-O file.', + 'process.name': 'Process name.', + 'process.parent.args': 'Array of process arguments.', + 'process.parent.args_count': 'Length of the process.args array.', + 'process.parent.code_signature.digest_algorithm': 'Hashing algorithm used to sign the process.', + 'process.parent.code_signature.exists': 'Boolean to capture if a signature is present.', + 'process.parent.code_signature.signing_id': 'The identifier used to sign the process.', + 'process.parent.code_signature.status': 'Additional information about the certificate status.', + 'process.parent.code_signature.subject_name': 'Subject name of the code signer', + 'process.parent.code_signature.team_id': 'The team identifier used to sign the process.', + 'process.parent.code_signature.timestamp': 'When the signature was generated and signed.', + 'process.parent.code_signature.trusted': 'Stores the trust status of the certificate chain.', + 'process.parent.code_signature.valid': + 'Boolean to capture if the digital signature is verified against the binary content.', + 'process.parent.command_line': 'Full command line that started the process.', + 'process.parent.elf.architecture': 'Machine architecture of the ELF file.', + 'process.parent.elf.byte_order': 'Byte sequence of ELF file.', + 'process.parent.elf.cpu_type': 'CPU type of the ELF file.', + 'process.parent.elf.creation_date': 'Build or compile date.', + 'process.parent.elf.exports': 'List of exported element names and types.', + 'process.parent.elf.go_import_hash': 'A hash of the Go language imports in an ELF file.', + 'process.parent.elf.go_imports': 'List of imported Go language element names and types.', + 'process.parent.elf.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', + 'process.parent.elf.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 
'process.parent.elf.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', + 'process.parent.elf.header.abi_version': 'Version of the ELF Application Binary Interface (ABI).', + 'process.parent.elf.header.class': 'Header class of the ELF file.', + 'process.parent.elf.header.data': 'Data table of the ELF header.', + 'process.parent.elf.header.entrypoint': 'Header entrypoint of the ELF file.', + 'process.parent.elf.header.object_version': '"0x1" for original ELF files.', + 'process.parent.elf.header.os_abi': 'Application Binary Interface (ABI) of the Linux OS.', + 'process.parent.elf.header.type': 'Header type of the ELF file.', + 'process.parent.elf.header.version': 'Version of the ELF header.', + 'process.parent.elf.import_hash': 'A hash of the imports in an ELF file.', + 'process.parent.elf.imports': 'List of imported element names and types.', + 'process.parent.elf.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'process.parent.elf.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'process.parent.elf.sections': 'Section information of the ELF file.', + 'process.parent.elf.sections.chi2': 'Chi-square probability distribution of the section.', + 'process.parent.elf.sections.entropy': 'Shannon entropy calculation from the section.', + 'process.parent.elf.sections.flags': 'ELF Section List flags.', + 'process.parent.elf.sections.name': 'ELF Section List name.', + 'process.parent.elf.sections.physical_offset': 'ELF Section List offset.', + 'process.parent.elf.sections.physical_size': 'ELF Section List physical size.', + 'process.parent.elf.sections.type': 'ELF Section List type.', + 'process.parent.elf.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', + 'process.parent.elf.sections.virtual_address': 'ELF Section List virtual address.', + 
'process.parent.elf.sections.virtual_size': 'ELF Section List virtual size.', + 'process.parent.elf.segments': 'ELF object segment list.', + 'process.parent.elf.segments.sections': 'ELF object segment sections.', + 'process.parent.elf.segments.type': 'ELF object segment type.', + 'process.parent.elf.shared_libraries': 'List of shared libraries used by this ELF object.', + 'process.parent.elf.telfhash': 'telfhash hash for ELF file.', + 'process.parent.end': 'The time the process ended.', + 'process.parent.entity_id': 'Unique identifier for the process.', + 'process.parent.executable': 'Absolute path to the process executable.', + 'process.parent.exit_code': 'The exit code of the process.', + 'process.parent.group.id': 'Unique identifier for the group on the system/platform.', + 'process.parent.group.name': 'Name of the group.', + 'process.parent.group_leader.entity_id': 'Unique identifier for the process.', + 'process.parent.group_leader.pid': 'Process id.', + 'process.parent.group_leader.start': 'The time the process started.', + 'process.parent.group_leader.vpid': 'Virtual process id.', + 'process.parent.hash.md5': 'MD5 hash.', + 'process.parent.hash.sha1': 'SHA1 hash.', + 'process.parent.hash.sha256': 'SHA256 hash.', + 'process.parent.hash.sha384': 'SHA384 hash.', + 'process.parent.hash.sha512': 'SHA512 hash.', + 'process.parent.hash.ssdeep': 'SSDEEP hash.', + 'process.parent.hash.tlsh': 'TLSH hash.', + 'process.parent.interactive': 'Whether the process is connected to an interactive shell.', + 'process.parent.macho.go_import_hash': 'A hash of the Go language imports in a Mach-O file.', + 'process.parent.macho.go_imports': 'List of imported Go language element names and types.', + 'process.parent.macho.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', + 'process.parent.macho.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'process.parent.macho.go_stripped': 'Whether 
the file is a stripped or obfuscated Go executable.', + 'process.parent.macho.import_hash': 'A hash of the imports in a Mach-O file.', + 'process.parent.macho.imports': 'List of imported element names and types.', + 'process.parent.macho.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'process.parent.macho.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'process.parent.macho.sections': 'Section information of the Mach-O file.', + 'process.parent.macho.sections.entropy': 'Shannon entropy calculation from the section.', + 'process.parent.macho.sections.name': 'Mach-O Section List name.', + 'process.parent.macho.sections.physical_size': 'Mach-O Section List physical size.', + 'process.parent.macho.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', + 'process.parent.macho.sections.virtual_size': + 'Mach-O Section List virtual size. 
This is always the same as `physical_size`.', + 'process.parent.macho.symhash': 'A hash of the imports in a Mach-O file.', + 'process.parent.name': 'Process name.', + 'process.parent.pe.architecture': 'CPU architecture target for the file.', + 'process.parent.pe.company': 'Internal company name of the file, provided at compile-time.', + 'process.parent.pe.description': 'Internal description of the file, provided at compile-time.', + 'process.parent.pe.file_version': 'Process name.', + 'process.parent.pe.go_import_hash': 'A hash of the Go language imports in a PE file.', + 'process.parent.pe.go_imports': 'List of imported Go language element names and types.', + 'process.parent.pe.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', + 'process.parent.pe.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'process.parent.pe.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', + 'process.parent.pe.imphash': 'A hash of the imports in a PE file.', + 'process.parent.pe.import_hash': 'A hash of the imports in a PE file.', + 'process.parent.pe.imports': 'List of imported element names and types.', + 'process.parent.pe.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'process.parent.pe.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'process.parent.pe.original_file_name': 'Internal name of the file, provided at compile-time.', + 'process.parent.pe.pehash': 'A hash of the PE header and data from one or more PE sections.', + 'process.parent.pe.product': 'Internal product name of the file, provided at compile-time.', + 'process.parent.pe.sections': 'Section information of the PE file.', + 'process.parent.pe.sections.entropy': 'Shannon entropy calculation from the section.', + 'process.parent.pe.sections.name': 'PE Section 
List name.', + 'process.parent.pe.sections.physical_size': 'PE Section List physical size.', + 'process.parent.pe.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', + 'process.parent.pe.sections.virtual_size': + 'PE Section List virtual size. This is always the same as `physical_size`.', + 'process.parent.pgid': 'Deprecated identifier of the group of processes the process belongs to.', + 'process.parent.pid': 'Process id.', + 'process.parent.real_group.id': 'Unique identifier for the group on the system/platform.', + 'process.parent.real_group.name': 'Name of the group.', + 'process.parent.real_user.id': 'Unique identifier of the user.', + 'process.parent.real_user.name': 'Short name or login of the user.', + 'process.parent.saved_group.id': 'Unique identifier for the group on the system/platform.', + 'process.parent.saved_group.name': 'Name of the group.', + 'process.parent.saved_user.id': 'Unique identifier of the user.', + 'process.parent.saved_user.name': 'Short name or login of the user.', + 'process.parent.start': 'The time the process started.', + 'process.parent.supplemental_groups.id': + 'Unique identifier for the group on the system/platform.', + 'process.parent.supplemental_groups.name': 'Name of the group.', + 'process.parent.thread.capabilities.effective': + 'Array of capabilities used for permission checks.', + 'process.parent.thread.capabilities.permitted': 'Array of capabilities a thread could assume.', + 'process.parent.thread.id': 'Thread ID.', + 'process.parent.thread.name': 'Thread name.', + 'process.parent.title': 'Process title.', + 'process.parent.tty': 'Information about the controlling TTY device.', + 'process.parent.tty.char_device.major': 'The TTY character devices major number.', + 'process.parent.tty.char_device.minor': 'The TTY character devices minor number.', + 'process.parent.uptime': 'Seconds the process has been up.', + 'process.parent.user.id': 'Unique identifier of the user.', + 
'process.parent.user.name': 'Short name or login of the user.', + 'process.parent.vpid': 'Virtual process id.', + 'process.parent.working_directory': 'The working directory of the process.', + 'process.pe.architecture': 'CPU architecture target for the file.', + 'process.pe.company': 'Internal company name of the file, provided at compile-time.', + 'process.pe.description': 'Internal description of the file, provided at compile-time.', + 'process.pe.file_version': 'Process name.', + 'process.pe.go_import_hash': 'A hash of the Go language imports in a PE file.', + 'process.pe.go_imports': 'List of imported Go language element names and types.', + 'process.pe.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'process.pe.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'process.pe.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', + 'process.pe.imphash': 'A hash of the imports in a PE file.', + 'process.pe.import_hash': 'A hash of the imports in a PE file.', + 'process.pe.imports': 'List of imported element names and types.', + 'process.pe.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'process.pe.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'process.pe.original_file_name': 'Internal name of the file, provided at compile-time.', + 'process.pe.pehash': 'A hash of the PE header and data from one or more PE sections.', + 'process.pe.product': 'Internal product name of the file, provided at compile-time.', + 'process.pe.sections': 'Section information of the PE file.', + 'process.pe.sections.entropy': 'Shannon entropy calculation from the section.', + 'process.pe.sections.name': 'PE Section List name.', + 'process.pe.sections.physical_size': 'PE Section List physical size.', + 
'process.pe.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'process.pe.sections.virtual_size': + 'PE Section List virtual size. This is always the same as `physical_size`.', + 'process.pgid': 'Deprecated identifier of the group of processes the process belongs to.', + 'process.pid': 'Process id.', + 'process.previous.args': 'Array of process arguments.', + 'process.previous.args_count': 'Length of the process.args array.', + 'process.previous.executable': 'Absolute path to the process executable.', + 'process.real_group.id': 'Unique identifier for the group on the system/platform.', + 'process.real_group.name': 'Name of the group.', + 'process.real_user.id': 'Unique identifier of the user.', + 'process.real_user.name': 'Short name or login of the user.', + 'process.saved_group.id': 'Unique identifier for the group on the system/platform.', + 'process.saved_group.name': 'Name of the group.', + 'process.saved_user.id': 'Unique identifier of the user.', + 'process.saved_user.name': 'Short name or login of the user.', + 'process.session_leader.args': 'Array of process arguments.', + 'process.session_leader.args_count': 'Length of the process.args array.', + 'process.session_leader.command_line': 'Full command line that started the process.', + 'process.session_leader.entity_id': 'Unique identifier for the process.', + 'process.session_leader.executable': 'Absolute path to the process executable.', + 'process.session_leader.group.id': 'Unique identifier for the group on the system/platform.', + 'process.session_leader.group.name': 'Name of the group.', + 'process.session_leader.interactive': 'Whether the process is connected to an interactive shell.', + 'process.session_leader.name': 'Process name.', + 'process.session_leader.parent.entity_id': 'Unique identifier for the process.', + 'process.session_leader.parent.pid': 'Process id.', + 'process.session_leader.parent.session_leader.entity_id': 'Unique identifier for the process.', 
+ 'process.session_leader.parent.session_leader.pid': 'Process id.', + 'process.session_leader.parent.session_leader.start': 'The time the process started.', + 'process.session_leader.parent.session_leader.vpid': 'Virtual process id.', + 'process.session_leader.parent.start': 'The time the process started.', + 'process.session_leader.parent.vpid': 'Virtual process id.', + 'process.session_leader.pid': 'Process id.', + 'process.session_leader.real_group.id': 'Unique identifier for the group on the system/platform.', + 'process.session_leader.real_group.name': 'Name of the group.', + 'process.session_leader.real_user.id': 'Unique identifier of the user.', + 'process.session_leader.real_user.name': 'Short name or login of the user.', + 'process.session_leader.same_as_process': + 'This boolean is used to identify if a leader process is the same as the top level process.', + 'process.session_leader.saved_group.id': + 'Unique identifier for the group on the system/platform.', + 'process.session_leader.saved_group.name': 'Name of the group.', + 'process.session_leader.saved_user.id': 'Unique identifier of the user.', + 'process.session_leader.saved_user.name': 'Short name or login of the user.', + 'process.session_leader.start': 'The time the process started.', + 'process.session_leader.supplemental_groups.id': + 'Unique identifier for the group on the system/platform.', + 'process.session_leader.supplemental_groups.name': 'Name of the group.', + 'process.session_leader.tty': 'Information about the controlling TTY device.', + 'process.session_leader.tty.char_device.major': 'The TTY character devices major number.', + 'process.session_leader.tty.char_device.minor': 'The TTY character devices minor number.', + 'process.session_leader.user.id': 'Unique identifier of the user.', + 'process.session_leader.user.name': 'Short name or login of the user.', + 'process.session_leader.vpid': 'Virtual process id.', + 'process.session_leader.working_directory': 'The working directory 
of the process.', + 'process.start': 'The time the process started.', + 'process.supplemental_groups.id': 'Unique identifier for the group on the system/platform.', + 'process.supplemental_groups.name': 'Name of the group.', + 'process.thread.capabilities.effective': 'Array of capabilities used for permission checks.', + 'process.thread.capabilities.permitted': 'Array of capabilities a thread could assume.', + 'process.thread.id': 'Thread ID.', + 'process.thread.name': 'Thread name.', + 'process.title': 'Process title.', + 'process.tty': 'Information about the controlling TTY device.', + 'process.tty.char_device.major': 'The TTY character devices major number.', + 'process.tty.char_device.minor': 'The TTY character devices minor number.', + 'process.tty.columns': 'The number of character columns per line. e.g terminal width', + 'process.tty.rows': 'The number of character rows in the terminal. e.g terminal height', + 'process.uptime': 'Seconds the process has been up.', + 'process.user.id': 'Unique identifier of the user.', + 'process.user.name': 'Short name or login of the user.', + 'process.vpid': 'Virtual process id.', + 'process.working_directory': 'The working directory of the process.', + 'registry.data.bytes': 'Original bytes written with base64 encoding.', + 'registry.data.strings': 'List of strings representing what was written to the registry.', + 'registry.data.type': 'Standard registry type for encoding contents', + 'registry.hive': 'Abbreviated name for the hive.', + 'registry.key': 'Hive-relative path of keys.', + 'registry.path': 'Full path, including hive, key and value', + 'registry.value': 'Name of the value written.', + 'related.hash': 'All the hashes seen on your event.', + 'related.hosts': 'All the host identifiers seen on your event.', + 'related.ip': 'All of the IPs seen on your event.', + 'related.user': 'All the user names or other user identifiers seen on the event.', + 'rule.author': 'Rule author', + 'rule.category': 'Rule category', + 
'rule.description': 'Rule description', + 'rule.id': 'Rule ID', + 'rule.license': 'Rule license', + 'rule.name': 'Rule name', + 'rule.reference': 'Rule reference URL', + 'rule.ruleset': 'Rule ruleset', + 'rule.uuid': 'Rule UUID', + 'rule.version': 'Rule version', + 'server.address': 'Server network address.', + 'server.as.number': 'Unique number allocated to the autonomous system.', + 'server.as.organization.name': 'Organization name.', + 'server.bytes': 'Bytes sent from the server to the client.', + 'server.domain': 'The domain name of the server.', + 'server.geo.city_name': 'City name.', + 'server.geo.continent_code': 'Continent code.', + 'server.geo.continent_name': 'Name of the continent.', + 'server.geo.country_iso_code': 'Country ISO code.', + 'server.geo.country_name': 'Country name.', + 'server.geo.location': 'Longitude and latitude.', + 'server.geo.name': 'User-defined description of a location.', + 'server.geo.postal_code': 'Postal code.', + 'server.geo.region_iso_code': 'Region ISO code.', + 'server.geo.region_name': 'Region name.', + 'server.geo.timezone': 'Time zone.', + 'server.ip': 'IP address of the server.', + 'server.mac': 'MAC address of the server.', + 'server.nat.ip': 'Server NAT ip', + 'server.nat.port': 'Server NAT port', + 'server.packets': 'Packets sent from the server to the client.', + 'server.port': 'Port of the server.', + 'server.registered_domain': 'The highest registered server domain, stripped of the subdomain.', + 'server.subdomain': 'The subdomain of the domain.', + 'server.top_level_domain': 'The effective top level domain (com, org, net, co.uk).', + 'server.user.domain': 'Name of the directory the user is a member of.', + 'server.user.email': 'User email address.', + 'server.user.full_name': 'Users full name, if available.', + 'server.user.group.domain': 'Name of the directory the group is a member of.', + 'server.user.group.id': 'Unique identifier for the group on the system/platform.', + 'server.user.group.name': 'Name of the 
group.', + 'server.user.hash': 'Unique user hash to correlate information for a user in anonymized form.', + 'server.user.id': 'Unique identifier of the user.', + 'server.user.name': 'Short name or login of the user.', + 'server.user.roles': 'Array of user roles at the time of the event.', + 'service.address': 'Address of this service.', + 'service.environment': 'Environment of the service.', + 'service.ephemeral_id': 'Ephemeral identifier of this service.', + 'service.id': 'Unique identifier of the running service.', + 'service.name': 'Name of the service.', + 'service.node.name': 'Name of the service node.', + 'service.node.role': 'Deprecated role (singular) of the service node.', + 'service.node.roles': 'Roles of the service node.', + 'service.origin.address': 'Address of this service.', + 'service.origin.environment': 'Environment of the service.', + 'service.origin.ephemeral_id': 'Ephemeral identifier of this service.', + 'service.origin.id': 'Unique identifier of the running service.', + 'service.origin.name': 'Name of the service.', + 'service.origin.node.name': 'Name of the service node.', + 'service.origin.node.role': 'Deprecated role (singular) of the service node.', + 'service.origin.node.roles': 'Roles of the service node.', + 'service.origin.state': 'Current state of the service.', + 'service.origin.type': 'The type of the service.', + 'service.origin.version': 'Version of the service.', + 'service.state': 'Current state of the service.', + 'service.target.address': 'Address of this service.', + 'service.target.environment': 'Environment of the service.', + 'service.target.ephemeral_id': 'Ephemeral identifier of this service.', + 'service.target.id': 'Unique identifier of the running service.', + 'service.target.name': 'Name of the service.', + 'service.target.node.name': 'Name of the service node.', + 'service.target.node.role': 'Deprecated role (singular) of the service node.', + 'service.target.node.roles': 'Roles of the service node.', + 
'service.target.state': 'Current state of the service.', + 'service.target.type': 'The type of the service.', + 'service.target.version': 'Version of the service.', + 'service.type': 'The type of the service.', + 'service.version': 'Version of the service.', + 'source.address': 'Source network address.', + 'source.as.number': 'Unique number allocated to the autonomous system.', + 'source.as.organization.name': 'Organization name.', + 'source.bytes': 'Bytes sent from the source to the destination.', + 'source.domain': 'The domain name of the source.', + 'source.geo.city_name': 'City name.', + 'source.geo.continent_code': 'Continent code.', + 'source.geo.continent_name': 'Name of the continent.', + 'source.geo.country_iso_code': 'Country ISO code.', + 'source.geo.country_name': 'Country name.', + 'source.geo.location': 'Longitude and latitude.', + 'source.geo.name': 'User-defined description of a location.', + 'source.geo.postal_code': 'Postal code.', + 'source.geo.region_iso_code': 'Region ISO code.', + 'source.geo.region_name': 'Region name.', + 'source.geo.timezone': 'Time zone.', + 'source.ip': 'IP address of the source.', + 'source.mac': 'MAC address of the source.', + 'source.nat.ip': 'Source NAT ip', + 'source.nat.port': 'Source NAT port', + 'source.packets': 'Packets sent from the source to the destination.', + 'source.port': 'Port of the source.', + 'source.registered_domain': 'The highest registered source domain, stripped of the subdomain.', + 'source.subdomain': 'The subdomain of the domain.', + 'source.top_level_domain': 'The effective top level domain (com, org, net, co.uk).', + 'source.user.domain': 'Name of the directory the user is a member of.', + 'source.user.email': 'User email address.', + 'source.user.full_name': 'Users full name, if available.', + 'source.user.group.domain': 'Name of the directory the group is a member of.', + 'source.user.group.id': 'Unique identifier for the group on the system/platform.', + 'source.user.group.name': 'Name of 
the group.', + 'source.user.hash': 'Unique user hash to correlate information for a user in anonymized form.', + 'source.user.id': 'Unique identifier of the user.', + 'source.user.name': 'Short name or login of the user.', + 'source.user.roles': 'Array of user roles at the time of the event.', + 'span.id': 'Unique identifier of the span within the scope of its trace.', + tags: 'List of keywords used to tag each event.', + 'threat.enrichments': 'List of objects containing indicators enriching the event.', + 'threat.enrichments.indicator': 'Object containing indicators enriching the event.', + 'threat.enrichments.indicator.as.number': 'Unique number allocated to the autonomous system.', + 'threat.enrichments.indicator.as.organization.name': 'Organization name.', + 'threat.enrichments.indicator.confidence': 'Indicator confidence rating', + 'threat.enrichments.indicator.description': 'Indicator description', + 'threat.enrichments.indicator.email.address': 'Indicator email address', + 'threat.enrichments.indicator.file.accessed': 'Last time the file was accessed.', + 'threat.enrichments.indicator.file.attributes': 'Array of file attributes.', + 'threat.enrichments.indicator.file.code_signature.digest_algorithm': + 'Hashing algorithm used to sign the process.', + 'threat.enrichments.indicator.file.code_signature.exists': + 'Boolean to capture if a signature is present.', + 'threat.enrichments.indicator.file.code_signature.signing_id': + 'The identifier used to sign the process.', + 'threat.enrichments.indicator.file.code_signature.status': + 'Additional information about the certificate status.', + 'threat.enrichments.indicator.file.code_signature.subject_name': + 'Subject name of the code signer', + 'threat.enrichments.indicator.file.code_signature.team_id': + 'The team identifier used to sign the process.', + 'threat.enrichments.indicator.file.code_signature.timestamp': + 'When the signature was generated and signed.', + 
'threat.enrichments.indicator.file.code_signature.trusted': + 'Stores the trust status of the certificate chain.', + 'threat.enrichments.indicator.file.code_signature.valid': + 'Boolean to capture if the digital signature is verified against the binary content.', + 'threat.enrichments.indicator.file.created': 'File creation time.', + 'threat.enrichments.indicator.file.ctime': 'Last time the file attributes or metadata changed.', + 'threat.enrichments.indicator.file.device': 'Device that is the source of the file.', + 'threat.enrichments.indicator.file.directory': 'Directory where the file is located.', + 'threat.enrichments.indicator.file.drive_letter': 'Drive letter where the file is located.', + 'threat.enrichments.indicator.file.elf.architecture': 'Machine architecture of the ELF file.', + 'threat.enrichments.indicator.file.elf.byte_order': 'Byte sequence of ELF file.', + 'threat.enrichments.indicator.file.elf.cpu_type': 'CPU type of the ELF file.', + 'threat.enrichments.indicator.file.elf.creation_date': 'Build or compile date.', + 'threat.enrichments.indicator.file.elf.exports': 'List of exported element names and types.', + 'threat.enrichments.indicator.file.elf.go_import_hash': + 'A hash of the Go language imports in an ELF file.', + 'threat.enrichments.indicator.file.elf.go_imports': + 'List of imported Go language element names and types.', + 'threat.enrichments.indicator.file.elf.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', + 'threat.enrichments.indicator.file.elf.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'threat.enrichments.indicator.file.elf.go_stripped': + 'Whether the file is a stripped or obfuscated Go executable.', + 'threat.enrichments.indicator.file.elf.header.abi_version': + 'Version of the ELF Application Binary Interface (ABI).', + 'threat.enrichments.indicator.file.elf.header.class': 'Header class of the ELF file.', + 
'threat.enrichments.indicator.file.elf.header.data': 'Data table of the ELF header.', + 'threat.enrichments.indicator.file.elf.header.entrypoint': 'Header entrypoint of the ELF file.', + 'threat.enrichments.indicator.file.elf.header.object_version': '"0x1" for original ELF files.', + 'threat.enrichments.indicator.file.elf.header.os_abi': + 'Application Binary Interface (ABI) of the Linux OS.', + 'threat.enrichments.indicator.file.elf.header.type': 'Header type of the ELF file.', + 'threat.enrichments.indicator.file.elf.header.version': 'Version of the ELF header.', + 'threat.enrichments.indicator.file.elf.import_hash': 'A hash of the imports in an ELF file.', + 'threat.enrichments.indicator.file.elf.imports': 'List of imported element names and types.', + 'threat.enrichments.indicator.file.elf.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'threat.enrichments.indicator.file.elf.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'threat.enrichments.indicator.file.elf.sections': 'Section information of the ELF file.', + 'threat.enrichments.indicator.file.elf.sections.chi2': + 'Chi-square probability distribution of the section.', + 'threat.enrichments.indicator.file.elf.sections.entropy': + 'Shannon entropy calculation from the section.', + 'threat.enrichments.indicator.file.elf.sections.flags': 'ELF Section List flags.', + 'threat.enrichments.indicator.file.elf.sections.name': 'ELF Section List name.', + 'threat.enrichments.indicator.file.elf.sections.physical_offset': 'ELF Section List offset.', + 'threat.enrichments.indicator.file.elf.sections.physical_size': 'ELF Section List physical size.', + 'threat.enrichments.indicator.file.elf.sections.type': 'ELF Section List type.', + 'threat.enrichments.indicator.file.elf.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', + 
'threat.enrichments.indicator.file.elf.sections.virtual_address': + 'ELF Section List virtual address.', + 'threat.enrichments.indicator.file.elf.sections.virtual_size': 'ELF Section List virtual size.', + 'threat.enrichments.indicator.file.elf.segments': 'ELF object segment list.', + 'threat.enrichments.indicator.file.elf.segments.sections': 'ELF object segment sections.', + 'threat.enrichments.indicator.file.elf.segments.type': 'ELF object segment type.', + 'threat.enrichments.indicator.file.elf.shared_libraries': + 'List of shared libraries used by this ELF object.', + 'threat.enrichments.indicator.file.elf.telfhash': 'telfhash hash for ELF file.', + 'threat.enrichments.indicator.file.extension': 'File extension, excluding the leading dot.', + 'threat.enrichments.indicator.file.fork_name': + 'A fork is additional data associated with a filesystem object.', + 'threat.enrichments.indicator.file.gid': 'Primary group ID (GID) of the file.', + 'threat.enrichments.indicator.file.group': 'Primary group name of the file.', + 'threat.enrichments.indicator.file.hash.md5': 'MD5 hash.', + 'threat.enrichments.indicator.file.hash.sha1': 'SHA1 hash.', + 'threat.enrichments.indicator.file.hash.sha256': 'SHA256 hash.', + 'threat.enrichments.indicator.file.hash.sha384': 'SHA384 hash.', + 'threat.enrichments.indicator.file.hash.sha512': 'SHA512 hash.', + 'threat.enrichments.indicator.file.hash.ssdeep': 'SSDEEP hash.', + 'threat.enrichments.indicator.file.hash.tlsh': 'TLSH hash.', + 'threat.enrichments.indicator.file.inode': 'Inode representing the file in the filesystem.', + 'threat.enrichments.indicator.file.mime_type': + 'Media type of file, document, or arrangement of bytes.', + 'threat.enrichments.indicator.file.mode': 'Mode of the file in octal representation.', + 'threat.enrichments.indicator.file.mtime': 'Last time the file content was modified.', + 'threat.enrichments.indicator.file.name': + 'Name of the file including the extension, without the directory.', + 
'threat.enrichments.indicator.file.owner': 'File owners username.', + 'threat.enrichments.indicator.file.path': 'Full path to the file, including the file name.', + 'threat.enrichments.indicator.file.pe.architecture': 'CPU architecture target for the file.', + 'threat.enrichments.indicator.file.pe.company': + 'Internal company name of the file, provided at compile-time.', + 'threat.enrichments.indicator.file.pe.description': + 'Internal description of the file, provided at compile-time.', + 'threat.enrichments.indicator.file.pe.file_version': 'Process name.', + 'threat.enrichments.indicator.file.pe.go_import_hash': + 'A hash of the Go language imports in a PE file.', + 'threat.enrichments.indicator.file.pe.go_imports': + 'List of imported Go language element names and types.', + 'threat.enrichments.indicator.file.pe.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', + 'threat.enrichments.indicator.file.pe.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'threat.enrichments.indicator.file.pe.go_stripped': + 'Whether the file is a stripped or obfuscated Go executable.', + 'threat.enrichments.indicator.file.pe.imphash': 'A hash of the imports in a PE file.', + 'threat.enrichments.indicator.file.pe.import_hash': 'A hash of the imports in a PE file.', + 'threat.enrichments.indicator.file.pe.imports': 'List of imported element names and types.', + 'threat.enrichments.indicator.file.pe.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'threat.enrichments.indicator.file.pe.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'threat.enrichments.indicator.file.pe.original_file_name': + 'Internal name of the file, provided at compile-time.', + 'threat.enrichments.indicator.file.pe.pehash': + 'A hash of the PE header and data from one or more PE 
sections.', + 'threat.enrichments.indicator.file.pe.product': + 'Internal product name of the file, provided at compile-time.', + 'threat.enrichments.indicator.file.pe.sections': 'Section information of the PE file.', + 'threat.enrichments.indicator.file.pe.sections.entropy': + 'Shannon entropy calculation from the section.', + 'threat.enrichments.indicator.file.pe.sections.name': 'PE Section List name.', + 'threat.enrichments.indicator.file.pe.sections.physical_size': 'PE Section List physical size.', + 'threat.enrichments.indicator.file.pe.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', + 'threat.enrichments.indicator.file.pe.sections.virtual_size': + 'PE Section List virtual size. This is always the same as `physical_size`.', + 'threat.enrichments.indicator.file.size': 'File size in bytes.', + 'threat.enrichments.indicator.file.target_path': 'Target path for symlinks.', + 'threat.enrichments.indicator.file.type': 'File type (file, dir, or symlink).', + 'threat.enrichments.indicator.file.uid': + 'The user ID (UID) or security identifier (SID) of the file owner.', + 'threat.enrichments.indicator.file.x509.alternative_names': + 'List of subject alternative names (SAN).', + 'threat.enrichments.indicator.file.x509.issuer.common_name': + 'List of common name (CN) of issuing certificate authority.', + 'threat.enrichments.indicator.file.x509.issuer.country': 'List of country (C) codes', + 'threat.enrichments.indicator.file.x509.issuer.distinguished_name': + 'Distinguished name (DN) of issuing certificate authority.', + 'threat.enrichments.indicator.file.x509.issuer.locality': 'List of locality names (L)', + 'threat.enrichments.indicator.file.x509.issuer.organization': + 'List of organizations (O) of issuing certificate authority.', + 'threat.enrichments.indicator.file.x509.issuer.organizational_unit': + 'List of organizational units (OU) of issuing certificate authority.', + 
'threat.enrichments.indicator.file.x509.issuer.state_or_province': + 'List of state or province names (ST, S, or P)', + 'threat.enrichments.indicator.file.x509.not_after': + 'Time at which the certificate is no longer considered valid.', + 'threat.enrichments.indicator.file.x509.not_before': + 'Time at which the certificate is first considered valid.', + 'threat.enrichments.indicator.file.x509.public_key_algorithm': + 'Algorithm used to generate the public key.', + 'threat.enrichments.indicator.file.x509.public_key_curve': + 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', + 'threat.enrichments.indicator.file.x509.public_key_exponent': + 'Exponent used to derive the public key. This is algorithm specific.', + 'threat.enrichments.indicator.file.x509.public_key_size': + 'The size of the public key space in bits.', + 'threat.enrichments.indicator.file.x509.serial_number': + 'Unique serial number issued by the certificate authority.', + 'threat.enrichments.indicator.file.x509.signature_algorithm': + 'Identifier for certificate signature algorithm.', + 'threat.enrichments.indicator.file.x509.subject.common_name': + 'List of common names (CN) of subject.', + 'threat.enrichments.indicator.file.x509.subject.country': 'List of country (C) code', + 'threat.enrichments.indicator.file.x509.subject.distinguished_name': + 'Distinguished name (DN) of the certificate subject entity.', + 'threat.enrichments.indicator.file.x509.subject.locality': 'List of locality names (L)', + 'threat.enrichments.indicator.file.x509.subject.organization': + 'List of organizations (O) of subject.', + 'threat.enrichments.indicator.file.x509.subject.organizational_unit': + 'List of organizational units (OU) of subject.', + 'threat.enrichments.indicator.file.x509.subject.state_or_province': + 'List of state or province names (ST, S, or P)', + 'threat.enrichments.indicator.file.x509.version_number': 'Version of x509 format.', + 
'threat.enrichments.indicator.first_seen': 'Date/time indicator was first reported.', + 'threat.enrichments.indicator.geo.city_name': 'City name.', + 'threat.enrichments.indicator.geo.continent_code': 'Continent code.', + 'threat.enrichments.indicator.geo.continent_name': 'Name of the continent.', + 'threat.enrichments.indicator.geo.country_iso_code': 'Country ISO code.', + 'threat.enrichments.indicator.geo.country_name': 'Country name.', + 'threat.enrichments.indicator.geo.location': 'Longitude and latitude.', + 'threat.enrichments.indicator.geo.name': 'User-defined description of a location.', + 'threat.enrichments.indicator.geo.postal_code': 'Postal code.', + 'threat.enrichments.indicator.geo.region_iso_code': 'Region ISO code.', + 'threat.enrichments.indicator.geo.region_name': 'Region name.', + 'threat.enrichments.indicator.geo.timezone': 'Time zone.', + 'threat.enrichments.indicator.ip': 'Indicator IP address', + 'threat.enrichments.indicator.last_seen': 'Date/time indicator was last reported.', + 'threat.enrichments.indicator.marking.tlp': 'Indicator TLP marking', + 'threat.enrichments.indicator.marking.tlp_version': 'Indicator TLP version', + 'threat.enrichments.indicator.modified_at': 'Date/time indicator was last updated.', + 'threat.enrichments.indicator.name': 'Indicator display name', + 'threat.enrichments.indicator.port': 'Indicator port', + 'threat.enrichments.indicator.provider': 'Indicator provider', + 'threat.enrichments.indicator.reference': 'Indicator reference URL', + 'threat.enrichments.indicator.registry.data.bytes': + 'Original bytes written with base64 encoding.', + 'threat.enrichments.indicator.registry.data.strings': + 'List of strings representing what was written to the registry.', + 'threat.enrichments.indicator.registry.data.type': 'Standard registry type for encoding contents', + 'threat.enrichments.indicator.registry.hive': 'Abbreviated name for the hive.', + 'threat.enrichments.indicator.registry.key': 'Hive-relative path of 
keys.', + 'threat.enrichments.indicator.registry.path': 'Full path, including hive, key and value', + 'threat.enrichments.indicator.registry.value': 'Name of the value written.', + 'threat.enrichments.indicator.scanner_stats': 'Scanner statistics', + 'threat.enrichments.indicator.sightings': 'Number of times indicator observed', + 'threat.enrichments.indicator.type': 'Type of indicator', + 'threat.enrichments.indicator.url.domain': 'Domain of the url.', + 'threat.enrichments.indicator.url.extension': + 'File extension from the request url, excluding the leading dot.', + 'threat.enrichments.indicator.url.fragment': 'Portion of the url after the `#`.', + 'threat.enrichments.indicator.url.full': 'Full unparsed URL.', + 'threat.enrichments.indicator.url.original': + 'Unmodified original url as seen in the event source.', + 'threat.enrichments.indicator.url.password': 'Password of the request.', + 'threat.enrichments.indicator.url.path': 'Path of the request, such as "/search".', + 'threat.enrichments.indicator.url.port': 'Port of the request, such as 443.', + 'threat.enrichments.indicator.url.query': 'Query string of the request.', + 'threat.enrichments.indicator.url.registered_domain': + 'The highest registered url domain, stripped of the subdomain.', + 'threat.enrichments.indicator.url.scheme': 'Scheme of the url.', + 'threat.enrichments.indicator.url.subdomain': 'The subdomain of the domain.', + 'threat.enrichments.indicator.url.top_level_domain': + 'The effective top level domain (com, org, net, co.uk).', + 'threat.enrichments.indicator.url.username': 'Username of the request.', + 'threat.enrichments.indicator.x509.alternative_names': 'List of subject alternative names (SAN).', + 'threat.enrichments.indicator.x509.issuer.common_name': + 'List of common name (CN) of issuing certificate authority.', + 'threat.enrichments.indicator.x509.issuer.country': 'List of country (C) codes', + 'threat.enrichments.indicator.x509.issuer.distinguished_name': + 'Distinguished name 
(DN) of issuing certificate authority.', + 'threat.enrichments.indicator.x509.issuer.locality': 'List of locality names (L)', + 'threat.enrichments.indicator.x509.issuer.organization': + 'List of organizations (O) of issuing certificate authority.', + 'threat.enrichments.indicator.x509.issuer.organizational_unit': + 'List of organizational units (OU) of issuing certificate authority.', + 'threat.enrichments.indicator.x509.issuer.state_or_province': + 'List of state or province names (ST, S, or P)', + 'threat.enrichments.indicator.x509.not_after': + 'Time at which the certificate is no longer considered valid.', + 'threat.enrichments.indicator.x509.not_before': + 'Time at which the certificate is first considered valid.', + 'threat.enrichments.indicator.x509.public_key_algorithm': + 'Algorithm used to generate the public key.', + 'threat.enrichments.indicator.x509.public_key_curve': + 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', + 'threat.enrichments.indicator.x509.public_key_exponent': + 'Exponent used to derive the public key. 
This is algorithm specific.', + 'threat.enrichments.indicator.x509.public_key_size': 'The size of the public key space in bits.', + 'threat.enrichments.indicator.x509.serial_number': + 'Unique serial number issued by the certificate authority.', + 'threat.enrichments.indicator.x509.signature_algorithm': + 'Identifier for certificate signature algorithm.', + 'threat.enrichments.indicator.x509.subject.common_name': 'List of common names (CN) of subject.', + 'threat.enrichments.indicator.x509.subject.country': 'List of country (C) code', + 'threat.enrichments.indicator.x509.subject.distinguished_name': + 'Distinguished name (DN) of the certificate subject entity.', + 'threat.enrichments.indicator.x509.subject.locality': 'List of locality names (L)', + 'threat.enrichments.indicator.x509.subject.organization': 'List of organizations (O) of subject.', + 'threat.enrichments.indicator.x509.subject.organizational_unit': + 'List of organizational units (OU) of subject.', + 'threat.enrichments.indicator.x509.subject.state_or_province': + 'List of state or province names (ST, S, or P)', + 'threat.enrichments.indicator.x509.version_number': 'Version of x509 format.', + 'threat.enrichments.matched.atomic': 'Matched indicator value', + 'threat.enrichments.matched.field': 'Matched indicator field', + 'threat.enrichments.matched.id': 'Matched indicator identifier', + 'threat.enrichments.matched.index': 'Matched indicator index', + 'threat.enrichments.matched.occurred': 'Date of match', + 'threat.enrichments.matched.type': 'Type of indicator match', + 'threat.feed.dashboard_id': 'Feed dashboard ID.', + 'threat.feed.description': 'Description of the threat feed.', + 'threat.feed.name': 'Name of the threat feed.', + 'threat.feed.reference': 'Reference for the threat feed.', + 'threat.framework': 'Threat classification framework.', + 'threat.group.alias': 'Alias of the group.', + 'threat.group.id': 'ID of the group.', + 'threat.group.name': 'Name of the group.', + 
'threat.group.reference': 'Reference URL of the group.', + 'threat.indicator.as.number': 'Unique number allocated to the autonomous system.', + 'threat.indicator.as.organization.name': 'Organization name.', + 'threat.indicator.confidence': 'Indicator confidence rating', + 'threat.indicator.description': 'Indicator description', + 'threat.indicator.email.address': 'Indicator email address', + 'threat.indicator.file.accessed': 'Last time the file was accessed.', + 'threat.indicator.file.attributes': 'Array of file attributes.', + 'threat.indicator.file.code_signature.digest_algorithm': + 'Hashing algorithm used to sign the process.', + 'threat.indicator.file.code_signature.exists': 'Boolean to capture if a signature is present.', + 'threat.indicator.file.code_signature.signing_id': 'The identifier used to sign the process.', + 'threat.indicator.file.code_signature.status': + 'Additional information about the certificate status.', + 'threat.indicator.file.code_signature.subject_name': 'Subject name of the code signer', + 'threat.indicator.file.code_signature.team_id': 'The team identifier used to sign the process.', + 'threat.indicator.file.code_signature.timestamp': 'When the signature was generated and signed.', + 'threat.indicator.file.code_signature.trusted': + 'Stores the trust status of the certificate chain.', + 'threat.indicator.file.code_signature.valid': + 'Boolean to capture if the digital signature is verified against the binary content.', + 'threat.indicator.file.created': 'File creation time.', + 'threat.indicator.file.ctime': 'Last time the file attributes or metadata changed.', + 'threat.indicator.file.device': 'Device that is the source of the file.', + 'threat.indicator.file.directory': 'Directory where the file is located.', + 'threat.indicator.file.drive_letter': 'Drive letter where the file is located.', + 'threat.indicator.file.elf.architecture': 'Machine architecture of the ELF file.', + 'threat.indicator.file.elf.byte_order': 'Byte sequence of 
ELF file.', + 'threat.indicator.file.elf.cpu_type': 'CPU type of the ELF file.', + 'threat.indicator.file.elf.creation_date': 'Build or compile date.', + 'threat.indicator.file.elf.exports': 'List of exported element names and types.', + 'threat.indicator.file.elf.go_import_hash': 'A hash of the Go language imports in an ELF file.', + 'threat.indicator.file.elf.go_imports': 'List of imported Go language element names and types.', + 'threat.indicator.file.elf.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', + 'threat.indicator.file.elf.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'threat.indicator.file.elf.go_stripped': + 'Whether the file is a stripped or obfuscated Go executable.', + 'threat.indicator.file.elf.header.abi_version': + 'Version of the ELF Application Binary Interface (ABI).', + 'threat.indicator.file.elf.header.class': 'Header class of the ELF file.', + 'threat.indicator.file.elf.header.data': 'Data table of the ELF header.', + 'threat.indicator.file.elf.header.entrypoint': 'Header entrypoint of the ELF file.', + 'threat.indicator.file.elf.header.object_version': '"0x1" for original ELF files.', + 'threat.indicator.file.elf.header.os_abi': 'Application Binary Interface (ABI) of the Linux OS.', + 'threat.indicator.file.elf.header.type': 'Header type of the ELF file.', + 'threat.indicator.file.elf.header.version': 'Version of the ELF header.', + 'threat.indicator.file.elf.import_hash': 'A hash of the imports in an ELF file.', + 'threat.indicator.file.elf.imports': 'List of imported element names and types.', + 'threat.indicator.file.elf.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'threat.indicator.file.elf.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'threat.indicator.file.elf.sections': 'Section 
information of the ELF file.', + 'threat.indicator.file.elf.sections.chi2': 'Chi-square probability distribution of the section.', + 'threat.indicator.file.elf.sections.entropy': 'Shannon entropy calculation from the section.', + 'threat.indicator.file.elf.sections.flags': 'ELF Section List flags.', + 'threat.indicator.file.elf.sections.name': 'ELF Section List name.', + 'threat.indicator.file.elf.sections.physical_offset': 'ELF Section List offset.', + 'threat.indicator.file.elf.sections.physical_size': 'ELF Section List physical size.', + 'threat.indicator.file.elf.sections.type': 'ELF Section List type.', + 'threat.indicator.file.elf.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', + 'threat.indicator.file.elf.sections.virtual_address': 'ELF Section List virtual address.', + 'threat.indicator.file.elf.sections.virtual_size': 'ELF Section List virtual size.', + 'threat.indicator.file.elf.segments': 'ELF object segment list.', + 'threat.indicator.file.elf.segments.sections': 'ELF object segment sections.', + 'threat.indicator.file.elf.segments.type': 'ELF object segment type.', + 'threat.indicator.file.elf.shared_libraries': 'List of shared libraries used by this ELF object.', + 'threat.indicator.file.elf.telfhash': 'telfhash hash for ELF file.', + 'threat.indicator.file.extension': 'File extension, excluding the leading dot.', + 'threat.indicator.file.fork_name': + 'A fork is additional data associated with a filesystem object.', + 'threat.indicator.file.gid': 'Primary group ID (GID) of the file.', + 'threat.indicator.file.group': 'Primary group name of the file.', + 'threat.indicator.file.hash.md5': 'MD5 hash.', + 'threat.indicator.file.hash.sha1': 'SHA1 hash.', + 'threat.indicator.file.hash.sha256': 'SHA256 hash.', + 'threat.indicator.file.hash.sha384': 'SHA384 hash.', + 'threat.indicator.file.hash.sha512': 'SHA512 hash.', + 'threat.indicator.file.hash.ssdeep': 'SSDEEP hash.', + 'threat.indicator.file.hash.tlsh': 'TLSH 
hash.', + 'threat.indicator.file.inode': 'Inode representing the file in the filesystem.', + 'threat.indicator.file.mime_type': 'Media type of file, document, or arrangement of bytes.', + 'threat.indicator.file.mode': 'Mode of the file in octal representation.', + 'threat.indicator.file.mtime': 'Last time the file content was modified.', + 'threat.indicator.file.name': 'Name of the file including the extension, without the directory.', + 'threat.indicator.file.owner': 'File owners username.', + 'threat.indicator.file.path': 'Full path to the file, including the file name.', + 'threat.indicator.file.pe.architecture': 'CPU architecture target for the file.', + 'threat.indicator.file.pe.company': + 'Internal company name of the file, provided at compile-time.', + 'threat.indicator.file.pe.description': + 'Internal description of the file, provided at compile-time.', + 'threat.indicator.file.pe.file_version': 'Process name.', + 'threat.indicator.file.pe.go_import_hash': 'A hash of the Go language imports in a PE file.', + 'threat.indicator.file.pe.go_imports': 'List of imported Go language element names and types.', + 'threat.indicator.file.pe.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', + 'threat.indicator.file.pe.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'threat.indicator.file.pe.go_stripped': + 'Whether the file is a stripped or obfuscated Go executable.', + 'threat.indicator.file.pe.imphash': 'A hash of the imports in a PE file.', + 'threat.indicator.file.pe.import_hash': 'A hash of the imports in a PE file.', + 'threat.indicator.file.pe.imports': 'List of imported element names and types.', + 'threat.indicator.file.pe.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'threat.indicator.file.pe.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element 
names and types.', + 'threat.indicator.file.pe.original_file_name': + 'Internal name of the file, provided at compile-time.', + 'threat.indicator.file.pe.pehash': + 'A hash of the PE header and data from one or more PE sections.', + 'threat.indicator.file.pe.product': + 'Internal product name of the file, provided at compile-time.', + 'threat.indicator.file.pe.sections': 'Section information of the PE file.', + 'threat.indicator.file.pe.sections.entropy': 'Shannon entropy calculation from the section.', + 'threat.indicator.file.pe.sections.name': 'PE Section List name.', + 'threat.indicator.file.pe.sections.physical_size': 'PE Section List physical size.', + 'threat.indicator.file.pe.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', + 'threat.indicator.file.pe.sections.virtual_size': + 'PE Section List virtual size. This is always the same as `physical_size`.', + 'threat.indicator.file.size': 'File size in bytes.', + 'threat.indicator.file.target_path': 'Target path for symlinks.', + 'threat.indicator.file.type': 'File type (file, dir, or symlink).', + 'threat.indicator.file.uid': 'The user ID (UID) or security identifier (SID) of the file owner.', + 'threat.indicator.file.x509.alternative_names': 'List of subject alternative names (SAN).', + 'threat.indicator.file.x509.issuer.common_name': + 'List of common name (CN) of issuing certificate authority.', + 'threat.indicator.file.x509.issuer.country': 'List of country (C) codes', + 'threat.indicator.file.x509.issuer.distinguished_name': + 'Distinguished name (DN) of issuing certificate authority.', + 'threat.indicator.file.x509.issuer.locality': 'List of locality names (L)', + 'threat.indicator.file.x509.issuer.organization': + 'List of organizations (O) of issuing certificate authority.', + 'threat.indicator.file.x509.issuer.organizational_unit': + 'List of organizational units (OU) of issuing certificate authority.', + 'threat.indicator.file.x509.issuer.state_or_province': + 
'List of state or province names (ST, S, or P)', + 'threat.indicator.file.x509.not_after': + 'Time at which the certificate is no longer considered valid.', + 'threat.indicator.file.x509.not_before': + 'Time at which the certificate is first considered valid.', + 'threat.indicator.file.x509.public_key_algorithm': 'Algorithm used to generate the public key.', + 'threat.indicator.file.x509.public_key_curve': + 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', + 'threat.indicator.file.x509.public_key_exponent': + 'Exponent used to derive the public key. This is algorithm specific.', + 'threat.indicator.file.x509.public_key_size': 'The size of the public key space in bits.', + 'threat.indicator.file.x509.serial_number': + 'Unique serial number issued by the certificate authority.', + 'threat.indicator.file.x509.signature_algorithm': + 'Identifier for certificate signature algorithm.', + 'threat.indicator.file.x509.subject.common_name': 'List of common names (CN) of subject.', + 'threat.indicator.file.x509.subject.country': 'List of country (C) code', + 'threat.indicator.file.x509.subject.distinguished_name': + 'Distinguished name (DN) of the certificate subject entity.', + 'threat.indicator.file.x509.subject.locality': 'List of locality names (L)', + 'threat.indicator.file.x509.subject.organization': 'List of organizations (O) of subject.', + 'threat.indicator.file.x509.subject.organizational_unit': + 'List of organizational units (OU) of subject.', + 'threat.indicator.file.x509.subject.state_or_province': + 'List of state or province names (ST, S, or P)', + 'threat.indicator.file.x509.version_number': 'Version of x509 format.', + 'threat.indicator.first_seen': 'Date/time indicator was first reported.', + 'threat.indicator.geo.city_name': 'City name.', + 'threat.indicator.geo.continent_code': 'Continent code.', + 'threat.indicator.geo.continent_name': 'Name of the continent.', + 'threat.indicator.geo.country_iso_code': 'Country 
ISO code.', + 'threat.indicator.geo.country_name': 'Country name.', + 'threat.indicator.geo.location': 'Longitude and latitude.', + 'threat.indicator.geo.name': 'User-defined description of a location.', + 'threat.indicator.geo.postal_code': 'Postal code.', + 'threat.indicator.geo.region_iso_code': 'Region ISO code.', + 'threat.indicator.geo.region_name': 'Region name.', + 'threat.indicator.geo.timezone': 'Time zone.', + 'threat.indicator.ip': 'Indicator IP address', + 'threat.indicator.last_seen': 'Date/time indicator was last reported.', + 'threat.indicator.marking.tlp': 'Indicator TLP marking', + 'threat.indicator.marking.tlp_version': 'Indicator TLP version', + 'threat.indicator.modified_at': 'Date/time indicator was last updated.', + 'threat.indicator.name': 'Indicator display name', + 'threat.indicator.port': 'Indicator port', + 'threat.indicator.provider': 'Indicator provider', + 'threat.indicator.reference': 'Indicator reference URL', + 'threat.indicator.registry.data.bytes': 'Original bytes written with base64 encoding.', + 'threat.indicator.registry.data.strings': + 'List of strings representing what was written to the registry.', + 'threat.indicator.registry.data.type': 'Standard registry type for encoding contents', + 'threat.indicator.registry.hive': 'Abbreviated name for the hive.', + 'threat.indicator.registry.key': 'Hive-relative path of keys.', + 'threat.indicator.registry.path': 'Full path, including hive, key and value', + 'threat.indicator.registry.value': 'Name of the value written.', + 'threat.indicator.scanner_stats': 'Scanner statistics', + 'threat.indicator.sightings': 'Number of times indicator observed', + 'threat.indicator.type': 'Type of indicator', + 'threat.indicator.url.domain': 'Domain of the url.', + 'threat.indicator.url.extension': + 'File extension from the request url, excluding the leading dot.', + 'threat.indicator.url.fragment': 'Portion of the url after the `#`.', + 'threat.indicator.url.full': 'Full unparsed URL.', + 
'threat.indicator.url.original': 'Unmodified original url as seen in the event source.', + 'threat.indicator.url.password': 'Password of the request.', + 'threat.indicator.url.path': 'Path of the request, such as "/search".', + 'threat.indicator.url.port': 'Port of the request, such as 443.', + 'threat.indicator.url.query': 'Query string of the request.', + 'threat.indicator.url.registered_domain': + 'The highest registered url domain, stripped of the subdomain.', + 'threat.indicator.url.scheme': 'Scheme of the url.', + 'threat.indicator.url.subdomain': 'The subdomain of the domain.', + 'threat.indicator.url.top_level_domain': 'The effective top level domain (com, org, net, co.uk).', + 'threat.indicator.url.username': 'Username of the request.', + 'threat.indicator.x509.alternative_names': 'List of subject alternative names (SAN).', + 'threat.indicator.x509.issuer.common_name': + 'List of common name (CN) of issuing certificate authority.', + 'threat.indicator.x509.issuer.country': 'List of country (C) codes', + 'threat.indicator.x509.issuer.distinguished_name': + 'Distinguished name (DN) of issuing certificate authority.', + 'threat.indicator.x509.issuer.locality': 'List of locality names (L)', + 'threat.indicator.x509.issuer.organization': + 'List of organizations (O) of issuing certificate authority.', + 'threat.indicator.x509.issuer.organizational_unit': + 'List of organizational units (OU) of issuing certificate authority.', + 'threat.indicator.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', + 'threat.indicator.x509.not_after': 'Time at which the certificate is no longer considered valid.', + 'threat.indicator.x509.not_before': 'Time at which the certificate is first considered valid.', + 'threat.indicator.x509.public_key_algorithm': 'Algorithm used to generate the public key.', + 'threat.indicator.x509.public_key_curve': + 'The curve used by the elliptic curve public key algorithm. 
This is algorithm specific.', + 'threat.indicator.x509.public_key_exponent': + 'Exponent used to derive the public key. This is algorithm specific.', + 'threat.indicator.x509.public_key_size': 'The size of the public key space in bits.', + 'threat.indicator.x509.serial_number': + 'Unique serial number issued by the certificate authority.', + 'threat.indicator.x509.signature_algorithm': 'Identifier for certificate signature algorithm.', + 'threat.indicator.x509.subject.common_name': 'List of common names (CN) of subject.', + 'threat.indicator.x509.subject.country': 'List of country (C) code', + 'threat.indicator.x509.subject.distinguished_name': + 'Distinguished name (DN) of the certificate subject entity.', + 'threat.indicator.x509.subject.locality': 'List of locality names (L)', + 'threat.indicator.x509.subject.organization': 'List of organizations (O) of subject.', + 'threat.indicator.x509.subject.organizational_unit': + 'List of organizational units (OU) of subject.', + 'threat.indicator.x509.subject.state_or_province': + 'List of state or province names (ST, S, or P)', + 'threat.indicator.x509.version_number': 'Version of x509 format.', + 'threat.software.alias': 'Alias of the software', + 'threat.software.id': 'ID of the software', + 'threat.software.name': 'Name of the software.', + 'threat.software.platforms': 'Platforms of the software.', + 'threat.software.reference': 'Software reference URL.', + 'threat.software.type': 'Software type.', + 'threat.tactic.id': 'Threat tactic id.', + 'threat.tactic.name': 'Threat tactic.', + 'threat.tactic.reference': 'Threat tactic URL reference.', + 'threat.technique.id': 'Threat technique id.', + 'threat.technique.name': 'Threat technique name.', + 'threat.technique.reference': 'Threat technique URL reference.', + 'threat.technique.subtechnique.id': 'Threat subtechnique id.', + 'threat.technique.subtechnique.name': 'Threat subtechnique name.', + 'threat.technique.subtechnique.reference': 'Threat subtechnique URL 
reference.', + 'tls.cipher': 'String indicating the cipher used during the current connection.', + 'tls.client.certificate': 'PEM-encoded stand-alone certificate offered by the client.', + 'tls.client.certificate_chain': + 'Array of PEM-encoded certificates that make up the certificate chain offered by the client.', + 'tls.client.hash.md5': + 'Certificate fingerprint using the MD5 digest of DER-encoded version of certificate offered by the client.', + 'tls.client.hash.sha1': + 'Certificate fingerprint using the SHA1 digest of DER-encoded version of certificate offered by the client.', + 'tls.client.hash.sha256': + 'Certificate fingerprint using the SHA256 digest of DER-encoded version of certificate offered by the client.', + 'tls.client.issuer': + 'Distinguished name of subject of the issuer of the x.509 certificate presented by the client.', + 'tls.client.ja3': + 'A hash that identifies clients based on how they perform an SSL/TLS handshake.', + 'tls.client.not_after': + 'Date/Time indicating when client certificate is no longer considered valid.', + 'tls.client.not_before': + 'Date/Time indicating when client certificate is first considered valid.', + 'tls.client.server_name': 'Hostname the client is trying to connect to. 
Also called the SNI.', + 'tls.client.subject': + 'Distinguished name of subject of the x.509 certificate presented by the client.', + 'tls.client.supported_ciphers': 'Array of ciphers offered by the client during the client hello.', + 'tls.client.x509.alternative_names': 'List of subject alternative names (SAN).', + 'tls.client.x509.issuer.common_name': + 'List of common name (CN) of issuing certificate authority.', + 'tls.client.x509.issuer.country': 'List of country (C) codes', + 'tls.client.x509.issuer.distinguished_name': + 'Distinguished name (DN) of issuing certificate authority.', + 'tls.client.x509.issuer.locality': 'List of locality names (L)', + 'tls.client.x509.issuer.organization': + 'List of organizations (O) of issuing certificate authority.', + 'tls.client.x509.issuer.organizational_unit': + 'List of organizational units (OU) of issuing certificate authority.', + 'tls.client.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', + 'tls.client.x509.not_after': 'Time at which the certificate is no longer considered valid.', + 'tls.client.x509.not_before': 'Time at which the certificate is first considered valid.', + 'tls.client.x509.public_key_algorithm': 'Algorithm used to generate the public key.', + 'tls.client.x509.public_key_curve': + 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', + 'tls.client.x509.public_key_exponent': + 'Exponent used to derive the public key. 
This is algorithm specific.', + 'tls.client.x509.public_key_size': 'The size of the public key space in bits.', + 'tls.client.x509.serial_number': 'Unique serial number issued by the certificate authority.', + 'tls.client.x509.signature_algorithm': 'Identifier for certificate signature algorithm.', + 'tls.client.x509.subject.common_name': 'List of common names (CN) of subject.', + 'tls.client.x509.subject.country': 'List of country (C) code', + 'tls.client.x509.subject.distinguished_name': + 'Distinguished name (DN) of the certificate subject entity.', + 'tls.client.x509.subject.locality': 'List of locality names (L)', + 'tls.client.x509.subject.organization': 'List of organizations (O) of subject.', + 'tls.client.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', + 'tls.client.x509.subject.state_or_province': 'List of state or province names (ST, S, or P)', + 'tls.client.x509.version_number': 'Version of x509 format.', + 'tls.curve': 'String indicating the curve used for the given cipher, when applicable.', + 'tls.established': + 'Boolean flag indicating if the TLS negotiation was successful and transitioned to an encrypted tunnel.', + 'tls.next_protocol': 'String indicating the protocol being tunneled.', + 'tls.resumed': + 'Boolean flag indicating if this TLS connection was resumed from an existing TLS negotiation.', + 'tls.server.certificate': 'PEM-encoded stand-alone certificate offered by the server.', + 'tls.server.certificate_chain': + 'Array of PEM-encoded certificates that make up the certificate chain offered by the server.', + 'tls.server.hash.md5': + 'Certificate fingerprint using the MD5 digest of DER-encoded version of certificate offered by the server.', + 'tls.server.hash.sha1': + 'Certificate fingerprint using the SHA1 digest of DER-encoded version of certificate offered by the server.', + 'tls.server.hash.sha256': + 'Certificate fingerprint using the SHA256 digest of DER-encoded version of certificate offered by 
the server.', + 'tls.server.issuer': 'Subject of the issuer of the x.509 certificate presented by the server.', + 'tls.server.ja3s': + 'A hash that identifies servers based on how they perform an SSL/TLS handshake.', + 'tls.server.not_after': + 'Timestamp indicating when server certificate is no longer considered valid.', + 'tls.server.not_before': + 'Timestamp indicating when server certificate is first considered valid.', + 'tls.server.subject': 'Subject of the x.509 certificate presented by the server.', + 'tls.server.x509.alternative_names': 'List of subject alternative names (SAN).', + 'tls.server.x509.issuer.common_name': + 'List of common name (CN) of issuing certificate authority.', + 'tls.server.x509.issuer.country': 'List of country (C) codes', + 'tls.server.x509.issuer.distinguished_name': + 'Distinguished name (DN) of issuing certificate authority.', + 'tls.server.x509.issuer.locality': 'List of locality names (L)', + 'tls.server.x509.issuer.organization': + 'List of organizations (O) of issuing certificate authority.', + 'tls.server.x509.issuer.organizational_unit': + 'List of organizational units (OU) of issuing certificate authority.', + 'tls.server.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', + 'tls.server.x509.not_after': 'Time at which the certificate is no longer considered valid.', + 'tls.server.x509.not_before': 'Time at which the certificate is first considered valid.', + 'tls.server.x509.public_key_algorithm': 'Algorithm used to generate the public key.', + 'tls.server.x509.public_key_curve': + 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', + 'tls.server.x509.public_key_exponent': + 'Exponent used to derive the public key. 
This is algorithm specific.', + 'tls.server.x509.public_key_size': 'The size of the public key space in bits.', + 'tls.server.x509.serial_number': 'Unique serial number issued by the certificate authority.', + 'tls.server.x509.signature_algorithm': 'Identifier for certificate signature algorithm.', + 'tls.server.x509.subject.common_name': 'List of common names (CN) of subject.', + 'tls.server.x509.subject.country': 'List of country (C) code', + 'tls.server.x509.subject.distinguished_name': + 'Distinguished name (DN) of the certificate subject entity.', + 'tls.server.x509.subject.locality': 'List of locality names (L)', + 'tls.server.x509.subject.organization': 'List of organizations (O) of subject.', + 'tls.server.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', + 'tls.server.x509.subject.state_or_province': 'List of state or province names (ST, S, or P)', + 'tls.server.x509.version_number': 'Version of x509 format.', + 'tls.version': 'Numeric part of the version parsed from the original string.', + 'tls.version_protocol': 'Normalized lowercase protocol name parsed from original string.', + 'trace.id': 'Unique identifier of the trace.', + 'transaction.id': 'Unique identifier of the transaction within the scope of its trace.', + 'url.domain': 'Domain of the url.', + 'url.extension': 'File extension from the request url, excluding the leading dot.', + 'url.fragment': 'Portion of the url after the `#`.', + 'url.full': 'Full unparsed URL.', + 'url.original': 'Unmodified original url as seen in the event source.', + 'url.password': 'Password of the request.', + 'url.path': 'Path of the request, such as "/search".', + 'url.port': 'Port of the request, such as 443.', + 'url.query': 'Query string of the request.', + 'url.registered_domain': 'The highest registered url domain, stripped of the subdomain.', + 'url.scheme': 'Scheme of the url.', + 'url.subdomain': 'The subdomain of the domain.', + 'url.top_level_domain': 'The effective top 
level domain (com, org, net, co.uk).', + 'url.username': 'Username of the request.', + 'user.changes.domain': 'Name of the directory the user is a member of.', + 'user.changes.email': 'User email address.', + 'user.changes.full_name': 'Users full name, if available.', + 'user.changes.group.domain': 'Name of the directory the group is a member of.', + 'user.changes.group.id': 'Unique identifier for the group on the system/platform.', + 'user.changes.group.name': 'Name of the group.', + 'user.changes.hash': 'Unique user hash to correlate information for a user in anonymized form.', + 'user.changes.id': 'Unique identifier of the user.', + 'user.changes.name': 'Short name or login of the user.', + 'user.changes.roles': 'Array of user roles at the time of the event.', + 'user.domain': 'Name of the directory the user is a member of.', + 'user.effective.domain': 'Name of the directory the user is a member of.', + 'user.effective.email': 'User email address.', + 'user.effective.full_name': 'Users full name, if available.', + 'user.effective.group.domain': 'Name of the directory the group is a member of.', + 'user.effective.group.id': 'Unique identifier for the group on the system/platform.', + 'user.effective.group.name': 'Name of the group.', + 'user.effective.hash': 'Unique user hash to correlate information for a user in anonymized form.', + 'user.effective.id': 'Unique identifier of the user.', + 'user.effective.name': 'Short name or login of the user.', + 'user.effective.roles': 'Array of user roles at the time of the event.', + 'user.email': 'User email address.', + 'user.full_name': 'Users full name, if available.', + 'user.group.domain': 'Name of the directory the group is a member of.', + 'user.group.id': 'Unique identifier for the group on the system/platform.', + 'user.group.name': 'Name of the group.', + 'user.hash': 'Unique user hash to correlate information for a user in anonymized form.', + 'user.id': 'Unique identifier of the user.', + 'user.name': 'Short 
name or login of the user.', + 'user.risk.calculated_level': + 'A risk classification level calculated by an internal system as part of entity analytics and entity risk scoring.', + 'user.risk.calculated_score': + 'A risk classification score calculated by an internal system as part of entity analytics and entity risk scoring.', + 'user.risk.calculated_score_norm': 'A normalized risk score calculated by an internal system.', + 'user.risk.static_level': + 'A risk classification level obtained from outside the system, such as from some external Threat Intelligence Platform.', + 'user.risk.static_score': + 'A risk classification score obtained from outside the system, such as from some external Threat Intelligence Platform.', + 'user.risk.static_score_norm': 'A normalized risk score calculated by an external system.', + 'user.roles': 'Array of user roles at the time of the event.', + 'user.target.domain': 'Name of the directory the user is a member of.', + 'user.target.email': 'User email address.', + 'user.target.full_name': 'Users full name, if available.', + 'user.target.group.domain': 'Name of the directory the group is a member of.', + 'user.target.group.id': 'Unique identifier for the group on the system/platform.', + 'user.target.group.name': 'Name of the group.', + 'user.target.hash': 'Unique user hash to correlate information for a user in anonymized form.', + 'user.target.id': 'Unique identifier of the user.', + 'user.target.name': 'Short name or login of the user.', + 'user.target.roles': 'Array of user roles at the time of the event.', + 'user_agent.device.name': 'Name of the device.', + 'user_agent.name': 'Name of the user agent.', + 'user_agent.original': 'Unparsed user_agent string.', + 'user_agent.os.family': 'OS family (such as redhat, debian, freebsd, windows).', + 'user_agent.os.full': 'Operating system name, including the version or code name.', + 'user_agent.os.kernel': 'Operating system kernel version as a raw string.', + 'user_agent.os.name': 
'Operating system name, without the version.', + 'user_agent.os.platform': 'Operating system platform (such centos, ubuntu, windows).', + 'user_agent.os.type': + 'Which commercial OS family (one of: linux, macos, unix, windows, ios or android).', + 'user_agent.os.version': 'Operating system version as a raw string.', + 'user_agent.version': 'Version of the user agent.', + 'volume.bus_type': 'Bus type of the device.', + 'volume.default_access': 'Bus type of the device.', + 'volume.device_name': 'Device name of the volume.', + 'volume.device_type': 'Volume device type.', + 'volume.dos_name': 'DOS name of the device.', + 'volume.file_system_type': 'Volume device file system type.', + 'volume.mount_name': 'Mount name of the volume.', + 'volume.nt_name': 'NT name of the device.', + 'volume.product_id': 'ProductID of the device.', + 'volume.product_name': 'Produce name of the volume.', + 'volume.removable': 'Indicates if the volume is removable.', + 'volume.serial_number': 'Serial number of the device.', + 'volume.size': 'Size of the volume device in bytes.', + 'volume.vendor_id': 'VendorID of the device.', + 'volume.vendor_name': 'Vendor name of the device.', + 'volume.writable': 'Indicates if the volume is writable.', + 'vulnerability.category': 'Category of a vulnerability.', + 'vulnerability.classification': 'Classification of the vulnerability.', + 'vulnerability.description': 'Description of the vulnerability.', + 'vulnerability.enumeration': 'Identifier of the vulnerability.', + 'vulnerability.id': 'ID of the vulnerability.', + 'vulnerability.reference': 'Reference of the vulnerability.', + 'vulnerability.report_id': 'Scan identification number.', + 'vulnerability.scanner.vendor': 'Name of the scanner vendor.', + 'vulnerability.score.base': 'Vulnerability Base score.', + 'vulnerability.score.environmental': 'Vulnerability Environmental score.', + 'vulnerability.score.temporal': 'Vulnerability Temporal score.', + 'vulnerability.score.version': 'CVSS version.', + 
'vulnerability.severity': 'Severity of the vulnerability.', +}; + +export const ECS_TYPES: EcsFields = { + '@timestamp': 'date', + 'agent.build.original': 'keyword', + 'agent.ephemeral_id': 'keyword', + 'agent.id': 'keyword', + 'agent.name': 'keyword', + 'agent.type': 'keyword', + 'agent.version': 'keyword', + 'client.address': 'keyword', + 'client.as.number': 'long', + 'client.as.organization.name': 'keyword', + 'client.bytes': 'long', + 'client.domain': 'keyword', + 'client.geo.city_name': 'keyword', + 'client.geo.continent_code': 'keyword', + 'client.geo.continent_name': 'keyword', + 'client.geo.country_iso_code': 'keyword', + 'client.geo.country_name': 'keyword', + 'client.geo.location': 'geo_point', + 'client.geo.name': 'keyword', + 'client.geo.postal_code': 'keyword', + 'client.geo.region_iso_code': 'keyword', + 'client.geo.region_name': 'keyword', + 'client.geo.timezone': 'keyword', + 'client.ip': 'ip', + 'client.mac': 'keyword', + 'client.nat.ip': 'ip', + 'client.nat.port': 'long', + 'client.packets': 'long', + 'client.port': 'long', + 'client.registered_domain': 'keyword', + 'client.subdomain': 'keyword', + 'client.top_level_domain': 'keyword', + 'client.user.domain': 'keyword', + 'client.user.email': 'keyword', + 'client.user.full_name': 'keyword', + 'client.user.group.domain': 'keyword', + 'client.user.group.id': 'keyword', + 'client.user.group.name': 'keyword', + 'client.user.hash': 'keyword', + 'client.user.id': 'keyword', + 'client.user.name': 'keyword', + 'client.user.roles': 'keyword', + 'cloud.account.id': 'keyword', + 'cloud.account.name': 'keyword', + 'cloud.availability_zone': 'keyword', + 'cloud.instance.id': 'keyword', + 'cloud.instance.name': 'keyword', + 'cloud.machine.type': 'keyword', + 'cloud.origin.account.id': 'keyword', + 'cloud.origin.account.name': 'keyword', + 'cloud.origin.availability_zone': 'keyword', + 'cloud.origin.instance.id': 'keyword', + 'cloud.origin.instance.name': 'keyword', + 'cloud.origin.machine.type': 'keyword', + 
'cloud.origin.project.id': 'keyword', + 'cloud.origin.project.name': 'keyword', + 'cloud.origin.provider': 'keyword', + 'cloud.origin.region': 'keyword', + 'cloud.origin.service.name': 'keyword', + 'cloud.project.id': 'keyword', + 'cloud.project.name': 'keyword', + 'cloud.provider': 'keyword', + 'cloud.region': 'keyword', + 'cloud.service.name': 'keyword', + 'cloud.target.account.id': 'keyword', + 'cloud.target.account.name': 'keyword', + 'cloud.target.availability_zone': 'keyword', + 'cloud.target.instance.id': 'keyword', + 'cloud.target.instance.name': 'keyword', + 'cloud.target.machine.type': 'keyword', + 'cloud.target.project.id': 'keyword', + 'cloud.target.project.name': 'keyword', + 'cloud.target.provider': 'keyword', + 'cloud.target.region': 'keyword', + 'cloud.target.service.name': 'keyword', + 'container.cpu.usage': 'scaled_float', + 'container.disk.read.bytes': 'long', + 'container.disk.write.bytes': 'long', + 'container.id': 'keyword', + 'container.image.hash.all': 'keyword', + 'container.image.name': 'keyword', + 'container.image.tag': 'keyword', + 'container.labels': 'object', + 'container.memory.usage': 'scaled_float', + 'container.name': 'keyword', + 'container.network.egress.bytes': 'long', + 'container.network.ingress.bytes': 'long', + 'container.runtime': 'keyword', + 'container.security_context.privileged': 'boolean', + 'data_stream.dataset': 'constant_keyword', + 'data_stream.namespace': 'constant_keyword', + 'data_stream.type': 'constant_keyword', + 'destination.address': 'keyword', + 'destination.as.number': 'long', + 'destination.as.organization.name': 'keyword', + 'destination.bytes': 'long', + 'destination.domain': 'keyword', + 'destination.geo.city_name': 'keyword', + 'destination.geo.continent_code': 'keyword', + 'destination.geo.continent_name': 'keyword', + 'destination.geo.country_iso_code': 'keyword', + 'destination.geo.country_name': 'keyword', + 'destination.geo.location': 'geo_point', + 'destination.geo.name': 'keyword', + 
'destination.geo.postal_code': 'keyword', + 'destination.geo.region_iso_code': 'keyword', + 'destination.geo.region_name': 'keyword', + 'destination.geo.timezone': 'keyword', + 'destination.ip': 'ip', + 'destination.mac': 'keyword', + 'destination.nat.ip': 'ip', + 'destination.nat.port': 'long', + 'destination.packets': 'long', + 'destination.port': 'long', + 'destination.registered_domain': 'keyword', + 'destination.subdomain': 'keyword', + 'destination.top_level_domain': 'keyword', + 'destination.user.domain': 'keyword', + 'destination.user.email': 'keyword', + 'destination.user.full_name': 'keyword', + 'destination.user.group.domain': 'keyword', + 'destination.user.group.id': 'keyword', + 'destination.user.group.name': 'keyword', + 'destination.user.hash': 'keyword', + 'destination.user.id': 'keyword', + 'destination.user.name': 'keyword', + 'destination.user.roles': 'keyword', + 'device.id': 'keyword', + 'device.manufacturer': 'keyword', + 'device.model.identifier': 'keyword', + 'device.model.name': 'keyword', + 'dll.code_signature.digest_algorithm': 'keyword', + 'dll.code_signature.exists': 'boolean', + 'dll.code_signature.signing_id': 'keyword', + 'dll.code_signature.status': 'keyword', + 'dll.code_signature.subject_name': 'keyword', + 'dll.code_signature.team_id': 'keyword', + 'dll.code_signature.timestamp': 'date', + 'dll.code_signature.trusted': 'boolean', + 'dll.code_signature.valid': 'boolean', + 'dll.hash.md5': 'keyword', + 'dll.hash.sha1': 'keyword', + 'dll.hash.sha256': 'keyword', + 'dll.hash.sha384': 'keyword', + 'dll.hash.sha512': 'keyword', + 'dll.hash.ssdeep': 'keyword', + 'dll.hash.tlsh': 'keyword', + 'dll.name': 'keyword', + 'dll.path': 'keyword', + 'dll.pe.architecture': 'keyword', + 'dll.pe.company': 'keyword', + 'dll.pe.description': 'keyword', + 'dll.pe.file_version': 'keyword', + 'dll.pe.go_import_hash': 'keyword', + 'dll.pe.go_imports': 'flattened', + 'dll.pe.go_imports_names_entropy': 'long', + 'dll.pe.go_imports_names_var_entropy': 
'long', + 'dll.pe.go_stripped': 'boolean', + 'dll.pe.imphash': 'keyword', + 'dll.pe.import_hash': 'keyword', + 'dll.pe.imports': 'flattened', + 'dll.pe.imports_names_entropy': 'long', + 'dll.pe.imports_names_var_entropy': 'long', + 'dll.pe.original_file_name': 'keyword', + 'dll.pe.pehash': 'keyword', + 'dll.pe.product': 'keyword', + 'dll.pe.sections': 'nested', + 'dll.pe.sections.entropy': 'long', + 'dll.pe.sections.name': 'keyword', + 'dll.pe.sections.physical_size': 'long', + 'dll.pe.sections.var_entropy': 'long', + 'dll.pe.sections.virtual_size': 'long', + 'dns.answers': 'object', + 'dns.answers.class': 'keyword', + 'dns.answers.data': 'keyword', + 'dns.answers.name': 'keyword', + 'dns.answers.ttl': 'long', + 'dns.answers.type': 'keyword', + 'dns.header_flags': 'keyword', + 'dns.id': 'keyword', + 'dns.op_code': 'keyword', + 'dns.question.class': 'keyword', + 'dns.question.name': 'keyword', + 'dns.question.registered_domain': 'keyword', + 'dns.question.subdomain': 'keyword', + 'dns.question.top_level_domain': 'keyword', + 'dns.question.type': 'keyword', + 'dns.resolved_ip': 'ip', + 'dns.response_code': 'keyword', + 'dns.type': 'keyword', + 'ecs.version': 'keyword', + 'email.attachments': 'nested', + 'email.attachments.file.extension': 'keyword', + 'email.attachments.file.hash.md5': 'keyword', + 'email.attachments.file.hash.sha1': 'keyword', + 'email.attachments.file.hash.sha256': 'keyword', + 'email.attachments.file.hash.sha384': 'keyword', + 'email.attachments.file.hash.sha512': 'keyword', + 'email.attachments.file.hash.ssdeep': 'keyword', + 'email.attachments.file.hash.tlsh': 'keyword', + 'email.attachments.file.mime_type': 'keyword', + 'email.attachments.file.name': 'keyword', + 'email.attachments.file.size': 'long', + 'email.bcc.address': 'keyword', + 'email.cc.address': 'keyword', + 'email.content_type': 'keyword', + 'email.delivery_timestamp': 'date', + 'email.direction': 'keyword', + 'email.from.address': 'keyword', + 'email.local_id': 'keyword', + 
'email.message_id': 'wildcard', + 'email.origination_timestamp': 'date', + 'email.reply_to.address': 'keyword', + 'email.sender.address': 'keyword', + 'email.subject': 'keyword', + 'email.to.address': 'keyword', + 'email.x_mailer': 'keyword', + 'error.code': 'keyword', + 'error.id': 'keyword', + 'error.message': 'match_only_text', + 'error.stack_trace': 'wildcard', + 'error.type': 'keyword', + 'event.action': 'keyword', + 'event.agent_id_status': 'keyword', + 'event.category': 'keyword', + 'event.code': 'keyword', + 'event.created': 'date', + 'event.dataset': 'keyword', + 'event.duration': 'long', + 'event.end': 'date', + 'event.hash': 'keyword', + 'event.id': 'keyword', + 'event.ingested': 'date', + 'event.kind': 'keyword', + 'event.module': 'keyword', + 'event.original': 'keyword', + 'event.outcome': 'keyword', + 'event.provider': 'keyword', + 'event.reason': 'keyword', + 'event.reference': 'keyword', + 'event.risk_score': 'float', + 'event.risk_score_norm': 'float', + 'event.sequence': 'long', + 'event.severity': 'long', + 'event.start': 'date', + 'event.timezone': 'keyword', + 'event.type': 'keyword', + 'event.url': 'keyword', + 'faas.coldstart': 'boolean', + 'faas.execution': 'keyword', + 'faas.id': 'keyword', + 'faas.name': 'keyword', + 'faas.trigger.request_id': 'keyword', + 'faas.trigger.type': 'keyword', + 'faas.version': 'keyword', + 'file.accessed': 'date', + 'file.attributes': 'keyword', + 'file.code_signature.digest_algorithm': 'keyword', + 'file.code_signature.exists': 'boolean', + 'file.code_signature.signing_id': 'keyword', + 'file.code_signature.status': 'keyword', + 'file.code_signature.subject_name': 'keyword', + 'file.code_signature.team_id': 'keyword', + 'file.code_signature.timestamp': 'date', + 'file.code_signature.trusted': 'boolean', + 'file.code_signature.valid': 'boolean', + 'file.created': 'date', + 'file.ctime': 'date', + 'file.device': 'keyword', + 'file.directory': 'keyword', + 'file.drive_letter': 'keyword', + 
'file.elf.architecture': 'keyword', + 'file.elf.byte_order': 'keyword', + 'file.elf.cpu_type': 'keyword', + 'file.elf.creation_date': 'date', + 'file.elf.exports': 'flattened', + 'file.elf.go_import_hash': 'keyword', + 'file.elf.go_imports': 'flattened', + 'file.elf.go_imports_names_entropy': 'long', + 'file.elf.go_imports_names_var_entropy': 'long', + 'file.elf.go_stripped': 'boolean', + 'file.elf.header.abi_version': 'keyword', + 'file.elf.header.class': 'keyword', + 'file.elf.header.data': 'keyword', + 'file.elf.header.entrypoint': 'long', + 'file.elf.header.object_version': 'keyword', + 'file.elf.header.os_abi': 'keyword', + 'file.elf.header.type': 'keyword', + 'file.elf.header.version': 'keyword', + 'file.elf.import_hash': 'keyword', + 'file.elf.imports': 'flattened', + 'file.elf.imports_names_entropy': 'long', + 'file.elf.imports_names_var_entropy': 'long', + 'file.elf.sections': 'nested', + 'file.elf.sections.chi2': 'long', + 'file.elf.sections.entropy': 'long', + 'file.elf.sections.flags': 'keyword', + 'file.elf.sections.name': 'keyword', + 'file.elf.sections.physical_offset': 'keyword', + 'file.elf.sections.physical_size': 'long', + 'file.elf.sections.type': 'keyword', + 'file.elf.sections.var_entropy': 'long', + 'file.elf.sections.virtual_address': 'long', + 'file.elf.sections.virtual_size': 'long', + 'file.elf.segments': 'nested', + 'file.elf.segments.sections': 'keyword', + 'file.elf.segments.type': 'keyword', + 'file.elf.shared_libraries': 'keyword', + 'file.elf.telfhash': 'keyword', + 'file.extension': 'keyword', + 'file.fork_name': 'keyword', + 'file.gid': 'keyword', + 'file.group': 'keyword', + 'file.hash.md5': 'keyword', + 'file.hash.sha1': 'keyword', + 'file.hash.sha256': 'keyword', + 'file.hash.sha384': 'keyword', + 'file.hash.sha512': 'keyword', + 'file.hash.ssdeep': 'keyword', + 'file.hash.tlsh': 'keyword', + 'file.inode': 'keyword', + 'file.macho.go_import_hash': 'keyword', + 'file.macho.go_imports': 'flattened', + 
'file.macho.go_imports_names_entropy': 'long', + 'file.macho.go_imports_names_var_entropy': 'long', + 'file.macho.go_stripped': 'boolean', + 'file.macho.import_hash': 'keyword', + 'file.macho.imports': 'flattened', + 'file.macho.imports_names_entropy': 'long', + 'file.macho.imports_names_var_entropy': 'long', + 'file.macho.sections': 'nested', + 'file.macho.sections.entropy': 'long', + 'file.macho.sections.name': 'keyword', + 'file.macho.sections.physical_size': 'long', + 'file.macho.sections.var_entropy': 'long', + 'file.macho.sections.virtual_size': 'long', + 'file.macho.symhash': 'keyword', + 'file.mime_type': 'keyword', + 'file.mode': 'keyword', + 'file.mtime': 'date', + 'file.name': 'keyword', + 'file.owner': 'keyword', + 'file.path': 'keyword', + 'file.pe.architecture': 'keyword', + 'file.pe.company': 'keyword', + 'file.pe.description': 'keyword', + 'file.pe.file_version': 'keyword', + 'file.pe.go_import_hash': 'keyword', + 'file.pe.go_imports': 'flattened', + 'file.pe.go_imports_names_entropy': 'long', + 'file.pe.go_imports_names_var_entropy': 'long', + 'file.pe.go_stripped': 'boolean', + 'file.pe.imphash': 'keyword', + 'file.pe.import_hash': 'keyword', + 'file.pe.imports': 'flattened', + 'file.pe.imports_names_entropy': 'long', + 'file.pe.imports_names_var_entropy': 'long', + 'file.pe.original_file_name': 'keyword', + 'file.pe.pehash': 'keyword', + 'file.pe.product': 'keyword', + 'file.pe.sections': 'nested', + 'file.pe.sections.entropy': 'long', + 'file.pe.sections.name': 'keyword', + 'file.pe.sections.physical_size': 'long', + 'file.pe.sections.var_entropy': 'long', + 'file.pe.sections.virtual_size': 'long', + 'file.size': 'long', + 'file.target_path': 'keyword', + 'file.type': 'keyword', + 'file.uid': 'keyword', + 'file.x509.alternative_names': 'keyword', + 'file.x509.issuer.common_name': 'keyword', + 'file.x509.issuer.country': 'keyword', + 'file.x509.issuer.distinguished_name': 'keyword', + 'file.x509.issuer.locality': 'keyword', + 
'file.x509.issuer.organization': 'keyword', + 'file.x509.issuer.organizational_unit': 'keyword', + 'file.x509.issuer.state_or_province': 'keyword', + 'file.x509.not_after': 'date', + 'file.x509.not_before': 'date', + 'file.x509.public_key_algorithm': 'keyword', + 'file.x509.public_key_curve': 'keyword', + 'file.x509.public_key_exponent': 'long', + 'file.x509.public_key_size': 'long', + 'file.x509.serial_number': 'keyword', + 'file.x509.signature_algorithm': 'keyword', + 'file.x509.subject.common_name': 'keyword', + 'file.x509.subject.country': 'keyword', + 'file.x509.subject.distinguished_name': 'keyword', + 'file.x509.subject.locality': 'keyword', + 'file.x509.subject.organization': 'keyword', + 'file.x509.subject.organizational_unit': 'keyword', + 'file.x509.subject.state_or_province': 'keyword', + 'file.x509.version_number': 'keyword', + 'group.domain': 'keyword', + 'group.id': 'keyword', + 'group.name': 'keyword', + 'host.architecture': 'keyword', + 'host.boot.id': 'keyword', + 'host.cpu.usage': 'scaled_float', + 'host.disk.read.bytes': 'long', + 'host.disk.write.bytes': 'long', + 'host.domain': 'keyword', + 'host.geo.city_name': 'keyword', + 'host.geo.continent_code': 'keyword', + 'host.geo.continent_name': 'keyword', + 'host.geo.country_iso_code': 'keyword', + 'host.geo.country_name': 'keyword', + 'host.geo.location': 'geo_point', + 'host.geo.name': 'keyword', + 'host.geo.postal_code': 'keyword', + 'host.geo.region_iso_code': 'keyword', + 'host.geo.region_name': 'keyword', + 'host.geo.timezone': 'keyword', + 'host.hostname': 'keyword', + 'host.id': 'keyword', + 'host.ip': 'ip', + 'host.mac': 'keyword', + 'host.name': 'keyword', + 'host.network.egress.bytes': 'long', + 'host.network.egress.packets': 'long', + 'host.network.ingress.bytes': 'long', + 'host.network.ingress.packets': 'long', + 'host.os.family': 'keyword', + 'host.os.full': 'keyword', + 'host.os.kernel': 'keyword', + 'host.os.name': 'keyword', + 'host.os.platform': 'keyword', + 'host.os.type': 
'keyword', + 'host.os.version': 'keyword', + 'host.pid_ns_ino': 'keyword', + 'host.risk.calculated_level': 'keyword', + 'host.risk.calculated_score': 'float', + 'host.risk.calculated_score_norm': 'float', + 'host.risk.static_level': 'keyword', + 'host.risk.static_score': 'float', + 'host.risk.static_score_norm': 'float', + 'host.type': 'keyword', + 'host.uptime': 'long', + 'http.request.body.bytes': 'long', + 'http.request.body.content': 'wildcard', + 'http.request.bytes': 'long', + 'http.request.id': 'keyword', + 'http.request.method': 'keyword', + 'http.request.mime_type': 'keyword', + 'http.request.referrer': 'keyword', + 'http.response.body.bytes': 'long', + 'http.response.body.content': 'wildcard', + 'http.response.bytes': 'long', + 'http.response.mime_type': 'keyword', + 'http.response.status_code': 'long', + 'http.version': 'keyword', + labels: 'object', + 'log.file.path': 'keyword', + 'log.level': 'keyword', + 'log.logger': 'keyword', + 'log.origin.file.line': 'long', + 'log.origin.file.name': 'keyword', + 'log.origin.function': 'keyword', + 'log.syslog': 'object', + 'log.syslog.appname': 'keyword', + 'log.syslog.facility.code': 'long', + 'log.syslog.facility.name': 'keyword', + 'log.syslog.hostname': 'keyword', + 'log.syslog.msgid': 'keyword', + 'log.syslog.priority': 'long', + 'log.syslog.procid': 'keyword', + 'log.syslog.severity.code': 'long', + 'log.syslog.severity.name': 'keyword', + 'log.syslog.structured_data': 'flattened', + 'log.syslog.version': 'keyword', + message: 'match_only_text', + 'network.application': 'keyword', + 'network.bytes': 'long', + 'network.community_id': 'keyword', + 'network.direction': 'keyword', + 'network.forwarded_ip': 'ip', + 'network.iana_number': 'keyword', + 'network.inner': 'object', + 'network.inner.vlan.id': 'keyword', + 'network.inner.vlan.name': 'keyword', + 'network.name': 'keyword', + 'network.packets': 'long', + 'network.protocol': 'keyword', + 'network.transport': 'keyword', + 'network.type': 'keyword', + 
'network.vlan.id': 'keyword', + 'network.vlan.name': 'keyword', + 'observer.egress': 'object', + 'observer.egress.interface.alias': 'keyword', + 'observer.egress.interface.id': 'keyword', + 'observer.egress.interface.name': 'keyword', + 'observer.egress.vlan.id': 'keyword', + 'observer.egress.vlan.name': 'keyword', + 'observer.egress.zone': 'keyword', + 'observer.geo.city_name': 'keyword', + 'observer.geo.continent_code': 'keyword', + 'observer.geo.continent_name': 'keyword', + 'observer.geo.country_iso_code': 'keyword', + 'observer.geo.country_name': 'keyword', + 'observer.geo.location': 'geo_point', + 'observer.geo.name': 'keyword', + 'observer.geo.postal_code': 'keyword', + 'observer.geo.region_iso_code': 'keyword', + 'observer.geo.region_name': 'keyword', + 'observer.geo.timezone': 'keyword', + 'observer.hostname': 'keyword', + 'observer.ingress': 'object', + 'observer.ingress.interface.alias': 'keyword', + 'observer.ingress.interface.id': 'keyword', + 'observer.ingress.interface.name': 'keyword', + 'observer.ingress.vlan.id': 'keyword', + 'observer.ingress.vlan.name': 'keyword', + 'observer.ingress.zone': 'keyword', + 'observer.ip': 'ip', + 'observer.mac': 'keyword', + 'observer.name': 'keyword', + 'observer.os.family': 'keyword', + 'observer.os.full': 'keyword', + 'observer.os.kernel': 'keyword', + 'observer.os.name': 'keyword', + 'observer.os.platform': 'keyword', + 'observer.os.type': 'keyword', + 'observer.os.version': 'keyword', + 'observer.product': 'keyword', + 'observer.serial_number': 'keyword', + 'observer.type': 'keyword', + 'observer.vendor': 'keyword', + 'observer.version': 'keyword', + 'orchestrator.api_version': 'keyword', + 'orchestrator.cluster.id': 'keyword', + 'orchestrator.cluster.name': 'keyword', + 'orchestrator.cluster.url': 'keyword', + 'orchestrator.cluster.version': 'keyword', + 'orchestrator.namespace': 'keyword', + 'orchestrator.organization': 'keyword', + 'orchestrator.resource.annotation': 'keyword', + 'orchestrator.resource.id': 
'keyword', + 'orchestrator.resource.ip': 'ip', + 'orchestrator.resource.label': 'keyword', + 'orchestrator.resource.name': 'keyword', + 'orchestrator.resource.parent.type': 'keyword', + 'orchestrator.resource.type': 'keyword', + 'orchestrator.type': 'keyword', + 'organization.id': 'keyword', + 'organization.name': 'keyword', + 'package.architecture': 'keyword', + 'package.build_version': 'keyword', + 'package.checksum': 'keyword', + 'package.description': 'keyword', + 'package.install_scope': 'keyword', + 'package.installed': 'date', + 'package.license': 'keyword', + 'package.name': 'keyword', + 'package.path': 'keyword', + 'package.reference': 'keyword', + 'package.size': 'long', + 'package.type': 'keyword', + 'package.version': 'keyword', + 'process.args': 'keyword', + 'process.args_count': 'long', + 'process.code_signature.digest_algorithm': 'keyword', + 'process.code_signature.exists': 'boolean', + 'process.code_signature.signing_id': 'keyword', + 'process.code_signature.status': 'keyword', + 'process.code_signature.subject_name': 'keyword', + 'process.code_signature.team_id': 'keyword', + 'process.code_signature.timestamp': 'date', + 'process.code_signature.trusted': 'boolean', + 'process.code_signature.valid': 'boolean', + 'process.command_line': 'wildcard', + 'process.elf.architecture': 'keyword', + 'process.elf.byte_order': 'keyword', + 'process.elf.cpu_type': 'keyword', + 'process.elf.creation_date': 'date', + 'process.elf.exports': 'flattened', + 'process.elf.go_import_hash': 'keyword', + 'process.elf.go_imports': 'flattened', + 'process.elf.go_imports_names_entropy': 'long', + 'process.elf.go_imports_names_var_entropy': 'long', + 'process.elf.go_stripped': 'boolean', + 'process.elf.header.abi_version': 'keyword', + 'process.elf.header.class': 'keyword', + 'process.elf.header.data': 'keyword', + 'process.elf.header.entrypoint': 'long', + 'process.elf.header.object_version': 'keyword', + 'process.elf.header.os_abi': 'keyword', + 'process.elf.header.type': 
'keyword', + 'process.elf.header.version': 'keyword', + 'process.elf.import_hash': 'keyword', + 'process.elf.imports': 'flattened', + 'process.elf.imports_names_entropy': 'long', + 'process.elf.imports_names_var_entropy': 'long', + 'process.elf.sections': 'nested', + 'process.elf.sections.chi2': 'long', + 'process.elf.sections.entropy': 'long', + 'process.elf.sections.flags': 'keyword', + 'process.elf.sections.name': 'keyword', + 'process.elf.sections.physical_offset': 'keyword', + 'process.elf.sections.physical_size': 'long', + 'process.elf.sections.type': 'keyword', + 'process.elf.sections.var_entropy': 'long', + 'process.elf.sections.virtual_address': 'long', + 'process.elf.sections.virtual_size': 'long', + 'process.elf.segments': 'nested', + 'process.elf.segments.sections': 'keyword', + 'process.elf.segments.type': 'keyword', + 'process.elf.shared_libraries': 'keyword', + 'process.elf.telfhash': 'keyword', + 'process.end': 'date', + 'process.entity_id': 'keyword', + 'process.entry_leader.args': 'keyword', + 'process.entry_leader.args_count': 'long', + 'process.entry_leader.attested_groups.name': 'keyword', + 'process.entry_leader.attested_user.id': 'keyword', + 'process.entry_leader.attested_user.name': 'keyword', + 'process.entry_leader.command_line': 'wildcard', + 'process.entry_leader.entity_id': 'keyword', + 'process.entry_leader.entry_meta.source.ip': 'ip', + 'process.entry_leader.entry_meta.type': 'keyword', + 'process.entry_leader.executable': 'keyword', + 'process.entry_leader.group.id': 'keyword', + 'process.entry_leader.group.name': 'keyword', + 'process.entry_leader.interactive': 'boolean', + 'process.entry_leader.name': 'keyword', + 'process.entry_leader.parent.entity_id': 'keyword', + 'process.entry_leader.parent.pid': 'long', + 'process.entry_leader.parent.session_leader.entity_id': 'keyword', + 'process.entry_leader.parent.session_leader.pid': 'long', + 'process.entry_leader.parent.session_leader.start': 'date', + 
'process.entry_leader.parent.session_leader.vpid': 'long', + 'process.entry_leader.parent.start': 'date', + 'process.entry_leader.parent.vpid': 'long', + 'process.entry_leader.pid': 'long', + 'process.entry_leader.real_group.id': 'keyword', + 'process.entry_leader.real_group.name': 'keyword', + 'process.entry_leader.real_user.id': 'keyword', + 'process.entry_leader.real_user.name': 'keyword', + 'process.entry_leader.same_as_process': 'boolean', + 'process.entry_leader.saved_group.id': 'keyword', + 'process.entry_leader.saved_group.name': 'keyword', + 'process.entry_leader.saved_user.id': 'keyword', + 'process.entry_leader.saved_user.name': 'keyword', + 'process.entry_leader.start': 'date', + 'process.entry_leader.supplemental_groups.id': 'keyword', + 'process.entry_leader.supplemental_groups.name': 'keyword', + 'process.entry_leader.tty': 'object', + 'process.entry_leader.tty.char_device.major': 'long', + 'process.entry_leader.tty.char_device.minor': 'long', + 'process.entry_leader.user.id': 'keyword', + 'process.entry_leader.user.name': 'keyword', + 'process.entry_leader.vpid': 'long', + 'process.entry_leader.working_directory': 'keyword', + 'process.env_vars': 'keyword', + 'process.executable': 'keyword', + 'process.exit_code': 'long', + 'process.group_leader.args': 'keyword', + 'process.group_leader.args_count': 'long', + 'process.group_leader.command_line': 'wildcard', + 'process.group_leader.entity_id': 'keyword', + 'process.group_leader.executable': 'keyword', + 'process.group_leader.group.id': 'keyword', + 'process.group_leader.group.name': 'keyword', + 'process.group_leader.interactive': 'boolean', + 'process.group_leader.name': 'keyword', + 'process.group_leader.pid': 'long', + 'process.group_leader.real_group.id': 'keyword', + 'process.group_leader.real_group.name': 'keyword', + 'process.group_leader.real_user.id': 'keyword', + 'process.group_leader.real_user.name': 'keyword', + 'process.group_leader.same_as_process': 'boolean', + 
'process.group_leader.saved_group.id': 'keyword', + 'process.group_leader.saved_group.name': 'keyword', + 'process.group_leader.saved_user.id': 'keyword', + 'process.group_leader.saved_user.name': 'keyword', + 'process.group_leader.start': 'date', + 'process.group_leader.supplemental_groups.id': 'keyword', + 'process.group_leader.supplemental_groups.name': 'keyword', + 'process.group_leader.tty': 'object', + 'process.group_leader.tty.char_device.major': 'long', + 'process.group_leader.tty.char_device.minor': 'long', + 'process.group_leader.user.id': 'keyword', + 'process.group_leader.user.name': 'keyword', + 'process.group_leader.vpid': 'long', + 'process.group_leader.working_directory': 'keyword', + 'process.hash.md5': 'keyword', + 'process.hash.sha1': 'keyword', + 'process.hash.sha256': 'keyword', + 'process.hash.sha384': 'keyword', + 'process.hash.sha512': 'keyword', + 'process.hash.ssdeep': 'keyword', + 'process.hash.tlsh': 'keyword', + 'process.interactive': 'boolean', + 'process.io': 'object', + 'process.io.bytes_skipped': 'object', + 'process.io.bytes_skipped.length': 'long', + 'process.io.bytes_skipped.offset': 'long', + 'process.io.max_bytes_per_process_exceeded': 'boolean', + 'process.io.text': 'wildcard', + 'process.io.total_bytes_captured': 'long', + 'process.io.total_bytes_skipped': 'long', + 'process.io.type': 'keyword', + 'process.macho.go_import_hash': 'keyword', + 'process.macho.go_imports': 'flattened', + 'process.macho.go_imports_names_entropy': 'long', + 'process.macho.go_imports_names_var_entropy': 'long', + 'process.macho.go_stripped': 'boolean', + 'process.macho.import_hash': 'keyword', + 'process.macho.imports': 'flattened', + 'process.macho.imports_names_entropy': 'long', + 'process.macho.imports_names_var_entropy': 'long', + 'process.macho.sections': 'nested', + 'process.macho.sections.entropy': 'long', + 'process.macho.sections.name': 'keyword', + 'process.macho.sections.physical_size': 'long', + 'process.macho.sections.var_entropy': 
'long', + 'process.macho.sections.virtual_size': 'long', + 'process.macho.symhash': 'keyword', + 'process.name': 'keyword', + 'process.parent.args': 'keyword', + 'process.parent.args_count': 'long', + 'process.parent.code_signature.digest_algorithm': 'keyword', + 'process.parent.code_signature.exists': 'boolean', + 'process.parent.code_signature.signing_id': 'keyword', + 'process.parent.code_signature.status': 'keyword', + 'process.parent.code_signature.subject_name': 'keyword', + 'process.parent.code_signature.team_id': 'keyword', + 'process.parent.code_signature.timestamp': 'date', + 'process.parent.code_signature.trusted': 'boolean', + 'process.parent.code_signature.valid': 'boolean', + 'process.parent.command_line': 'wildcard', + 'process.parent.elf.architecture': 'keyword', + 'process.parent.elf.byte_order': 'keyword', + 'process.parent.elf.cpu_type': 'keyword', + 'process.parent.elf.creation_date': 'date', + 'process.parent.elf.exports': 'flattened', + 'process.parent.elf.go_import_hash': 'keyword', + 'process.parent.elf.go_imports': 'flattened', + 'process.parent.elf.go_imports_names_entropy': 'long', + 'process.parent.elf.go_imports_names_var_entropy': 'long', + 'process.parent.elf.go_stripped': 'boolean', + 'process.parent.elf.header.abi_version': 'keyword', + 'process.parent.elf.header.class': 'keyword', + 'process.parent.elf.header.data': 'keyword', + 'process.parent.elf.header.entrypoint': 'long', + 'process.parent.elf.header.object_version': 'keyword', + 'process.parent.elf.header.os_abi': 'keyword', + 'process.parent.elf.header.type': 'keyword', + 'process.parent.elf.header.version': 'keyword', + 'process.parent.elf.import_hash': 'keyword', + 'process.parent.elf.imports': 'flattened', + 'process.parent.elf.imports_names_entropy': 'long', + 'process.parent.elf.imports_names_var_entropy': 'long', + 'process.parent.elf.sections': 'nested', + 'process.parent.elf.sections.chi2': 'long', + 'process.parent.elf.sections.entropy': 'long', + 
'process.parent.elf.sections.flags': 'keyword', + 'process.parent.elf.sections.name': 'keyword', + 'process.parent.elf.sections.physical_offset': 'keyword', + 'process.parent.elf.sections.physical_size': 'long', + 'process.parent.elf.sections.type': 'keyword', + 'process.parent.elf.sections.var_entropy': 'long', + 'process.parent.elf.sections.virtual_address': 'long', + 'process.parent.elf.sections.virtual_size': 'long', + 'process.parent.elf.segments': 'nested', + 'process.parent.elf.segments.sections': 'keyword', + 'process.parent.elf.segments.type': 'keyword', + 'process.parent.elf.shared_libraries': 'keyword', + 'process.parent.elf.telfhash': 'keyword', + 'process.parent.end': 'date', + 'process.parent.entity_id': 'keyword', + 'process.parent.executable': 'keyword', + 'process.parent.exit_code': 'long', + 'process.parent.group.id': 'keyword', + 'process.parent.group.name': 'keyword', + 'process.parent.group_leader.entity_id': 'keyword', + 'process.parent.group_leader.pid': 'long', + 'process.parent.group_leader.start': 'date', + 'process.parent.group_leader.vpid': 'long', + 'process.parent.hash.md5': 'keyword', + 'process.parent.hash.sha1': 'keyword', + 'process.parent.hash.sha256': 'keyword', + 'process.parent.hash.sha384': 'keyword', + 'process.parent.hash.sha512': 'keyword', + 'process.parent.hash.ssdeep': 'keyword', + 'process.parent.hash.tlsh': 'keyword', + 'process.parent.interactive': 'boolean', + 'process.parent.macho.go_import_hash': 'keyword', + 'process.parent.macho.go_imports': 'flattened', + 'process.parent.macho.go_imports_names_entropy': 'long', + 'process.parent.macho.go_imports_names_var_entropy': 'long', + 'process.parent.macho.go_stripped': 'boolean', + 'process.parent.macho.import_hash': 'keyword', + 'process.parent.macho.imports': 'flattened', + 'process.parent.macho.imports_names_entropy': 'long', + 'process.parent.macho.imports_names_var_entropy': 'long', + 'process.parent.macho.sections': 'nested', + 
'process.parent.macho.sections.entropy': 'long', + 'process.parent.macho.sections.name': 'keyword', + 'process.parent.macho.sections.physical_size': 'long', + 'process.parent.macho.sections.var_entropy': 'long', + 'process.parent.macho.sections.virtual_size': 'long', + 'process.parent.macho.symhash': 'keyword', + 'process.parent.name': 'keyword', + 'process.parent.pe.architecture': 'keyword', + 'process.parent.pe.company': 'keyword', + 'process.parent.pe.description': 'keyword', + 'process.parent.pe.file_version': 'keyword', + 'process.parent.pe.go_import_hash': 'keyword', + 'process.parent.pe.go_imports': 'flattened', + 'process.parent.pe.go_imports_names_entropy': 'long', + 'process.parent.pe.go_imports_names_var_entropy': 'long', + 'process.parent.pe.go_stripped': 'boolean', + 'process.parent.pe.imphash': 'keyword', + 'process.parent.pe.import_hash': 'keyword', + 'process.parent.pe.imports': 'flattened', + 'process.parent.pe.imports_names_entropy': 'long', + 'process.parent.pe.imports_names_var_entropy': 'long', + 'process.parent.pe.original_file_name': 'keyword', + 'process.parent.pe.pehash': 'keyword', + 'process.parent.pe.product': 'keyword', + 'process.parent.pe.sections': 'nested', + 'process.parent.pe.sections.entropy': 'long', + 'process.parent.pe.sections.name': 'keyword', + 'process.parent.pe.sections.physical_size': 'long', + 'process.parent.pe.sections.var_entropy': 'long', + 'process.parent.pe.sections.virtual_size': 'long', + 'process.parent.pgid': 'long', + 'process.parent.pid': 'long', + 'process.parent.real_group.id': 'keyword', + 'process.parent.real_group.name': 'keyword', + 'process.parent.real_user.id': 'keyword', + 'process.parent.real_user.name': 'keyword', + 'process.parent.saved_group.id': 'keyword', + 'process.parent.saved_group.name': 'keyword', + 'process.parent.saved_user.id': 'keyword', + 'process.parent.saved_user.name': 'keyword', + 'process.parent.start': 'date', + 'process.parent.supplemental_groups.id': 'keyword', + 
'process.parent.supplemental_groups.name': 'keyword', + 'process.parent.thread.capabilities.effective': 'keyword', + 'process.parent.thread.capabilities.permitted': 'keyword', + 'process.parent.thread.id': 'long', + 'process.parent.thread.name': 'keyword', + 'process.parent.title': 'keyword', + 'process.parent.tty': 'object', + 'process.parent.tty.char_device.major': 'long', + 'process.parent.tty.char_device.minor': 'long', + 'process.parent.uptime': 'long', + 'process.parent.user.id': 'keyword', + 'process.parent.user.name': 'keyword', + 'process.parent.vpid': 'long', + 'process.parent.working_directory': 'keyword', + 'process.pe.architecture': 'keyword', + 'process.pe.company': 'keyword', + 'process.pe.description': 'keyword', + 'process.pe.file_version': 'keyword', + 'process.pe.go_import_hash': 'keyword', + 'process.pe.go_imports': 'flattened', + 'process.pe.go_imports_names_entropy': 'long', + 'process.pe.go_imports_names_var_entropy': 'long', + 'process.pe.go_stripped': 'boolean', + 'process.pe.imphash': 'keyword', + 'process.pe.import_hash': 'keyword', + 'process.pe.imports': 'flattened', + 'process.pe.imports_names_entropy': 'long', + 'process.pe.imports_names_var_entropy': 'long', + 'process.pe.original_file_name': 'keyword', + 'process.pe.pehash': 'keyword', + 'process.pe.product': 'keyword', + 'process.pe.sections': 'nested', + 'process.pe.sections.entropy': 'long', + 'process.pe.sections.name': 'keyword', + 'process.pe.sections.physical_size': 'long', + 'process.pe.sections.var_entropy': 'long', + 'process.pe.sections.virtual_size': 'long', + 'process.pgid': 'long', + 'process.pid': 'long', + 'process.previous.args': 'keyword', + 'process.previous.args_count': 'long', + 'process.previous.executable': 'keyword', + 'process.real_group.id': 'keyword', + 'process.real_group.name': 'keyword', + 'process.real_user.id': 'keyword', + 'process.real_user.name': 'keyword', + 'process.saved_group.id': 'keyword', + 'process.saved_group.name': 'keyword', + 
'process.saved_user.id': 'keyword', + 'process.saved_user.name': 'keyword', + 'process.session_leader.args': 'keyword', + 'process.session_leader.args_count': 'long', + 'process.session_leader.command_line': 'wildcard', + 'process.session_leader.entity_id': 'keyword', + 'process.session_leader.executable': 'keyword', + 'process.session_leader.group.id': 'keyword', + 'process.session_leader.group.name': 'keyword', + 'process.session_leader.interactive': 'boolean', + 'process.session_leader.name': 'keyword', + 'process.session_leader.parent.entity_id': 'keyword', + 'process.session_leader.parent.pid': 'long', + 'process.session_leader.parent.session_leader.entity_id': 'keyword', + 'process.session_leader.parent.session_leader.pid': 'long', + 'process.session_leader.parent.session_leader.start': 'date', + 'process.session_leader.parent.session_leader.vpid': 'long', + 'process.session_leader.parent.start': 'date', + 'process.session_leader.parent.vpid': 'long', + 'process.session_leader.pid': 'long', + 'process.session_leader.real_group.id': 'keyword', + 'process.session_leader.real_group.name': 'keyword', + 'process.session_leader.real_user.id': 'keyword', + 'process.session_leader.real_user.name': 'keyword', + 'process.session_leader.same_as_process': 'boolean', + 'process.session_leader.saved_group.id': 'keyword', + 'process.session_leader.saved_group.name': 'keyword', + 'process.session_leader.saved_user.id': 'keyword', + 'process.session_leader.saved_user.name': 'keyword', + 'process.session_leader.start': 'date', + 'process.session_leader.supplemental_groups.id': 'keyword', + 'process.session_leader.supplemental_groups.name': 'keyword', + 'process.session_leader.tty': 'object', + 'process.session_leader.tty.char_device.major': 'long', + 'process.session_leader.tty.char_device.minor': 'long', + 'process.session_leader.user.id': 'keyword', + 'process.session_leader.user.name': 'keyword', + 'process.session_leader.vpid': 'long', + 
'process.session_leader.working_directory': 'keyword', + 'process.start': 'date', + 'process.supplemental_groups.id': 'keyword', + 'process.supplemental_groups.name': 'keyword', + 'process.thread.capabilities.effective': 'keyword', + 'process.thread.capabilities.permitted': 'keyword', + 'process.thread.id': 'long', + 'process.thread.name': 'keyword', + 'process.title': 'keyword', + 'process.tty': 'object', + 'process.tty.char_device.major': 'long', + 'process.tty.char_device.minor': 'long', + 'process.tty.columns': 'long', + 'process.tty.rows': 'long', + 'process.uptime': 'long', + 'process.user.id': 'keyword', + 'process.user.name': 'keyword', + 'process.vpid': 'long', + 'process.working_directory': 'keyword', + 'registry.data.bytes': 'keyword', + 'registry.data.strings': 'wildcard', + 'registry.data.type': 'keyword', + 'registry.hive': 'keyword', + 'registry.key': 'keyword', + 'registry.path': 'keyword', + 'registry.value': 'keyword', + 'related.hash': 'keyword', + 'related.hosts': 'keyword', + 'related.ip': 'ip', + 'related.user': 'keyword', + 'rule.author': 'keyword', + 'rule.category': 'keyword', + 'rule.description': 'keyword', + 'rule.id': 'keyword', + 'rule.license': 'keyword', + 'rule.name': 'keyword', + 'rule.reference': 'keyword', + 'rule.ruleset': 'keyword', + 'rule.uuid': 'keyword', + 'rule.version': 'keyword', + 'server.address': 'keyword', + 'server.as.number': 'long', + 'server.as.organization.name': 'keyword', + 'server.bytes': 'long', + 'server.domain': 'keyword', + 'server.geo.city_name': 'keyword', + 'server.geo.continent_code': 'keyword', + 'server.geo.continent_name': 'keyword', + 'server.geo.country_iso_code': 'keyword', + 'server.geo.country_name': 'keyword', + 'server.geo.location': 'geo_point', + 'server.geo.name': 'keyword', + 'server.geo.postal_code': 'keyword', + 'server.geo.region_iso_code': 'keyword', + 'server.geo.region_name': 'keyword', + 'server.geo.timezone': 'keyword', + 'server.ip': 'ip', + 'server.mac': 'keyword', + 
'server.nat.ip': 'ip', + 'server.nat.port': 'long', + 'server.packets': 'long', + 'server.port': 'long', + 'server.registered_domain': 'keyword', + 'server.subdomain': 'keyword', + 'server.top_level_domain': 'keyword', + 'server.user.domain': 'keyword', + 'server.user.email': 'keyword', + 'server.user.full_name': 'keyword', + 'server.user.group.domain': 'keyword', + 'server.user.group.id': 'keyword', + 'server.user.group.name': 'keyword', + 'server.user.hash': 'keyword', + 'server.user.id': 'keyword', + 'server.user.name': 'keyword', + 'server.user.roles': 'keyword', + 'service.address': 'keyword', + 'service.environment': 'keyword', + 'service.ephemeral_id': 'keyword', + 'service.id': 'keyword', + 'service.name': 'keyword', + 'service.node.name': 'keyword', + 'service.node.role': 'keyword', + 'service.node.roles': 'keyword', + 'service.origin.address': 'keyword', + 'service.origin.environment': 'keyword', + 'service.origin.ephemeral_id': 'keyword', + 'service.origin.id': 'keyword', + 'service.origin.name': 'keyword', + 'service.origin.node.name': 'keyword', + 'service.origin.node.role': 'keyword', + 'service.origin.node.roles': 'keyword', + 'service.origin.state': 'keyword', + 'service.origin.type': 'keyword', + 'service.origin.version': 'keyword', + 'service.state': 'keyword', + 'service.target.address': 'keyword', + 'service.target.environment': 'keyword', + 'service.target.ephemeral_id': 'keyword', + 'service.target.id': 'keyword', + 'service.target.name': 'keyword', + 'service.target.node.name': 'keyword', + 'service.target.node.role': 'keyword', + 'service.target.node.roles': 'keyword', + 'service.target.state': 'keyword', + 'service.target.type': 'keyword', + 'service.target.version': 'keyword', + 'service.type': 'keyword', + 'service.version': 'keyword', + 'source.address': 'keyword', + 'source.as.number': 'long', + 'source.as.organization.name': 'keyword', + 'source.bytes': 'long', + 'source.domain': 'keyword', + 'source.geo.city_name': 'keyword', + 
'source.geo.continent_code': 'keyword', + 'source.geo.continent_name': 'keyword', + 'source.geo.country_iso_code': 'keyword', + 'source.geo.country_name': 'keyword', + 'source.geo.location': 'geo_point', + 'source.geo.name': 'keyword', + 'source.geo.postal_code': 'keyword', + 'source.geo.region_iso_code': 'keyword', + 'source.geo.region_name': 'keyword', + 'source.geo.timezone': 'keyword', + 'source.ip': 'ip', + 'source.mac': 'keyword', + 'source.nat.ip': 'ip', + 'source.nat.port': 'long', + 'source.packets': 'long', + 'source.port': 'long', + 'source.registered_domain': 'keyword', + 'source.subdomain': 'keyword', + 'source.top_level_domain': 'keyword', + 'source.user.domain': 'keyword', + 'source.user.email': 'keyword', + 'source.user.full_name': 'keyword', + 'source.user.group.domain': 'keyword', + 'source.user.group.id': 'keyword', + 'source.user.group.name': 'keyword', + 'source.user.hash': 'keyword', + 'source.user.id': 'keyword', + 'source.user.name': 'keyword', + 'source.user.roles': 'keyword', + 'span.id': 'keyword', + tags: 'keyword', + 'threat.enrichments': 'nested', + 'threat.enrichments.indicator': 'object', + 'threat.enrichments.indicator.as.number': 'long', + 'threat.enrichments.indicator.as.organization.name': 'keyword', + 'threat.enrichments.indicator.confidence': 'keyword', + 'threat.enrichments.indicator.description': 'keyword', + 'threat.enrichments.indicator.email.address': 'keyword', + 'threat.enrichments.indicator.file.accessed': 'date', + 'threat.enrichments.indicator.file.attributes': 'keyword', + 'threat.enrichments.indicator.file.code_signature.digest_algorithm': 'keyword', + 'threat.enrichments.indicator.file.code_signature.exists': 'boolean', + 'threat.enrichments.indicator.file.code_signature.signing_id': 'keyword', + 'threat.enrichments.indicator.file.code_signature.status': 'keyword', + 'threat.enrichments.indicator.file.code_signature.subject_name': 'keyword', + 'threat.enrichments.indicator.file.code_signature.team_id': 'keyword', + 
'threat.enrichments.indicator.file.code_signature.timestamp': 'date', + 'threat.enrichments.indicator.file.code_signature.trusted': 'boolean', + 'threat.enrichments.indicator.file.code_signature.valid': 'boolean', + 'threat.enrichments.indicator.file.created': 'date', + 'threat.enrichments.indicator.file.ctime': 'date', + 'threat.enrichments.indicator.file.device': 'keyword', + 'threat.enrichments.indicator.file.directory': 'keyword', + 'threat.enrichments.indicator.file.drive_letter': 'keyword', + 'threat.enrichments.indicator.file.elf.architecture': 'keyword', + 'threat.enrichments.indicator.file.elf.byte_order': 'keyword', + 'threat.enrichments.indicator.file.elf.cpu_type': 'keyword', + 'threat.enrichments.indicator.file.elf.creation_date': 'date', + 'threat.enrichments.indicator.file.elf.exports': 'flattened', + 'threat.enrichments.indicator.file.elf.go_import_hash': 'keyword', + 'threat.enrichments.indicator.file.elf.go_imports': 'flattened', + 'threat.enrichments.indicator.file.elf.go_imports_names_entropy': 'long', + 'threat.enrichments.indicator.file.elf.go_imports_names_var_entropy': 'long', + 'threat.enrichments.indicator.file.elf.go_stripped': 'boolean', + 'threat.enrichments.indicator.file.elf.header.abi_version': 'keyword', + 'threat.enrichments.indicator.file.elf.header.class': 'keyword', + 'threat.enrichments.indicator.file.elf.header.data': 'keyword', + 'threat.enrichments.indicator.file.elf.header.entrypoint': 'long', + 'threat.enrichments.indicator.file.elf.header.object_version': 'keyword', + 'threat.enrichments.indicator.file.elf.header.os_abi': 'keyword', + 'threat.enrichments.indicator.file.elf.header.type': 'keyword', + 'threat.enrichments.indicator.file.elf.header.version': 'keyword', + 'threat.enrichments.indicator.file.elf.import_hash': 'keyword', + 'threat.enrichments.indicator.file.elf.imports': 'flattened', + 'threat.enrichments.indicator.file.elf.imports_names_entropy': 'long', + 
'threat.enrichments.indicator.file.elf.imports_names_var_entropy': 'long', + 'threat.enrichments.indicator.file.elf.sections': 'nested', + 'threat.enrichments.indicator.file.elf.sections.chi2': 'long', + 'threat.enrichments.indicator.file.elf.sections.entropy': 'long', + 'threat.enrichments.indicator.file.elf.sections.flags': 'keyword', + 'threat.enrichments.indicator.file.elf.sections.name': 'keyword', + 'threat.enrichments.indicator.file.elf.sections.physical_offset': 'keyword', + 'threat.enrichments.indicator.file.elf.sections.physical_size': 'long', + 'threat.enrichments.indicator.file.elf.sections.type': 'keyword', + 'threat.enrichments.indicator.file.elf.sections.var_entropy': 'long', + 'threat.enrichments.indicator.file.elf.sections.virtual_address': 'long', + 'threat.enrichments.indicator.file.elf.sections.virtual_size': 'long', + 'threat.enrichments.indicator.file.elf.segments': 'nested', + 'threat.enrichments.indicator.file.elf.segments.sections': 'keyword', + 'threat.enrichments.indicator.file.elf.segments.type': 'keyword', + 'threat.enrichments.indicator.file.elf.shared_libraries': 'keyword', + 'threat.enrichments.indicator.file.elf.telfhash': 'keyword', + 'threat.enrichments.indicator.file.extension': 'keyword', + 'threat.enrichments.indicator.file.fork_name': 'keyword', + 'threat.enrichments.indicator.file.gid': 'keyword', + 'threat.enrichments.indicator.file.group': 'keyword', + 'threat.enrichments.indicator.file.hash.md5': 'keyword', + 'threat.enrichments.indicator.file.hash.sha1': 'keyword', + 'threat.enrichments.indicator.file.hash.sha256': 'keyword', + 'threat.enrichments.indicator.file.hash.sha384': 'keyword', + 'threat.enrichments.indicator.file.hash.sha512': 'keyword', + 'threat.enrichments.indicator.file.hash.ssdeep': 'keyword', + 'threat.enrichments.indicator.file.hash.tlsh': 'keyword', + 'threat.enrichments.indicator.file.inode': 'keyword', + 'threat.enrichments.indicator.file.mime_type': 'keyword', + 
'threat.enrichments.indicator.file.mode': 'keyword', + 'threat.enrichments.indicator.file.mtime': 'date', + 'threat.enrichments.indicator.file.name': 'keyword', + 'threat.enrichments.indicator.file.owner': 'keyword', + 'threat.enrichments.indicator.file.path': 'keyword', + 'threat.enrichments.indicator.file.pe.architecture': 'keyword', + 'threat.enrichments.indicator.file.pe.company': 'keyword', + 'threat.enrichments.indicator.file.pe.description': 'keyword', + 'threat.enrichments.indicator.file.pe.file_version': 'keyword', + 'threat.enrichments.indicator.file.pe.go_import_hash': 'keyword', + 'threat.enrichments.indicator.file.pe.go_imports': 'flattened', + 'threat.enrichments.indicator.file.pe.go_imports_names_entropy': 'long', + 'threat.enrichments.indicator.file.pe.go_imports_names_var_entropy': 'long', + 'threat.enrichments.indicator.file.pe.go_stripped': 'boolean', + 'threat.enrichments.indicator.file.pe.imphash': 'keyword', + 'threat.enrichments.indicator.file.pe.import_hash': 'keyword', + 'threat.enrichments.indicator.file.pe.imports': 'flattened', + 'threat.enrichments.indicator.file.pe.imports_names_entropy': 'long', + 'threat.enrichments.indicator.file.pe.imports_names_var_entropy': 'long', + 'threat.enrichments.indicator.file.pe.original_file_name': 'keyword', + 'threat.enrichments.indicator.file.pe.pehash': 'keyword', + 'threat.enrichments.indicator.file.pe.product': 'keyword', + 'threat.enrichments.indicator.file.pe.sections': 'nested', + 'threat.enrichments.indicator.file.pe.sections.entropy': 'long', + 'threat.enrichments.indicator.file.pe.sections.name': 'keyword', + 'threat.enrichments.indicator.file.pe.sections.physical_size': 'long', + 'threat.enrichments.indicator.file.pe.sections.var_entropy': 'long', + 'threat.enrichments.indicator.file.pe.sections.virtual_size': 'long', + 'threat.enrichments.indicator.file.size': 'long', + 'threat.enrichments.indicator.file.target_path': 'keyword', + 'threat.enrichments.indicator.file.type': 'keyword', + 
'threat.enrichments.indicator.file.uid': 'keyword', + 'threat.enrichments.indicator.file.x509.alternative_names': 'keyword', + 'threat.enrichments.indicator.file.x509.issuer.common_name': 'keyword', + 'threat.enrichments.indicator.file.x509.issuer.country': 'keyword', + 'threat.enrichments.indicator.file.x509.issuer.distinguished_name': 'keyword', + 'threat.enrichments.indicator.file.x509.issuer.locality': 'keyword', + 'threat.enrichments.indicator.file.x509.issuer.organization': 'keyword', + 'threat.enrichments.indicator.file.x509.issuer.organizational_unit': 'keyword', + 'threat.enrichments.indicator.file.x509.issuer.state_or_province': 'keyword', + 'threat.enrichments.indicator.file.x509.not_after': 'date', + 'threat.enrichments.indicator.file.x509.not_before': 'date', + 'threat.enrichments.indicator.file.x509.public_key_algorithm': 'keyword', + 'threat.enrichments.indicator.file.x509.public_key_curve': 'keyword', + 'threat.enrichments.indicator.file.x509.public_key_exponent': 'long', + 'threat.enrichments.indicator.file.x509.public_key_size': 'long', + 'threat.enrichments.indicator.file.x509.serial_number': 'keyword', + 'threat.enrichments.indicator.file.x509.signature_algorithm': 'keyword', + 'threat.enrichments.indicator.file.x509.subject.common_name': 'keyword', + 'threat.enrichments.indicator.file.x509.subject.country': 'keyword', + 'threat.enrichments.indicator.file.x509.subject.distinguished_name': 'keyword', + 'threat.enrichments.indicator.file.x509.subject.locality': 'keyword', + 'threat.enrichments.indicator.file.x509.subject.organization': 'keyword', + 'threat.enrichments.indicator.file.x509.subject.organizational_unit': 'keyword', + 'threat.enrichments.indicator.file.x509.subject.state_or_province': 'keyword', + 'threat.enrichments.indicator.file.x509.version_number': 'keyword', + 'threat.enrichments.indicator.first_seen': 'date', + 'threat.enrichments.indicator.geo.city_name': 'keyword', + 'threat.enrichments.indicator.geo.continent_code': 
'keyword', + 'threat.enrichments.indicator.geo.continent_name': 'keyword', + 'threat.enrichments.indicator.geo.country_iso_code': 'keyword', + 'threat.enrichments.indicator.geo.country_name': 'keyword', + 'threat.enrichments.indicator.geo.location': 'geo_point', + 'threat.enrichments.indicator.geo.name': 'keyword', + 'threat.enrichments.indicator.geo.postal_code': 'keyword', + 'threat.enrichments.indicator.geo.region_iso_code': 'keyword', + 'threat.enrichments.indicator.geo.region_name': 'keyword', + 'threat.enrichments.indicator.geo.timezone': 'keyword', + 'threat.enrichments.indicator.ip': 'ip', + 'threat.enrichments.indicator.last_seen': 'date', + 'threat.enrichments.indicator.marking.tlp': 'keyword', + 'threat.enrichments.indicator.marking.tlp_version': 'keyword', + 'threat.enrichments.indicator.modified_at': 'date', + 'threat.enrichments.indicator.name': 'keyword', + 'threat.enrichments.indicator.port': 'long', + 'threat.enrichments.indicator.provider': 'keyword', + 'threat.enrichments.indicator.reference': 'keyword', + 'threat.enrichments.indicator.registry.data.bytes': 'keyword', + 'threat.enrichments.indicator.registry.data.strings': 'wildcard', + 'threat.enrichments.indicator.registry.data.type': 'keyword', + 'threat.enrichments.indicator.registry.hive': 'keyword', + 'threat.enrichments.indicator.registry.key': 'keyword', + 'threat.enrichments.indicator.registry.path': 'keyword', + 'threat.enrichments.indicator.registry.value': 'keyword', + 'threat.enrichments.indicator.scanner_stats': 'long', + 'threat.enrichments.indicator.sightings': 'long', + 'threat.enrichments.indicator.type': 'keyword', + 'threat.enrichments.indicator.url.domain': 'keyword', + 'threat.enrichments.indicator.url.extension': 'keyword', + 'threat.enrichments.indicator.url.fragment': 'keyword', + 'threat.enrichments.indicator.url.full': 'wildcard', + 'threat.enrichments.indicator.url.original': 'wildcard', + 'threat.enrichments.indicator.url.password': 'keyword', + 
'threat.enrichments.indicator.url.path': 'wildcard', + 'threat.enrichments.indicator.url.port': 'long', + 'threat.enrichments.indicator.url.query': 'keyword', + 'threat.enrichments.indicator.url.registered_domain': 'keyword', + 'threat.enrichments.indicator.url.scheme': 'keyword', + 'threat.enrichments.indicator.url.subdomain': 'keyword', + 'threat.enrichments.indicator.url.top_level_domain': 'keyword', + 'threat.enrichments.indicator.url.username': 'keyword', + 'threat.enrichments.indicator.x509.alternative_names': 'keyword', + 'threat.enrichments.indicator.x509.issuer.common_name': 'keyword', + 'threat.enrichments.indicator.x509.issuer.country': 'keyword', + 'threat.enrichments.indicator.x509.issuer.distinguished_name': 'keyword', + 'threat.enrichments.indicator.x509.issuer.locality': 'keyword', + 'threat.enrichments.indicator.x509.issuer.organization': 'keyword', + 'threat.enrichments.indicator.x509.issuer.organizational_unit': 'keyword', + 'threat.enrichments.indicator.x509.issuer.state_or_province': 'keyword', + 'threat.enrichments.indicator.x509.not_after': 'date', + 'threat.enrichments.indicator.x509.not_before': 'date', + 'threat.enrichments.indicator.x509.public_key_algorithm': 'keyword', + 'threat.enrichments.indicator.x509.public_key_curve': 'keyword', + 'threat.enrichments.indicator.x509.public_key_exponent': 'long', + 'threat.enrichments.indicator.x509.public_key_size': 'long', + 'threat.enrichments.indicator.x509.serial_number': 'keyword', + 'threat.enrichments.indicator.x509.signature_algorithm': 'keyword', + 'threat.enrichments.indicator.x509.subject.common_name': 'keyword', + 'threat.enrichments.indicator.x509.subject.country': 'keyword', + 'threat.enrichments.indicator.x509.subject.distinguished_name': 'keyword', + 'threat.enrichments.indicator.x509.subject.locality': 'keyword', + 'threat.enrichments.indicator.x509.subject.organization': 'keyword', + 'threat.enrichments.indicator.x509.subject.organizational_unit': 'keyword', + 
'threat.enrichments.indicator.x509.subject.state_or_province': 'keyword', + 'threat.enrichments.indicator.x509.version_number': 'keyword', + 'threat.enrichments.matched.atomic': 'keyword', + 'threat.enrichments.matched.field': 'keyword', + 'threat.enrichments.matched.id': 'keyword', + 'threat.enrichments.matched.index': 'keyword', + 'threat.enrichments.matched.occurred': 'date', + 'threat.enrichments.matched.type': 'keyword', + 'threat.feed.dashboard_id': 'keyword', + 'threat.feed.description': 'keyword', + 'threat.feed.name': 'keyword', + 'threat.feed.reference': 'keyword', + 'threat.framework': 'keyword', + 'threat.group.alias': 'keyword', + 'threat.group.id': 'keyword', + 'threat.group.name': 'keyword', + 'threat.group.reference': 'keyword', + 'threat.indicator.as.number': 'long', + 'threat.indicator.as.organization.name': 'keyword', + 'threat.indicator.confidence': 'keyword', + 'threat.indicator.description': 'keyword', + 'threat.indicator.email.address': 'keyword', + 'threat.indicator.file.accessed': 'date', + 'threat.indicator.file.attributes': 'keyword', + 'threat.indicator.file.code_signature.digest_algorithm': 'keyword', + 'threat.indicator.file.code_signature.exists': 'boolean', + 'threat.indicator.file.code_signature.signing_id': 'keyword', + 'threat.indicator.file.code_signature.status': 'keyword', + 'threat.indicator.file.code_signature.subject_name': 'keyword', + 'threat.indicator.file.code_signature.team_id': 'keyword', + 'threat.indicator.file.code_signature.timestamp': 'date', + 'threat.indicator.file.code_signature.trusted': 'boolean', + 'threat.indicator.file.code_signature.valid': 'boolean', + 'threat.indicator.file.created': 'date', + 'threat.indicator.file.ctime': 'date', + 'threat.indicator.file.device': 'keyword', + 'threat.indicator.file.directory': 'keyword', + 'threat.indicator.file.drive_letter': 'keyword', + 'threat.indicator.file.elf.architecture': 'keyword', + 'threat.indicator.file.elf.byte_order': 'keyword', + 
'threat.indicator.file.elf.cpu_type': 'keyword', + 'threat.indicator.file.elf.creation_date': 'date', + 'threat.indicator.file.elf.exports': 'flattened', + 'threat.indicator.file.elf.go_import_hash': 'keyword', + 'threat.indicator.file.elf.go_imports': 'flattened', + 'threat.indicator.file.elf.go_imports_names_entropy': 'long', + 'threat.indicator.file.elf.go_imports_names_var_entropy': 'long', + 'threat.indicator.file.elf.go_stripped': 'boolean', + 'threat.indicator.file.elf.header.abi_version': 'keyword', + 'threat.indicator.file.elf.header.class': 'keyword', + 'threat.indicator.file.elf.header.data': 'keyword', + 'threat.indicator.file.elf.header.entrypoint': 'long', + 'threat.indicator.file.elf.header.object_version': 'keyword', + 'threat.indicator.file.elf.header.os_abi': 'keyword', + 'threat.indicator.file.elf.header.type': 'keyword', + 'threat.indicator.file.elf.header.version': 'keyword', + 'threat.indicator.file.elf.import_hash': 'keyword', + 'threat.indicator.file.elf.imports': 'flattened', + 'threat.indicator.file.elf.imports_names_entropy': 'long', + 'threat.indicator.file.elf.imports_names_var_entropy': 'long', + 'threat.indicator.file.elf.sections': 'nested', + 'threat.indicator.file.elf.sections.chi2': 'long', + 'threat.indicator.file.elf.sections.entropy': 'long', + 'threat.indicator.file.elf.sections.flags': 'keyword', + 'threat.indicator.file.elf.sections.name': 'keyword', + 'threat.indicator.file.elf.sections.physical_offset': 'keyword', + 'threat.indicator.file.elf.sections.physical_size': 'long', + 'threat.indicator.file.elf.sections.type': 'keyword', + 'threat.indicator.file.elf.sections.var_entropy': 'long', + 'threat.indicator.file.elf.sections.virtual_address': 'long', + 'threat.indicator.file.elf.sections.virtual_size': 'long', + 'threat.indicator.file.elf.segments': 'nested', + 'threat.indicator.file.elf.segments.sections': 'keyword', + 'threat.indicator.file.elf.segments.type': 'keyword', + 'threat.indicator.file.elf.shared_libraries': 
'keyword', + 'threat.indicator.file.elf.telfhash': 'keyword', + 'threat.indicator.file.extension': 'keyword', + 'threat.indicator.file.fork_name': 'keyword', + 'threat.indicator.file.gid': 'keyword', + 'threat.indicator.file.group': 'keyword', + 'threat.indicator.file.hash.md5': 'keyword', + 'threat.indicator.file.hash.sha1': 'keyword', + 'threat.indicator.file.hash.sha256': 'keyword', + 'threat.indicator.file.hash.sha384': 'keyword', + 'threat.indicator.file.hash.sha512': 'keyword', + 'threat.indicator.file.hash.ssdeep': 'keyword', + 'threat.indicator.file.hash.tlsh': 'keyword', + 'threat.indicator.file.inode': 'keyword', + 'threat.indicator.file.mime_type': 'keyword', + 'threat.indicator.file.mode': 'keyword', + 'threat.indicator.file.mtime': 'date', + 'threat.indicator.file.name': 'keyword', + 'threat.indicator.file.owner': 'keyword', + 'threat.indicator.file.path': 'keyword', + 'threat.indicator.file.pe.architecture': 'keyword', + 'threat.indicator.file.pe.company': 'keyword', + 'threat.indicator.file.pe.description': 'keyword', + 'threat.indicator.file.pe.file_version': 'keyword', + 'threat.indicator.file.pe.go_import_hash': 'keyword', + 'threat.indicator.file.pe.go_imports': 'flattened', + 'threat.indicator.file.pe.go_imports_names_entropy': 'long', + 'threat.indicator.file.pe.go_imports_names_var_entropy': 'long', + 'threat.indicator.file.pe.go_stripped': 'boolean', + 'threat.indicator.file.pe.imphash': 'keyword', + 'threat.indicator.file.pe.import_hash': 'keyword', + 'threat.indicator.file.pe.imports': 'flattened', + 'threat.indicator.file.pe.imports_names_entropy': 'long', + 'threat.indicator.file.pe.imports_names_var_entropy': 'long', + 'threat.indicator.file.pe.original_file_name': 'keyword', + 'threat.indicator.file.pe.pehash': 'keyword', + 'threat.indicator.file.pe.product': 'keyword', + 'threat.indicator.file.pe.sections': 'nested', + 'threat.indicator.file.pe.sections.entropy': 'long', + 'threat.indicator.file.pe.sections.name': 'keyword', + 
'threat.indicator.file.pe.sections.physical_size': 'long', + 'threat.indicator.file.pe.sections.var_entropy': 'long', + 'threat.indicator.file.pe.sections.virtual_size': 'long', + 'threat.indicator.file.size': 'long', + 'threat.indicator.file.target_path': 'keyword', + 'threat.indicator.file.type': 'keyword', + 'threat.indicator.file.uid': 'keyword', + 'threat.indicator.file.x509.alternative_names': 'keyword', + 'threat.indicator.file.x509.issuer.common_name': 'keyword', + 'threat.indicator.file.x509.issuer.country': 'keyword', + 'threat.indicator.file.x509.issuer.distinguished_name': 'keyword', + 'threat.indicator.file.x509.issuer.locality': 'keyword', + 'threat.indicator.file.x509.issuer.organization': 'keyword', + 'threat.indicator.file.x509.issuer.organizational_unit': 'keyword', + 'threat.indicator.file.x509.issuer.state_or_province': 'keyword', + 'threat.indicator.file.x509.not_after': 'date', + 'threat.indicator.file.x509.not_before': 'date', + 'threat.indicator.file.x509.public_key_algorithm': 'keyword', + 'threat.indicator.file.x509.public_key_curve': 'keyword', + 'threat.indicator.file.x509.public_key_exponent': 'long', + 'threat.indicator.file.x509.public_key_size': 'long', + 'threat.indicator.file.x509.serial_number': 'keyword', + 'threat.indicator.file.x509.signature_algorithm': 'keyword', + 'threat.indicator.file.x509.subject.common_name': 'keyword', + 'threat.indicator.file.x509.subject.country': 'keyword', + 'threat.indicator.file.x509.subject.distinguished_name': 'keyword', + 'threat.indicator.file.x509.subject.locality': 'keyword', + 'threat.indicator.file.x509.subject.organization': 'keyword', + 'threat.indicator.file.x509.subject.organizational_unit': 'keyword', + 'threat.indicator.file.x509.subject.state_or_province': 'keyword', + 'threat.indicator.file.x509.version_number': 'keyword', + 'threat.indicator.first_seen': 'date', + 'threat.indicator.geo.city_name': 'keyword', + 'threat.indicator.geo.continent_code': 'keyword', + 
'threat.indicator.geo.continent_name': 'keyword', + 'threat.indicator.geo.country_iso_code': 'keyword', + 'threat.indicator.geo.country_name': 'keyword', + 'threat.indicator.geo.location': 'geo_point', + 'threat.indicator.geo.name': 'keyword', + 'threat.indicator.geo.postal_code': 'keyword', + 'threat.indicator.geo.region_iso_code': 'keyword', + 'threat.indicator.geo.region_name': 'keyword', + 'threat.indicator.geo.timezone': 'keyword', + 'threat.indicator.ip': 'ip', + 'threat.indicator.last_seen': 'date', + 'threat.indicator.marking.tlp': 'keyword', + 'threat.indicator.marking.tlp_version': 'keyword', + 'threat.indicator.modified_at': 'date', + 'threat.indicator.name': 'keyword', + 'threat.indicator.port': 'long', + 'threat.indicator.provider': 'keyword', + 'threat.indicator.reference': 'keyword', + 'threat.indicator.registry.data.bytes': 'keyword', + 'threat.indicator.registry.data.strings': 'wildcard', + 'threat.indicator.registry.data.type': 'keyword', + 'threat.indicator.registry.hive': 'keyword', + 'threat.indicator.registry.key': 'keyword', + 'threat.indicator.registry.path': 'keyword', + 'threat.indicator.registry.value': 'keyword', + 'threat.indicator.scanner_stats': 'long', + 'threat.indicator.sightings': 'long', + 'threat.indicator.type': 'keyword', + 'threat.indicator.url.domain': 'keyword', + 'threat.indicator.url.extension': 'keyword', + 'threat.indicator.url.fragment': 'keyword', + 'threat.indicator.url.full': 'wildcard', + 'threat.indicator.url.original': 'wildcard', + 'threat.indicator.url.password': 'keyword', + 'threat.indicator.url.path': 'wildcard', + 'threat.indicator.url.port': 'long', + 'threat.indicator.url.query': 'keyword', + 'threat.indicator.url.registered_domain': 'keyword', + 'threat.indicator.url.scheme': 'keyword', + 'threat.indicator.url.subdomain': 'keyword', + 'threat.indicator.url.top_level_domain': 'keyword', + 'threat.indicator.url.username': 'keyword', + 'threat.indicator.x509.alternative_names': 'keyword', + 
'threat.indicator.x509.issuer.common_name': 'keyword', + 'threat.indicator.x509.issuer.country': 'keyword', + 'threat.indicator.x509.issuer.distinguished_name': 'keyword', + 'threat.indicator.x509.issuer.locality': 'keyword', + 'threat.indicator.x509.issuer.organization': 'keyword', + 'threat.indicator.x509.issuer.organizational_unit': 'keyword', + 'threat.indicator.x509.issuer.state_or_province': 'keyword', + 'threat.indicator.x509.not_after': 'date', + 'threat.indicator.x509.not_before': 'date', + 'threat.indicator.x509.public_key_algorithm': 'keyword', + 'threat.indicator.x509.public_key_curve': 'keyword', + 'threat.indicator.x509.public_key_exponent': 'long', + 'threat.indicator.x509.public_key_size': 'long', + 'threat.indicator.x509.serial_number': 'keyword', + 'threat.indicator.x509.signature_algorithm': 'keyword', + 'threat.indicator.x509.subject.common_name': 'keyword', + 'threat.indicator.x509.subject.country': 'keyword', + 'threat.indicator.x509.subject.distinguished_name': 'keyword', + 'threat.indicator.x509.subject.locality': 'keyword', + 'threat.indicator.x509.subject.organization': 'keyword', + 'threat.indicator.x509.subject.organizational_unit': 'keyword', + 'threat.indicator.x509.subject.state_or_province': 'keyword', + 'threat.indicator.x509.version_number': 'keyword', + 'threat.software.alias': 'keyword', + 'threat.software.id': 'keyword', + 'threat.software.name': 'keyword', + 'threat.software.platforms': 'keyword', + 'threat.software.reference': 'keyword', + 'threat.software.type': 'keyword', + 'threat.tactic.id': 'keyword', + 'threat.tactic.name': 'keyword', + 'threat.tactic.reference': 'keyword', + 'threat.technique.id': 'keyword', + 'threat.technique.name': 'keyword', + 'threat.technique.reference': 'keyword', + 'threat.technique.subtechnique.id': 'keyword', + 'threat.technique.subtechnique.name': 'keyword', + 'threat.technique.subtechnique.reference': 'keyword', + 'tls.cipher': 'keyword', + 'tls.client.certificate': 'keyword', + 
'tls.client.certificate_chain': 'keyword', + 'tls.client.hash.md5': 'keyword', + 'tls.client.hash.sha1': 'keyword', + 'tls.client.hash.sha256': 'keyword', + 'tls.client.issuer': 'keyword', + 'tls.client.ja3': 'keyword', + 'tls.client.not_after': 'date', + 'tls.client.not_before': 'date', + 'tls.client.server_name': 'keyword', + 'tls.client.subject': 'keyword', + 'tls.client.supported_ciphers': 'keyword', + 'tls.client.x509.alternative_names': 'keyword', + 'tls.client.x509.issuer.common_name': 'keyword', + 'tls.client.x509.issuer.country': 'keyword', + 'tls.client.x509.issuer.distinguished_name': 'keyword', + 'tls.client.x509.issuer.locality': 'keyword', + 'tls.client.x509.issuer.organization': 'keyword', + 'tls.client.x509.issuer.organizational_unit': 'keyword', + 'tls.client.x509.issuer.state_or_province': 'keyword', + 'tls.client.x509.not_after': 'date', + 'tls.client.x509.not_before': 'date', + 'tls.client.x509.public_key_algorithm': 'keyword', + 'tls.client.x509.public_key_curve': 'keyword', + 'tls.client.x509.public_key_exponent': 'long', + 'tls.client.x509.public_key_size': 'long', + 'tls.client.x509.serial_number': 'keyword', + 'tls.client.x509.signature_algorithm': 'keyword', + 'tls.client.x509.subject.common_name': 'keyword', + 'tls.client.x509.subject.country': 'keyword', + 'tls.client.x509.subject.distinguished_name': 'keyword', + 'tls.client.x509.subject.locality': 'keyword', + 'tls.client.x509.subject.organization': 'keyword', + 'tls.client.x509.subject.organizational_unit': 'keyword', + 'tls.client.x509.subject.state_or_province': 'keyword', + 'tls.client.x509.version_number': 'keyword', + 'tls.curve': 'keyword', + 'tls.established': 'boolean', + 'tls.next_protocol': 'keyword', + 'tls.resumed': 'boolean', + 'tls.server.certificate': 'keyword', + 'tls.server.certificate_chain': 'keyword', + 'tls.server.hash.md5': 'keyword', + 'tls.server.hash.sha1': 'keyword', + 'tls.server.hash.sha256': 'keyword', + 'tls.server.issuer': 'keyword', + 'tls.server.ja3s': 
'keyword', + 'tls.server.not_after': 'date', + 'tls.server.not_before': 'date', + 'tls.server.subject': 'keyword', + 'tls.server.x509.alternative_names': 'keyword', + 'tls.server.x509.issuer.common_name': 'keyword', + 'tls.server.x509.issuer.country': 'keyword', + 'tls.server.x509.issuer.distinguished_name': 'keyword', + 'tls.server.x509.issuer.locality': 'keyword', + 'tls.server.x509.issuer.organization': 'keyword', + 'tls.server.x509.issuer.organizational_unit': 'keyword', + 'tls.server.x509.issuer.state_or_province': 'keyword', + 'tls.server.x509.not_after': 'date', + 'tls.server.x509.not_before': 'date', + 'tls.server.x509.public_key_algorithm': 'keyword', + 'tls.server.x509.public_key_curve': 'keyword', + 'tls.server.x509.public_key_exponent': 'long', + 'tls.server.x509.public_key_size': 'long', + 'tls.server.x509.serial_number': 'keyword', + 'tls.server.x509.signature_algorithm': 'keyword', + 'tls.server.x509.subject.common_name': 'keyword', + 'tls.server.x509.subject.country': 'keyword', + 'tls.server.x509.subject.distinguished_name': 'keyword', + 'tls.server.x509.subject.locality': 'keyword', + 'tls.server.x509.subject.organization': 'keyword', + 'tls.server.x509.subject.organizational_unit': 'keyword', + 'tls.server.x509.subject.state_or_province': 'keyword', + 'tls.server.x509.version_number': 'keyword', + 'tls.version': 'keyword', + 'tls.version_protocol': 'keyword', + 'trace.id': 'keyword', + 'transaction.id': 'keyword', + 'url.domain': 'keyword', + 'url.extension': 'keyword', + 'url.fragment': 'keyword', + 'url.full': 'wildcard', + 'url.original': 'wildcard', + 'url.password': 'keyword', + 'url.path': 'wildcard', + 'url.port': 'long', + 'url.query': 'keyword', + 'url.registered_domain': 'keyword', + 'url.scheme': 'keyword', + 'url.subdomain': 'keyword', + 'url.top_level_domain': 'keyword', + 'url.username': 'keyword', + 'user.changes.domain': 'keyword', + 'user.changes.email': 'keyword', + 'user.changes.full_name': 'keyword', + 
'user.changes.group.domain': 'keyword', + 'user.changes.group.id': 'keyword', + 'user.changes.group.name': 'keyword', + 'user.changes.hash': 'keyword', + 'user.changes.id': 'keyword', + 'user.changes.name': 'keyword', + 'user.changes.roles': 'keyword', + 'user.domain': 'keyword', + 'user.effective.domain': 'keyword', + 'user.effective.email': 'keyword', + 'user.effective.full_name': 'keyword', + 'user.effective.group.domain': 'keyword', + 'user.effective.group.id': 'keyword', + 'user.effective.group.name': 'keyword', + 'user.effective.hash': 'keyword', + 'user.effective.id': 'keyword', + 'user.effective.name': 'keyword', + 'user.effective.roles': 'keyword', + 'user.email': 'keyword', + 'user.full_name': 'keyword', + 'user.group.domain': 'keyword', + 'user.group.id': 'keyword', + 'user.group.name': 'keyword', + 'user.hash': 'keyword', + 'user.id': 'keyword', + 'user.name': 'keyword', + 'user.risk.calculated_level': 'keyword', + 'user.risk.calculated_score': 'float', + 'user.risk.calculated_score_norm': 'float', + 'user.risk.static_level': 'keyword', + 'user.risk.static_score': 'float', + 'user.risk.static_score_norm': 'float', + 'user.roles': 'keyword', + 'user.target.domain': 'keyword', + 'user.target.email': 'keyword', + 'user.target.full_name': 'keyword', + 'user.target.group.domain': 'keyword', + 'user.target.group.id': 'keyword', + 'user.target.group.name': 'keyword', + 'user.target.hash': 'keyword', + 'user.target.id': 'keyword', + 'user.target.name': 'keyword', + 'user.target.roles': 'keyword', + 'user_agent.device.name': 'keyword', + 'user_agent.name': 'keyword', + 'user_agent.original': 'keyword', + 'user_agent.os.family': 'keyword', + 'user_agent.os.full': 'keyword', + 'user_agent.os.kernel': 'keyword', + 'user_agent.os.name': 'keyword', + 'user_agent.os.platform': 'keyword', + 'user_agent.os.type': 'keyword', + 'user_agent.os.version': 'keyword', + 'user_agent.version': 'keyword', + 'volume.bus_type': 'keyword', + 'volume.default_access': 'keyword', + 
'volume.device_name': 'keyword', + 'volume.device_type': 'keyword', + 'volume.dos_name': 'keyword', + 'volume.file_system_type': 'keyword', + 'volume.mount_name': 'keyword', + 'volume.nt_name': 'keyword', + 'volume.product_id': 'keyword', + 'volume.product_name': 'keyword', + 'volume.removable': 'boolean', + 'volume.serial_number': 'keyword', + 'volume.size': 'long', + 'volume.vendor_id': 'keyword', + 'volume.vendor_name': 'keyword', + 'volume.writable': 'boolean', + 'vulnerability.category': 'keyword', + 'vulnerability.classification': 'keyword', + 'vulnerability.description': 'keyword', + 'vulnerability.enumeration': 'keyword', + 'vulnerability.id': 'keyword', + 'vulnerability.reference': 'keyword', + 'vulnerability.report_id': 'keyword', + 'vulnerability.scanner.vendor': 'keyword', + 'vulnerability.score.base': 'float', + 'vulnerability.score.environmental': 'float', + 'vulnerability.score.temporal': 'float', + 'vulnerability.score.version': 'keyword', + 'vulnerability.severity': 'keyword', +}; + +export const ECS_FIELDS: EcsFields = { + 'destination.address': 'Destination network address.', + 'destination.bytes': 'Bytes sent from the destination to the source.', + 'destination.domain': 'The domain name of the destination.', + 'destination.ip': 'IP address of the destination.', + 'destination.mac': 'MAC address of the destination.', + 'destination.packets': 'Packets sent from the destination to the source.', + 'destination.port': 'Port of the destination.', + 'destination.user.domain': 'Name of the directory the user is a member of.', + 'destination.user.email': 'User email address.', + 'destination.user.full_name': 'Users full name, if available.', + 'destination.user.group.domain': 'Name of the directory the group is a member of.', + 'destination.user.group.id': 'Unique identifier for the group on the system/platform.', + 'destination.user.group.name': 'Name of the group.', + 'destination.user.id': 'Unique identifier of the user.', + 'destination.user.name': 
'Short name or login of the user.', + 'event.action': 'The action captured by the event.', + 'event.created': 'Time when the event was first read by an agent or by your pipeline.', + 'event.code': 'Identification code for this event.', + 'event.duration': 'Duration of the event in nanoseconds.', + 'event.end': + 'event.end contains the date when the event ended or when the activity was last observed.', + 'event.id': 'Unique ID to describe the event.', + 'event.severity': 'Numeric severity of the event.', + 'file.directory': 'Directory where the file is located.', + 'file.extension': 'File extension, excluding the leading dot.', + 'file.gid': 'Primary group ID (GID) of the file.', + 'file.group': 'Primary group name of the file.', + 'file.hash.md5': 'MD5 hash.', + 'file.hash.sha1': 'SHA1 hash.', + 'file.hash.sha256': 'SHA256 hash.', + 'file.inode': 'Inode representing the file in the filesystem.', + 'file.name': 'Name of the file including the extension, without the directory.', + 'file.path': 'Full path to the file, including the file name.', + 'file.size': 'File size in bytes.', + 'file.uid': 'The user ID (UID) or security identifier (SID) of the file owner.', + 'group.domain': 'Name of the directory the group is a member of.', + 'group.id': 'Unique identifier for the group on the system/platform.', + 'group.name': 'Name of the group.', + 'http.request.method': 'HTTP request method.', + 'http.response.status_code': 'HTTP response status code.', + 'http.version': 'HTTP version.', + 'network.application': 'Application level protocol name.', + 'network.bytes': 'Total bytes transferred in both directions.', + 'network.direction': 'Direction of the network traffic.', + 'network.packets': 'Total packets transferred in both directions.', + 'network.protocol': 'Application protocol name.', + 'network.transport': 'Protocol Name corresponding to the field `iana_number`.', + 'network.type': 'In the OSI Model this would be the Network Layer. 
ipv4, ipv6, ipsec, pim, etc', + 'organization.id': 'Unique identifier for the organization.', + 'organization.name': 'Organization name.', + 'process.args': 'Array of process arguments.', + 'process.args_count': 'Length of the process.args array.', + 'process.command_line': 'Full command line that started the process.', + 'process.end': 'The time the process ended.', + 'process.executable': 'Absolute path to the process executable.', + 'process.hash.md5': 'MD5 hash.', + 'process.hash.sha1': 'SHA1 hash.', + 'process.hash.sha256': 'SHA256 hash.', + 'process.name': 'Process name.', + 'process.parent.args': 'Array of process arguments.', + 'process.parent.args_count': 'Length of the process.args array.', + 'process.parent.command_line': 'Full command line that started the process.', + 'process.parent.end': 'The time the process ended.', + 'process.parent.executable': 'Absolute path to the process executable.', + 'process.parent.group.id': 'Unique identifier for the group on the system/platform.', + 'process.parent.group.name': 'Name of the group.', + 'process.parent.hash.md5': 'MD5 hash.', + 'process.parent.hash.sha1': 'SHA1 hash.', + 'process.parent.hash.sha256': 'SHA256 hash.', + 'process.parent.name': 'Process name.', + 'process.parent.pgid': 'Deprecated identifier of the group of processes the process belongs to.', + 'process.parent.pid': 'Process id.', + 'process.parent.start': 'The time the process started.', + 'process.parent.thread.id': 'Thread ID.', + 'process.parent.thread.name': 'Thread name.', + 'process.parent.user.id': 'Unique identifier of the user.', + 'process.parent.user.name': 'Short name or login of the user.', + 'process.pgid': 'Deprecated identifier of the group of processes the process belongs to.', + 'process.pid': 'Process id.', + 'process.start': 'The time the process started.', + 'process.thread.id': 'Thread ID.', + 'process.thread.name': 'Thread name.', + 'process.user.id': 'Unique identifier of the user.', + 'process.user.name': 'Short name 
or login of the user.', + 'rule.author': 'Rule author', + 'rule.category': 'Rule category', + 'rule.description': 'Rule description', + 'rule.id': 'Rule ID', + 'rule.license': 'Rule license', + 'rule.name': 'Rule name', + 'rule.reference': 'Rule reference URL', + 'rule.ruleset': 'Rule ruleset', + 'rule.uuid': 'Rule UUID', + 'rule.version': 'Rule version', + 'source.address': 'Source network address.', + 'source.bytes': 'Bytes sent from the source to the destination.', + 'source.domain': 'The domain name of the source.', + 'source.ip': 'IP address of the source.', + 'source.mac': 'MAC address of the source.', + 'source.packets': 'Packets sent from the source to the destination.', + 'source.port': 'Port of the source.', + 'source.user.domain': 'Name of the directory the user is a member of.', + 'source.user.email': 'User email address.', + 'source.user.full_name': 'Users full name, if available.', + 'source.user.group.domain': 'Name of the directory the group is a member of.', + 'source.user.group.id': 'Unique identifier for the group on the system/platform.', + 'source.user.group.name': 'Name of the group.', + 'source.user.id': 'Unique identifier of the user.', + 'source.user.name': 'Short name or login of the user.', + 'source.user.roles': 'Array of user roles at the time of the event.', + 'tls.server.x509.alternative_names': 'List of subject alternative names (SAN).', + 'tls.server.x509.issuer.common_name': + 'List of common name (CN) of issuing certificate authority.', + 'threat.framework': 'Threat classification framework.', + 'threat.tactic.id': 'Threat tactic id.', + 'threat.tactic.name': 'Threat tactic.', + 'threat.technique.id': 'Threat technique id.', + 'threat.technique.name': 'Threat technique name.', + 'url.domain': 'Domain of the url.', + 'url.extension': 'File extension from the request url, excluding the leading dot.', + 'url.fragment': 'Portion of the url after the `#`.', + 'url.full': 'Full unparsed URL.', + 'url.original': 'Unmodified original url 
as seen in the event source.', + 'url.path': 'Path of the request, such as "/search".', + 'url.port': 'Port of the request, such as 443.', + 'url.query': 'Query string of the request.', + 'url.scheme': 'Scheme of the url.', + 'user.domain': 'Name of the directory the user is a member of.', + 'user.email': 'User email address.', + 'user.full_name': 'Users full name, if available.', + 'user.group.domain': 'Name of the directory the group is a member of.', + 'user.group.id': 'Unique identifier for the group on the system/platform.', + 'user.group.name': 'Name of the group.', + 'user.id': 'Unique identifier of the user.', + 'user.name': 'Short name or login of the user.', + 'user.roles': 'Array of user roles at the time of the event.', + 'user_agent.original': 'Unparsed user_agent string.', +}; + +export const ECS_EXAMPLE_ANSWER = { + crowdstrike: { + falcon: { + metadata: { + customerIDString: null, + offset: null, + eventType: { + target: 'event.code', + confidence: 0.94, + type: 'string', + date_formats: [], + }, + eventCreationTime: { + target: 'event.created', + confidence: 0.85, + type: 'date', + date_formats: ['UNIX'], + version: null, + }, + event: { + DeviceId: null, + CustomerId: null, + Ipv: { + target: 'network.type', + confidence: 0.99, + type: 'string', + date_formats: [], + }, + CommandLine: { + target: 'process.command_line', + confidence: 0.9, + type: 'string', + date_formats: [], + }, + ConnectionDirection: { + target: 'network.direction', + confidence: 0.9, + type: 'string', + date_formats: [], + }, + EventType: { + target: 'event.action', + confidence: 0.82, + type: 'string', + date_formats: [], + }, + Flags: { Audit: null, Log: null, Monitor: null }, + HostName: { + target: 'host.name', + confidence: 0.82, + type: 'string', + date_formats: [], + }, + LocalAddress: { + target: 'source.address', + confidence: 0.83, + type: 'string', + date_formats: [], + }, + LocalPort: { + target: 'source.port', + confidence: 0.83, + type: 'number', + date_formats: 
[], + }, + PolicyName: null, + RemoteAddress: { + target: 'destination.address', + confidence: 0.83, + type: 'string', + date_formats: [], + }, + RemotePort: { + target: 'destination.port', + confidence: 0.83, + type: 'number', + date_formats: [], + }, + RuleAction: { + target: 'event.type', + confidence: 0.86, + type: 'string', + date_formats: [], + }, + RuleDescription: { + target: 'rule.description', + confidence: 0.99, + type: 'string', + date_formats: [], + }, + UTCTimestamp: { + target: '@timestamp', + confidence: 0.99, + type: 'string', + date_formats: ['UNIX_MS'], + }, + }, + }, + }, + }, +}; diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts new file mode 100644 index 0000000000000..72108566d6952 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts @@ -0,0 +1,22 @@ +import { JsonOutputParser } from '@langchain/core/output_parsers'; +import { ECS_DUPLICATES_PROMPT } from './prompts'; +import { getModel } from '../../providers/bedrock'; +import { EcsMappingState } from '../../types'; + +export async function handleDuplicates(state: EcsMappingState) { + const ecsDuplicatesPrompt = ECS_DUPLICATES_PROMPT; + const model = getModel(); + console.log('testing ecs duplicate'); + + const outputParser = new JsonOutputParser(); + const ecsDuplicatesGraph = ecsDuplicatesPrompt.pipe(model).pipe(outputParser); + + const currentMapping = await ecsDuplicatesGraph.invoke({ + ecs: state.ecs, + current_mapping: JSON.stringify(state.currentMapping, null, 2), + ex_answer: state.exAnswer, + duplicate_fields: state.duplicateFields, + }); + + return { currentMapping, lastExecutedChain: 'duplicateFields' }; +} diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts new file mode 100644 index 0000000000000..0025a5cd4b898 --- /dev/null +++ 
b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts @@ -0,0 +1,162 @@ +import { StateGraph, StateGraphArgs, END, START } from '@langchain/langgraph'; +import { ECS_EXAMPLE_ANSWER, ECS_FIELDS } from './constants'; +import { modifySamples, mergeSamples } from '../../util/samples'; +import { createPipeline } from './pipeline'; +import { handleEcsMapping } from './mapping'; +import { handleDuplicates } from './duplicates'; +import { handleMissingKeys } from './missing'; +import { handleInvalidEcs } from './invalid'; +import { handleValidateMappings } from './validate'; +import { EcsMappingState } from '../../types'; + +const graphState: StateGraphArgs['channels'] = { + ecs: { + value: (x: string, y?: string) => y ?? x, + default: () => '', + }, + lastExecutedChain: { + value: (x: string, y?: string) => y ?? x, + default: () => '', + }, + rawSamples: { + value: (x: string[], y?: string[]) => y ?? x, + default: () => [], + }, + samples: { + value: (x: string[], y?: string[]) => y ?? x, + default: () => [], + }, + formattedSamples: { + value: (x: string, y?: string) => y ?? x, + default: () => '', + }, + exAnswer: { + value: (x: string, y?: string) => y ?? x, + default: () => '', + }, + packageName: { + value: (x: string, y?: string) => y ?? x, + default: () => '', + }, + dataStreamName: { + value: (x: string, y?: string) => y ?? x, + default: () => '', + }, + finalized: { + value: (x: boolean, y?: boolean) => y ?? x, + default: () => false, + }, + currentMapping: { + value: (x: object, y?: object) => y ?? x, + default: () => ({}), + }, + currentPipeline: { + value: (x: object, y?: object) => y ?? x, + default: () => ({}), + }, + duplicateFields: { + value: (x: string[], y?: string[]) => y ?? x, + default: () => [], + }, + missingKeys: { + value: (x: string[], y?: string[]) => y ?? x, + default: () => [], + }, + invalidEcsFields: { + value: (x: string[], y?: string[]) => y ?? x, + default: () => [], + }, + results: { + value: (x: object, y?: object) => y ?? 
x, + default: () => ({}), + }, + logFormat: { + value: (x: string, y?: string) => y ?? x, + default: () => 'json', + }, + ecsVersion: { + value: (x: string, y?: string) => y ?? x, + default: () => '8.11.0', + }, +}; + +function modelInput(state: EcsMappingState): Partial { + const samples = modifySamples(state); + const formattedSamples = mergeSamples(samples); + return { + exAnswer: JSON.stringify(ECS_EXAMPLE_ANSWER, null, 2), + ecs: JSON.stringify(ECS_FIELDS, null, 2), + samples, + finalized: false, + formattedSamples, + lastExecutedChain: 'modelInput', + }; +} + +function modelOutput(state: EcsMappingState): Partial { + const currentPipeline = createPipeline(state); + return { + finalized: true, + lastExecutedChain: 'modelOutput', + results: { + mapping: state.currentMapping, + current_pipeline: currentPipeline, + }, + }; +} + +function inputRouter(state: EcsMappingState): string { + if (Object.keys(state.currentMapping).length === 0) { + console.log('No current mapping found'); + return 'ecsMapping'; + } + return 'modelOutput'; +} + +function chainRouter(state: EcsMappingState): string { + if (Object.keys(state.duplicateFields).length > 0) { + return 'duplicateFields'; + } + if (Object.keys(state.missingKeys).length > 0) { + return 'missingKeys'; + } + if (Object.keys(state.invalidEcsFields).length > 0) { + return 'invalidEcsFields'; + } + if (!state.finalized) { + return 'modelOutput'; + } + return END; +} + +export function getEcsGraph() { + const workflow = new StateGraph({ + channels: graphState, + }) + .addNode('modelInput', modelInput) + .addNode('modelOutput', modelOutput) + .addNode('handleEcsMapping', handleEcsMapping) + .addNode('handleValidation', handleValidateMappings) + .addNode('handleDuplicates', handleDuplicates) + .addNode('handleMissingKeys', handleMissingKeys) + .addNode('handleInvalidEcs', handleInvalidEcs) + .addEdge(START, 'modelInput') + .addEdge('modelOutput', END) + .addEdge('handleEcsMapping', 'handleValidation') + 
.addEdge('handleDuplicates', 'handleValidation') + .addEdge('handleMissingKeys', 'handleValidation') + .addEdge('handleInvalidEcs', 'handleValidation') + .addConditionalEdges('modelInput', inputRouter, { + ecsMapping: 'handleEcsMapping', + modelOutput: 'modelOutput', + }) + .addConditionalEdges('handleValidation', chainRouter, { + duplicateFields: 'handleDuplicates', + missingKeys: 'handleMissingKeys', + invalidEcsFields: 'handleInvalidEcs', + modelOutput: 'modelOutput', + }); + + const compiledEcsGraph = workflow.compile(); + return compiledEcsGraph; +} diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/index.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/index.ts new file mode 100644 index 0000000000000..0f930a68699dd --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/index.ts @@ -0,0 +1 @@ +export { getEcsGraph } from './graph'; diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts new file mode 100644 index 0000000000000..249e64faf1c08 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts @@ -0,0 +1,23 @@ +import { JsonOutputParser } from '@langchain/core/output_parsers'; +import { ECS_INVALID_PROMPT } from './prompts'; +import { getModel } from '../../providers/bedrock'; +import { EcsMappingState } from '../../types'; + +export async function handleInvalidEcs(state: EcsMappingState) { + const ecsInvalidEcsPrompt = ECS_INVALID_PROMPT; + const model = getModel(); + console.log('testing ecs invalid'); + + const outputParser = new JsonOutputParser(); + const ecsInvalidEcsGraph = ecsInvalidEcsPrompt.pipe(model).pipe(outputParser); + + const currentMapping = await ecsInvalidEcsGraph.invoke({ + ecs: state.ecs, + current_mapping: JSON.stringify(state.currentMapping, null, 2), + ex_answer: state.exAnswer, + formatted_samples: state.formattedSamples, + invalid_ecs_fields: 
state.invalidEcsFields, + }); + + return { currentMapping, lastExecutedChain: 'invalidEcs' }; +} diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts new file mode 100644 index 0000000000000..9caf196f70717 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts @@ -0,0 +1,23 @@ +import { JsonOutputParser } from '@langchain/core/output_parsers'; +import { ECS_MAIN_PROMPT } from './prompts'; +import { getModel } from '../../providers/bedrock'; +import { EcsMappingState } from '../../types'; + +export async function handleEcsMapping(state: EcsMappingState) { + const ecsMainPrompt = ECS_MAIN_PROMPT; + const model = getModel(); + console.log('testing ecs mapping'); + + const outputParser = new JsonOutputParser(); + const ecsMainGraph = ecsMainPrompt.pipe(model).pipe(outputParser); + + const currentMapping = await ecsMainGraph.invoke({ + ecs: state.ecs, + formatted_samples: state.formattedSamples, + package_name: state.packageName, + data_stream_name: state.dataStreamName, + ex_answer: state.exAnswer, + }); + + return { currentMapping, lastExecutedChain: 'ecsMapping' }; +} diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts new file mode 100644 index 0000000000000..e0d01fb5acd47 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts @@ -0,0 +1,23 @@ +import { JsonOutputParser } from '@langchain/core/output_parsers'; +import { ECS_MISSING_PROMPT } from './prompts'; +import { getModel } from '../../providers/bedrock'; +import { EcsMappingState } from '../../types'; + +export async function handleMissingKeys(state: EcsMappingState) { + const ecsMissingPrompt = ECS_MISSING_PROMPT; + const model = getModel(); + console.log('testing ecs missing'); + + const outputParser = new JsonOutputParser(); + const ecsMissingGraph = 
ecsMissingPrompt.pipe(model).pipe(outputParser); + + const currentMapping = await ecsMissingGraph.invoke({ + ecs: state.ecs, + current_mapping: state.currentMapping, + ex_answer: state.exAnswer, + formatted_samples: state.formattedSamples, + missing_keys: state?.missingKeys, + }); + + return { currentMapping, lastExecutedChain: 'missingKeys' }; +} diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts new file mode 100644 index 0000000000000..af96064daff0a --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts @@ -0,0 +1,179 @@ +import * as yaml from 'js-yaml'; +import { Environment, FileSystemLoader } from 'nunjucks'; +import * as path from 'path'; +import { ECS_TYPES } from './constants'; +import { EcsMappingState } from '../../types'; + +interface IngestPipeline { + [key: string]: any; +} + +interface ECSField { + target: string; + confidence: number; + date_formats: string[]; + type: string; +} + +function generateProcessor( + currentPath: string, + ecsField: ECSField, + expectedEcsType: string, + sampleValue: any +): object { + if (needsTypeConversion(sampleValue, expectedEcsType)) { + return { + convert: { + field: currentPath, + target_field: ecsField.target, + type: getConvertProcessorType(expectedEcsType), + ignore_missing: true, + }, + }; + } + + if (ecsField.type === 'date') { + return { + date: { + field: currentPath, + target_field: ecsField.target, + formats: ecsField.date_formats, + if: currentPath.replace('.', '?.'), + }, + }; + } + + return { + rename: { + field: currentPath, + target_field: ecsField.target, + ignore_missing: true, + }, + }; +} + +function getSampleValue(key: string, samples: Record): any { + try { + const keyList = key.split('.'); + let value: any = samples; + for (const k of keyList) { + if (value === undefined || value === null) { + return null; + } + value = value[k]; + } + return value; + } catch (e) { 
+ console.log(e); + return null; + } +} + +function getEcsType(ecsField: ECSField, ecsTypes: Record): string { + const ecsTarget = ecsField.target; + return ecsTypes[ecsTarget]; +} + +function getConvertProcessorType(expectedEcsType: string): string { + if (expectedEcsType === 'long') { + return 'long'; + } + if (['scaled_float', 'float'].includes(expectedEcsType)) { + return 'float'; + } + if (expectedEcsType === 'ip') { + return 'ip'; + } + if (expectedEcsType === 'boolean') { + return 'boolean'; + } + return 'string'; +} + +function needsTypeConversion(sample: any, expected: string): boolean { + if (sample === null || sample === undefined) { + return false; + } + + if (expected === 'ip') { + return true; + } + + if (expected === 'boolean' && typeof sample !== 'boolean') { + return true; + } + + if (['long', 'float', 'scaled_float'].includes(expected) && typeof sample !== 'number') { + return true; + } + + if ( + ['keyword', 'wildcard', 'match_only_text', 'constant_keyword'].includes(expected) && + !(typeof sample === 'string' || Array.isArray(sample)) + ) { + return true; + } + + // If types are anything but the above, we return false. Example types: + // "nested", "flattened", "object", "geopoint", "date" + return false; +} + +function generateProcessors(ecsMapping: object, samples: object, basePath: string = ''): object[] { + const ecsTypes = ECS_TYPES; + const valueFieldKeys = new Set(['target', 'confidence', 'date_formats', 'type']); + const results: object[] = []; + + for (const [key, value] of Object.entries(ecsMapping)) { + const currentPath = basePath ? 
`${basePath}.${key}` : key; + + if (typeof value === 'object' && value !== null) { + const valueKeys = new Set(Object.keys(value)); + if ([...valueFieldKeys].every((k) => valueKeys.has(k))) { + const processor = generateProcessor( + currentPath, + value as ECSField, + getEcsType(value as ECSField, ecsTypes), + getSampleValue(currentPath, samples) + ); + results.push(processor); + } else { + results.push(...generateProcessors(value, samples, currentPath)); + } + } + } + return results; +} + +export function createPipeline(state: EcsMappingState): IngestPipeline { + const samples = JSON.parse(state.formattedSamples); + + const processors = generateProcessors(state.currentMapping, samples); + // Retrieve all source field names from convert processors to populate single remove processor: + const fieldsToRemove = processors.filter((p: any) => p.convert).map((p: any) => p.convert.field); + + const templatesPath = path.join(__dirname, '../../templates'); + const mappedValues = { + processors, + ecs_version: state.ecsVersion, + package_name: state.packageName, + data_stream_name: state.dataStreamName, + log_format: state.logFormat, + fields_to_remove: fieldsToRemove, + }; + try { + const env = new Environment(new FileSystemLoader(templatesPath), { + autoescape: false, + }); + env.addFilter('startswith', function (str, prefix) { + return str.startsWith(prefix); + }); + const template = env.getTemplate('pipeline.yml.njk'); + const renderedTemplate = template.render(mappedValues); + const ingestPipeline = yaml.load(renderedTemplate) as IngestPipeline; + return ingestPipeline; + } catch (error) { + console.error('Error rendering template:', error); + throw error; + } +} diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/prompts.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/prompts.ts new file mode 100644 index 0000000000000..6cfa56382f64d --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/prompts.ts @@ -0,0 +1,173 @@ 
+import { ChatPromptTemplate } from '@langchain/core/prompts'; +export const ECS_MAIN_PROMPT = ChatPromptTemplate.fromMessages([ + [ + 'system', + `You are a helpful, expert assistant in Elastic Common Schema (ECS), focusing only on helping users with translating their provided combined samples to Elastic Common Schema (ECS). + + Here is some context for you to reference for your task, read it carefully as you will get questions about it later: + + + {ecs} + + + {formatted_samples} + + `, + ], + [ + 'human', + `Looking at the combined sample from {package_name} {data_stream_name} provided above. The combined sample is a JSON object that includes all unique fields from the log samples sent by {package_name} {data_stream_name}. + + Go through each value step by step and modify it with the following process: + 1. Check if the name of each key and its current value matches the description and usecase of any of the above ECS fields. + 2. If one or more relevant ECS field is found, pick the one you are most confident about. + 3. Replace the value with a new object, and set the nested key "target" to be the full path of the ECS field name. If no confident match is found, the value should always be replaced with null. Also set the nested key "type" to be either "string", "boolean", "number" or "date" depending on what was detected as the example value. + 4. If the type "date" is used, then set date_format to be an array of one or more of the equivilant JAVA date formats that fits the example value. If the type is not date then date_format should be set to an empty array []. + 5. For each key that you set a target ECS field, also score the confidence you have in that the target field is correct, use a float between 0.0 and 1.0 and set the value in the nested "confidence" key. + 6. When you want to use an ECS field as a value for a target, but another field already has the same ECS field as its target, try to find another fitting ECS field. 
If none is found then the least confident key/value should be null instead. + 7. If you are not confident for a specific field, you should always set the value to null. + 8. These {package_name} log samples are based on source and destination type data, prioritize these compared to other related ECS fields like host.* and observer.*. + + You ALWAYS follow these guidelines when writing your response: + + - Never use \`event.category\` or \`event.type\` as target ECS fields. + - Never use the same ECS target multiple times. If no other field is found that you are confident in, it should always be null. + - All keys should be under the {package_name} {data_stream_name} parent fields, same as the original combined sample above. + - All target key values should be ECS field names only from the above ECS fields provided as context. + - All original keys from the combined sample object needs to be in your response. + - Only when a target value is set should type, date_format and confidence be filled out. If no target value then the value should simply be null. + - Do not respond with anything except the ecs maping JSON object enclosed with 3 backticks (\`), see example response below. + + + Example response format: + + A: Please find the JSON object below: + \`\`\`json + {ex_answer} + \`\`\` + "`, + ], + ['ai', 'Please find the JSON object below:'], +]); + +export const ECS_INVALID_PROMPT = ChatPromptTemplate.fromMessages([ + [ + 'system', + `You are a helpful, expert assistant in Elastic Common Schema (ECS), you help review and try to resolve incorrect field mappings. 
+ + Here is some context for you to reference your task, read it carefully as you will get questions about it later: + + + {ecs} + + + {formatted_samples} + + + {current_mapping} + + `, + ], + [ + 'human', + `The following fields are mapped incorrectly in the current mapping, please help me resolve this: + + {invalid_ecs_fields} + + To resolve the invalid ecs fields, go through each key and value defined in the invalid fields, and modify the current mapping step by step, and ensure they follow these guidelines: + + - Update the provided current mapping object, the value should be the corresponding Elastic Common Schema field name. If no good or valid match is found the value should always be null. + - Do not respond with anything except the updated current mapping JSON object enclosed with 3 backticks (\`). See example response below. + + + Example response format: + + A: Please find the JSON object below: + \`\`\`json + {ex_answer} + \`\`\` + `, + ], + ['ai', 'Please find the JSON object below:'], +]); + +export const ECS_MISSING_KEYS_PROMPT = ChatPromptTemplate.fromMessages([ + [ + 'system', + `You are a helpful, expert assistant in Elastic Common Schema (ECS), you help review and try to resolve missing fields in the current mapping. + + Here is some context for you to reference for your task, read it carefully as you will get questions about it later: + + + {ecs} + + + {formatted_samples} + + + {current_mapping} + + `, + ], + [ + 'human', + `The following keys are missing from the current mapping: + + {missing_keys} + + + Help resolve the issue by adding the missing keys, look up example values from the formatted samples, and go through each missing key step by step, resolve it by following these guidelines: + + - Update the provided current mapping object with all the missing keys, the value should be the corresponding Elastic Common Schema field name. If no good match is found the value should always be null. 
+ - Do not respond with anything except the updated current mapping JSON object enclosed with 3 backticks (\`). See example response below. + + + Example response format: + + A: Please find the JSON object below: + \`\`\`json + {ex_answer} + \`\`\` + `, + ], + ['ai', 'Please find the JSON object below:'], +]); + +export const ECS_DUPLICATES_PROMPT = ChatPromptTemplate.fromMessages([ + [ + 'system', + `You are a helpful, expert assistant in Elastic Common Schema (ECS), you help review and try to resolve incorrect duplicate fields in the current mapping. + + Here is some context for you to reference for your task, read it carefully as you will get questions about it later: + + + {ecs} + + + {current_mapping} + + `, + ], + [ + 'human', + `The following duplicate fields are mapped to the same ECS fields in the current mapping, please help me resolve this: + + {duplicate_fields} + + + To resolve the duplicate mappings, go through each key and value defined in the duplicate fields, and modify the current mapping step by step, and ensure they follow these guidelines: + + - Multiple keys should not have the same value (ECS field it will be mapped to). If multiple keys do have the same value then always choose the best match for the ECS field, while the other duplicates should have their value changed to null. + - Do not respond with anything except the updated current mapping JSON object enclosed with 3 backticks (\`). See example response below. 
+ + + Example response format: + + A: Please find the JSON object below: + \`\`\`json + {ex_answer} + \`\`\` + `, + ], + ['ai', 'Please find the JSON object below:'], +]); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts new file mode 100644 index 0000000000000..5640c27e58199 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts @@ -0,0 +1,150 @@ +import { ECS_FULL } from './constants'; +import { EcsMappingState } from '../../types'; + +const valueFieldKeys = new Set(['target', 'confidence', 'date_formats', 'type']); +type AnyObject = Record; + +function extractKeys(data: AnyObject, prefix: string = ''): Set { + const keys = new Set(); + + for (const [key, value] of Object.entries(data)) { + const fullKey = prefix ? `${prefix}.${key}` : key; + + if (Array.isArray(value)) { + // Directly add the key for arrays without iterating over elements + keys.add(fullKey); + } else if (typeof value === 'object' && value !== null) { + const valueKeys = new Set(Object.keys(value)); + + if ([...valueFieldKeys].every((k) => valueKeys.has(k))) { + keys.add(fullKey); + } else { + // Recursively extract keys if the current value is a nested object + for (const nestedKey of extractKeys(value, fullKey)) { + keys.add(nestedKey); + } + } + } else { + // Add the key if the value is not an object or is null + keys.add(fullKey); + } + } + + return keys; +} + +function findMissingFields(formattedSamples: string, ecsMapping: AnyObject): string[] { + const combinedSamples = JSON.parse(formattedSamples); + const uniqueKeysFromSamples = extractKeys(combinedSamples); + const ecsResponseKeys = extractKeys(ecsMapping); + + const missingKeys = [...uniqueKeysFromSamples].filter((key) => !ecsResponseKeys.has(key)); + return missingKeys; +} + +function processMapping(path: string[], value: any, output: Record): void { + if (typeof value === 'object' && value !== null) { + 
if (!Array.isArray(value)) { + // If the value is a dict with all the keys returned for each source field, this is the full path of the field. + const valueKeys = new Set(Object.keys(value)); + + if ([...valueFieldKeys].every((k) => valueKeys.has(k))) { + if (value?.target !== null) { + if (!output[value?.target]) { + output[value.target] = []; + } + output[value.target].push(path); + } + } else { + // Regular dictionary, continue traversing + for (const [k, v] of Object.entries(value)) { + processMapping([...path, k], v, output); + } + } + } else { + // If the value is an array, iterate through items and process them + for (const item of value) { + if (typeof item === 'object' && item !== null) { + processMapping(path, item, output); + } + } + } + } else if (value !== null) { + // Direct value, accumulate path + if (!output[value]) { + output[value] = []; + } + output[value].push(path); + } +} + +function getValueFromPath(obj: AnyObject, path: string[]): any { + return path.reduce((acc, key) => (acc && acc[key] !== undefined ? 
acc[key] : null), obj); +} + +function findDuplicateFields(samples: string[], ecsMapping: AnyObject): string[] { + const parsedSamples = samples.map((sample) => JSON.parse(sample)); + const results: string[] = []; + const output: Record = {}; + + // Get all keys for each target ECS mapping field + processMapping([], ecsMapping, output); + + // Filter out any ECS field that does not have multiple source fields mapped to it + const filteredOutput = Object.fromEntries( + Object.entries(output).filter(([_, paths]) => paths.length > 1 && _ !== null) + ); + + // For each ECS field where value is the ECS field and paths is the array of source field names + for (const [value, paths] of Object.entries(filteredOutput)) { + // For each log sample, checking if more than 1 source field exists in the same sample + for (const sample of parsedSamples) { + const foundPaths = paths.filter((path) => getValueFromPath(sample, path) !== null); + if (foundPaths.length > 1) { + const matchingFields = foundPaths.map((p) => p.join('.')); + results.push( + `One or more samples have matching fields for ECS field '${value}': ${matchingFields.join( + ', ' + )}` + ); + break; + } + } + } + + return results; +} + +// Function to find invalid ECS fields +function findInvalidEcsFields(ecsMapping: AnyObject): string[] { + const results: string[] = []; + const output: Record = {}; + const ecsDict = ECS_FULL; + + processMapping([], ecsMapping, output); + const filteredOutput = Object.fromEntries( + Object.entries(output).filter(([key, _]) => key !== null) + ); + + for (const [ecsValue, paths] of Object.entries(filteredOutput)) { + if (!ecsDict.hasOwnProperty(ecsValue)) { + const field = paths.map((p) => p.join('.')); + results.push(`Invalid ECS field mapping identified for ${ecsValue} : ${field.join(', ')}`); + } + } + + return results; +} + +export function handleValidateMappings(state: EcsMappingState): AnyObject { + const missingKeys = findMissingFields(state?.formattedSamples, 
state?.currentMapping); + const duplicateFields = findDuplicateFields(state?.samples, state?.currentMapping); + const invalidEcsFields = findInvalidEcsFields(state?.currentMapping); + + return { + missingKeys, + duplicateFields, + invalidEcsFields, + lastExecutedChain: 'validate_mappings', + }; +} diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/constants.ts b/x-pack/plugins/integration_assistant/server/graphs/related/constants.ts new file mode 100644 index 0000000000000..3bf2ac0d418f6 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/related/constants.ts @@ -0,0 +1,53 @@ +export const RELATED_ECS_FIELDS = { + 'related.hash': { + type: 'keyword', + description: 'All the hashes seen in the docs', + note: 'this field should contain an array of values', + }, + 'related.hosts': { + type: 'keyword', + description: 'All hostnames or other host identifiers seen in the docs', + note: 'this field should contain an array of values', + }, + 'related.ip': { + type: 'keyword', + description: 'All of the IPs seen in the docs', + note: 'this field should contain an array of values', + }, + 'related.user': { + type: 'keyword', + description: 'All the user names or other user identifiers seen in the docs', + note: 'this field should contain an array of values', + }, +}; + +export const RELATED_EXAMPLE_ANSWER = [ + { + append: { + field: 'related.ip', + value: ['{{{source.ip}}}'], + allow_duplicates: 'false', + }, + }, + { + append: { + field: 'related.user', + value: ['{{{server.user.name}}}'], + allow_duplicates: 'false', + }, + }, + { + append: { + field: 'related.hosts', + value: ['{{{client.domain}}}'], + allow_duplicates: 'false', + }, + }, + { + append: { + field: 'related.hash', + value: ['{{{file.hash.sha1}}}'], + allow_duplicates: 'false', + }, + }, +]; diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts b/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts new file mode 100644 index 
0000000000000..5d5c2d340febe --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts @@ -0,0 +1,32 @@ +import { JsonOutputParser } from '@langchain/core/output_parsers'; +import { RELATED_ERROR_PROMPT } from './prompts'; +import { getModel } from '../../providers/bedrock'; +import { RelatedState } from '../../types'; +import { combineProcessors } from '../../util/pipeline'; +import { Pipeline } from '../../../common/types'; + +export async function handleErrors(state: RelatedState) { + const relatedErrorPrompt = RELATED_ERROR_PROMPT; + const model = getModel(); + console.log('testing related error'); + + const outputParser = new JsonOutputParser(); + const relatedErrorGraph = relatedErrorPrompt.pipe(model).pipe(outputParser); + + const currentProcessors = (await relatedErrorGraph.invoke({ + current_processors: JSON.stringify(state.currentProcessors, null, 2), + ex_answer: state.exAnswer, + errors: JSON.stringify(state.errors, null, 2), + package_name: state.packageName, + data_stream_name: state.dataStreamName, + })) as any[]; + + const currentPipeline = combineProcessors(state.initialPipeline as Pipeline, currentProcessors); + + return { + currentPipeline, + currentProcessors, + reviewed: false, + lastExecutedChain: 'error', + }; +} diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts new file mode 100644 index 0000000000000..3a09ce6f3f046 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts @@ -0,0 +1,158 @@ +import { StateGraph, StateGraphArgs, END, START } from '@langchain/langgraph'; +import { RelatedState } from '../../types'; +import { modifySamples, formatSamples } from '../../util/samples'; +import { handleValidatePipeline } from '../../util/es'; +import { handleRelated } from './related'; +import { handleErrors } from './errors'; +import { handleReview } from './review'; +import { 
RELATED_ECS_FIELDS, RELATED_EXAMPLE_ANSWER } from './constants'; + +const graphState: StateGraphArgs['channels'] = { + lastExecutedChain: { + value: (x: string, y?: string) => y ?? x, + default: () => '', + }, + rawSamples: { + value: (x: string[], y?: string[]) => y ?? x, + default: () => [], + }, + samples: { + value: (x: string[], y?: string[]) => y ?? x, + default: () => [], + }, + formattedSamples: { + value: (x: string, y?: string) => y ?? x, + default: () => '', + }, + ecs: { + value: (x: string, y?: string) => y ?? x, + default: () => '', + }, + exAnswer: { + value: (x: string, y?: string) => y ?? x, + default: () => '', + }, + packageName: { + value: (x: string, y?: string) => y ?? x, + default: () => '', + }, + dataStreamName: { + value: (x: string, y?: string) => y ?? x, + default: () => '', + }, + finalized: { + value: (x: boolean, y?: boolean) => y ?? x, + default: () => false, + }, + reviewed: { + value: (x: boolean, y?: boolean) => y ?? x, + default: () => false, + }, + errors: { + value: (x: object, y?: object) => y ?? x, + default: () => ({}), + }, + pipelineResults: { + value: (x: object[], y?: object[]) => y ?? x, + default: () => [], + }, + currentMapping: { + value: (x: object, y?: object) => y ?? x, + default: () => ({}), + }, + currentPipeline: { + value: (x: object, y?: object) => y ?? x, + default: () => ({}), + }, + currentProcessors: { + value: (x: object[], y?: object[]) => y ?? x, + default: () => [], + }, + initialPipeline: { + value: (x: object, y?: object) => y ?? x, + default: () => ({}), + }, + results: { + value: (x: object, y?: object) => y ?? 
x, + default: () => ({}), + }, +}; + +function modelInput(state: RelatedState): Partial { + const samples = modifySamples(state); + const formattedSamples = formatSamples(samples); + const initialPipeline = JSON.parse(JSON.stringify(state.currentPipeline)); + return { + exAnswer: JSON.stringify(RELATED_EXAMPLE_ANSWER, null, 2), + ecs: JSON.stringify(RELATED_ECS_FIELDS, null, 2), + samples, + formattedSamples, + initialPipeline, + finalized: false, + reviewed: false, + lastExecutedChain: 'modelInput', + }; +} + +function modelOutput(state: RelatedState): Partial { + return { + finalized: true, + lastExecutedChain: 'modelOutput', + results: { + docs: state.pipelineResults, + pipeline: state.currentPipeline, + }, + }; +} + +function inputRouter(state: RelatedState): string { + if (Object.keys(state.pipelineResults).length === 0) { + console.log('No pipeline results found'); + return 'validatePipeline'; + } + return 'related'; +} + +function chainRouter(state: RelatedState): string { + if (Object.keys(state.currentProcessors).length === 0) { + return 'related'; + } + if (Object.keys(state.errors).length > 0) { + return 'errors'; + } + if (!state.reviewed) { + return 'review'; + } + if (!state.finalized) { + return 'modelOutput'; + } + return END; +} + +export function getRelatedGraph() { + const workflow = new StateGraph({ channels: graphState }) + .addNode('modelInput', modelInput) + .addNode('modelOutput', modelOutput) + .addNode('handleRelated', handleRelated) + .addNode('handleValidatePipeline', handleValidatePipeline) + .addNode('handleErrors', handleErrors) + .addNode('handleReview', handleReview) + .addEdge(START, 'modelInput') + .addEdge('modelOutput', END) + .addEdge('handleRelated', 'handleValidatePipeline') + .addEdge('handleErrors', 'handleValidatePipeline') + .addEdge('handleReview', 'handleValidatePipeline') + .addConditionalEdges('modelInput', inputRouter, { + related: 'handleRelated', + validatePipeline: 'handleValidatePipeline', + }) + 
.addConditionalEdges('handleValidatePipeline', chainRouter, { + related: 'handleRelated', + errors: 'handleErrors', + review: 'handleReview', + modelOutput: 'modelOutput', + }); + + const compiledRelatedGraph = workflow.compile(); + return compiledRelatedGraph; +} diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/index.ts b/x-pack/plugins/integration_assistant/server/graphs/related/index.ts new file mode 100644 index 0000000000000..eb205d904e83d --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/related/index.ts @@ -0,0 +1 @@ +export { getRelatedGraph } from './graph'; diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/prompts.ts b/x-pack/plugins/integration_assistant/server/graphs/related/prompts.ts new file mode 100644 index 0000000000000..531e3203827c4 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/related/prompts.ts @@ -0,0 +1,136 @@ +import { ChatPromptTemplate } from '@langchain/core/prompts'; + +export const RELATED_MAIN_PROMPT = ChatPromptTemplate.fromMessages([ + [ + 'system', + `You are a helpful, expert assistant on Elasticsearch Ingest Pipelines, focusing on providing append processors that can be used to enrich samples with all relevant related.ip, related.hash, related.user and related.host fields. + Here are some context for you to reference for your task, read it carefully as you will get questions about it later: + + + {ecs} + + `, + ], + [ + 'human', + `Please help me by providing all relevant append processors for any detected related.ip, related.hash, related.user and related.host fields that would fit the below pipeline results as an array of JSON objects. + + + {pipeline_results} + + + Go through each of the pipeline results above step by step and do the following to add all relevant related.ip, related.hash, related.user and related.host fields. + 1. 
Try to understand what is unique about each pipeline result, and what sort of related.ip, related.hash, related.user and related.host fields that fit best, and if there is any unique values for each result. + 2. For each of related.ip, related.hash, related.user and related.host fields that you find, add a new append processor to your array of JSON objects. + 3. If only certain results are relevant to the related.ip, related.hash, related.user and related.host fields, add an if condition similar to the above example processors, that describes what value or field needs to be available for this categorization to take place. The if condition should be inside the processor object. + 4. Always check if the related.ip, related.hash, related.user and related.host fields are common in the ecs context above. + 5. The value argument for the append processor shall consist of one field. + + You ALWAYS follow these guidelines when writing your response: + + - You can add as many append processors you need to cover all the fields that you detected. + - If conditions should always use a ? character when accessing nested fields, in case the field might not always be available, see example processors above. + - When an if condition is not needed the argument should not be used for the processor object. + - Do not respond with anything except the array of processors as a valid JSON objects enclosed with 3 backticks (\`), see example response below. + + + Example response format: + + A: Please find the Related processors below: + \`\`\`json + {ex_answer} + \`\`\` + `, + ], + ['ai', 'Please find the Related processors below:'], +]); + +export const RELATED_ERROR_PROMPT = ChatPromptTemplate.fromMessages([ + [ + 'system', + `You are a helpful, expert assistant on Elasticsearch Ingest Pipelines, focusing on resolving errors and issues with append processors used for related field categorization. 
+ Here is some context that you can reference for your task, read it carefully as you will get questions about it later: + + + {current_processors} + + + {errors} + + `, + ], + [ + 'human', + `Please go through each error above, carefully review the provided current processors, and resolve the most likely cause to the supplied error by returning an updated version of the current_processors. + + Follow these steps to help resolve the current ingest pipeline issues: + 1. Try to fix all related errors before responding. + 2. Apply all fixes to the provided array of current append processors. + 3. If you do not know how to fix an error, then continue to the next and return the complete updated array of current append processors. + + You ALWAYS follow these guidelines when writing your response: + + - When checking for the existance of multiple values in a single variable, use this format: "if": "['value1', 'value2'].contains(ctx.{package_name}?.{data_stream_name}?.field)" + - If conditions should never be in a format like "if": "true". If it exist in the current array of append processors, remove only the redundant if condition. + - If the error complains that it is a null point exception, always ensure the if conditions uses a ? when accessing nested fields. For example ctx.field1?.nestedfield1?.nestedfield2. + - Never use "split" in template values, only use the field name inside the triple brackets. If the error mentions "Improperly closed variable in query-template" then check each "value" field for any special characters and remove them. + - Do not respond with anything except the complete updated array of processors as a valid JSON object enclosed with 3 backticks (\`), see example response below. 
+ + + Example response format: + + A: Please find the updated ECS related append processors below: + \`\`\`json + {ex_answer} + \`\`\` + `, + ], + ['ai', 'Please find the updated ECS related append processors below:'], +]); + +export const RELATED_REVIEW_PROMPT = ChatPromptTemplate.fromMessages([ + [ + 'system', + `You are a helpful, expert assistant on Elasticsearch Ingest Pipelines, focusing on adding improvements to the provided array of processors and reviewing the current results. + + Here is some context that you can reference for your task, read it carefully as you will get questions about it later: + + + {current_processors} + + `, + ], + [ + 'human', + `Testing my current pipeline returned me with the below pipeline results: + + {pipeline_results} + + + Please review the pipeline results and the array of current processors, ensuring to identify all the related.ip , related.user , related.hash and related.host fields that would match each pipeline result document. If any related.ip , related.user , related.hash or related.host fields is missing from any of the pipeline results, add them by updating the array of current processors and return the whole updated array of processors. + + For each pipeline result you review step by step, remember the below steps: + 1. Check each of the pipeline results to see if the field/value matches related.ip , related.user , related.hash or related.host. If not then try to correlate the results with the current processors and see if either a new append processor should be added to the list with a matching if condition, or if any of the if conditions should be modified as they are not matching that is in the results. + 2. If the results have related.ip , related.user , related.hash or related.host value, see if more of them could match, if so it could be added to the relevant append processor which added the initial values. + 3. Ensure that all append processors has allow_duplicates: false, as seen in the example response. 
+ + You ALWAYS follow these guidelines when writing your response: + + - You can use as many append processors as you need to add all relevant ECS categories and types combinations. + - If conditions should always use a ? character when accessing nested fields, in case the field might not always be available, see example processors above. + - When an if condition is not needed the argument should not be used for the processor object. + - If not updates are needed you respond with the initially provided current processors. + - Each append processor needs to have the allow_duplicates: false argument, as shown in the below example response. + - Do not respond with anything except updated array of processors as a valid JSON object enclosed with 3 backticks (\`), see example response below. + + + Example response format: + + A: Please find the updated ECS related append processors below: + \`\`\` + {ex_answer} + \`\`\` + `, + ], + ['ai', 'Please find the updated ECS related append processors below:'], +]); diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/related.ts b/x-pack/plugins/integration_assistant/server/graphs/related/related.ts new file mode 100644 index 0000000000000..115aa6292c981 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/related/related.ts @@ -0,0 +1,30 @@ +import { JsonOutputParser } from '@langchain/core/output_parsers'; +import { RELATED_MAIN_PROMPT } from './prompts'; +import { getModel } from '../../providers/bedrock'; +import { RelatedState } from '../../types'; +import { combineProcessors } from '../../util/pipeline'; +import { Pipeline } from '../../../common/types'; + +export async function handleRelated(state: RelatedState) { + const relatedMainPrompt = RELATED_MAIN_PROMPT; + const model = getModel(); + console.log('testing related main'); + + const outputParser = new JsonOutputParser(); + const relatedMainGraph = relatedMainPrompt.pipe(model).pipe(outputParser); + + const currentProcessors = 
(await relatedMainGraph.invoke({ + pipeline_results: JSON.stringify(state.pipelineResults, null, 2), + ex_answer: state.exAnswer, + ecs: state.ecs, + })) as any[]; + + const currentPipeline = combineProcessors(state.initialPipeline as Pipeline, currentProcessors); + + return { + currentPipeline, + currentProcessors, + reviewed: false, + lastExecutedChain: 'main', + }; +} diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/review.ts b/x-pack/plugins/integration_assistant/server/graphs/related/review.ts new file mode 100644 index 0000000000000..4de13d427a015 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/related/review.ts @@ -0,0 +1,30 @@ +import { JsonOutputParser } from '@langchain/core/output_parsers'; +import { RELATED_REVIEW_PROMPT } from './prompts'; +import { getModel } from '../../providers/bedrock'; +import { RelatedState } from '../../types'; +import { combineProcessors } from '../../util/pipeline'; +import { Pipeline } from '../../../common/types'; + +export async function handleReview(state: RelatedState) { + const relatedReviewPrompt = RELATED_REVIEW_PROMPT; + const model = getModel(); + console.log('testing related review'); + + const outputParser = new JsonOutputParser(); + const relatedReviewGraph = relatedReviewPrompt.pipe(model).pipe(outputParser); + + const currentProcessors = (await relatedReviewGraph.invoke({ + current_processors: JSON.stringify(state.currentProcessors, null, 2), + ex_answer: state.exAnswer, + pipeline_results: JSON.stringify(state.pipelineResults, null, 2), + })) as any[]; + + const currentPipeline = combineProcessors(state.initialPipeline as Pipeline, currentProcessors); + + return { + currentPipeline, + currentProcessors, + reviewed: true, + lastExecutedChain: 'review', + }; +} diff --git a/x-pack/plugins/integration_assistant/server/index.ts b/x-pack/plugins/integration_assistant/server/index.ts new file mode 100644 index 0000000000000..d47e4470ccb8b --- /dev/null +++ 
b/x-pack/plugins/integration_assistant/server/index.ts @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { PluginInitializerContext } from '@kbn/core/server'; + +export async function plugin(initializerContext: PluginInitializerContext) { + const { IntegrationAssistantPlugin } = await import('./plugin'); + return new IntegrationAssistantPlugin(initializerContext); +} + +export type { IntegrationAssistantPluginSetup, IntegrationAssistantPluginStart } from './types'; diff --git a/x-pack/plugins/integration_assistant/server/plugin.ts b/x-pack/plugins/integration_assistant/server/plugin.ts new file mode 100644 index 0000000000000..7e36f759ff141 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/plugin.ts @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { Plugin, PluginInitializerContext, CoreSetup, CoreStart, Logger } from '@kbn/core/server'; +import { registerRoutes } from './routes'; +import { IntegrationAssistantPluginSetup, IntegrationAssistantPluginStart } from './types'; + +export class IntegrationAssistantPlugin + implements Plugin +{ + private readonly logger: Logger; + + constructor(initializerContext: PluginInitializerContext) { + this.logger = initializerContext.logger.get(); + } + public setup(core: CoreSetup) { + const router = core.http.createRouter(); + this.logger.debug('integrationAssistant api: Setup'); + registerRoutes(router); + + return {}; + } + + public start(core: CoreStart) { + this.logger.debug('integrationAssistant api: Started'); + return {}; + } + + public stop() {} +} diff --git a/x-pack/plugins/integration_assistant/server/providers/bedrock.ts b/x-pack/plugins/integration_assistant/server/providers/bedrock.ts new file mode 100644 index 0000000000000..beac9eb3863d1 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/providers/bedrock.ts @@ -0,0 +1,20 @@ +import { BedrockChat } from '@langchain/community/chat_models/bedrock/web'; + +export function getModel(): BedrockChat { + const model = new BedrockChat({ + model: 'anthropic.claude-3-opus-20240229-v1:0', + region: 'us-west-2', + temperature: 0.05, + maxTokens: 4096, + credentials: { + accessKeyId: process.env.AWS_ACCESS_KEY_ID || '', + secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY || '', + }, + modelKwargs: { + top_k: 200, + top_p: 0.4, + stop_sequences: ['Human:'], + }, + }); + return model; +} diff --git a/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts b/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts new file mode 100644 index 0000000000000..39769e6cc8ff5 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { IRouter } from '@kbn/core/server'; +import { INTEGRATION_BUILDER_PATH } from '../../common'; + +export function registerIntegrationBuilderRoutes(router: IRouter) { + router.post( + { + path: `${INTEGRATION_BUILDER_PATH}`, + validate: false, + }, + async (ctx, req, res) => { + return res.ok(); + } + ); +} diff --git a/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts b/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts new file mode 100644 index 0000000000000..34bf93aee985b --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { IRouter } from '@kbn/core/server'; +import { CATEGORZATION_GRAPH_PATH } from '../../common'; + +export function registerCategorizationRoutes(router: IRouter) { + router.post( + { + path: `${CATEGORZATION_GRAPH_PATH}`, + validate: false, + }, + async (ctx, req, res) => { + return res.ok(); + } + ); +} diff --git a/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts b/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts new file mode 100644 index 0000000000000..d21f0d4fcc835 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { IRouter } from '@kbn/core/server'; +import { ECS_GRAPH_PATH } from '../../common'; + +export interface EcsGraphResponse { + ecs: string; +} + +const body: EcsGraphResponse = { + ecs: 'graph', +}; + +export function registerEcsRoutes(router: IRouter) { + router.get( + { + path: `${ECS_GRAPH_PATH}`, + validate: false, + }, + async (ctx, req, res) => { + return res.ok({ body }); + } + ); +} diff --git a/x-pack/plugins/integration_assistant/server/routes/index.ts b/x-pack/plugins/integration_assistant/server/routes/index.ts new file mode 100644 index 0000000000000..34d65dabb88c5 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/routes/index.ts @@ -0,0 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +export { registerRoutes } from './register_routes'; diff --git a/x-pack/plugins/integration_assistant/server/routes/register_routes.ts b/x-pack/plugins/integration_assistant/server/routes/register_routes.ts new file mode 100644 index 0000000000000..954e80b924c86 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/routes/register_routes.ts @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { IRouter } from '@kbn/core/server'; +import { registerEcsRoutes } from './ecs_routes'; +import { registerIntegrationBuilderRoutes } from './build_integration_routes'; +import { registerCategorizationRoutes } from './categorization_routes'; +import { registerRelatedRoutes } from './related_routes'; + +export function registerRoutes(router: IRouter) { + registerEcsRoutes(router); + registerIntegrationBuilderRoutes(router); + registerCategorizationRoutes(router); + registerRelatedRoutes(router); +} diff --git a/x-pack/plugins/integration_assistant/server/routes/related_routes.ts b/x-pack/plugins/integration_assistant/server/routes/related_routes.ts new file mode 100644 index 0000000000000..4eb70cdbfa159 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/routes/related_routes.ts @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { IRouter } from '@kbn/core/server'; +import { RELATED_GRAPH_PATH } from '../../common'; + +export function registerRelatedRoutes(router: IRouter) { + router.post( + { + path: `${RELATED_GRAPH_PATH}`, + validate: false, + }, + async (ctx, req, res) => { + return res.ok(); + } + ); +} diff --git a/x-pack/plugins/integration_assistant/server/templates/pipeline.yml.njk b/x-pack/plugins/integration_assistant/server/templates/pipeline.yml.njk new file mode 100644 index 0000000000000..d9096154595da --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/pipeline.yml.njk @@ -0,0 +1,131 @@ +--- +description: Pipeline to process {{ package_name }} {{ data_stream_name }} logs +processors: + - set: + field: ecs.version + tag: set_ecs_version + value: '{{ ecs_version }}' + - rename: + field: message + target_field: event.original + tag: rename_message + ignore_missing: true + if: ctx.event?.original == null + - remove: + field: message + ignore_missing: true + tag: remove_message + if: 'ctx.event?.original != null'{% if log_format == 'json' %} + - json: + field: event.original + tag: json_original + target_field: {{ package_name }}.{{ data_stream_name }}{% endif %} +{% for processor in processors %}{% for key, value in processor %} + {% if key == 'rename' %} + - {{ key }}: + field: {{ value.field }} + target_field: {% if value.target_field | startswith('@') %}"{{ value.target_field }}"{% else %}{{ value.target_field }}{% endif %} + ignore_missing: true{% endif %} + {% if key == 'date' %} + - {{ key }}: + field: {{ value.field }} + target_field: {% if value.target_field | startswith('@') %}"{{ value.target_field }}"{% else %}{{ value.target_field }}{% endif %} + formats: + {% for format in value.formats %} + - {{ format }} + {% endfor %} + if: "ctx.{{ value.if }} != null"{% endif %} + {% if key == 'convert' %} + - {{ key }}: + field: {{ value.field }} + target_field: {% if value.target_field | startswith('@') %}"{{ value.target_field }}"{% else 
%}{{ value.target_field }}{% endif %} + ignore_missing: true + ignore_failure: true + type: {{value.type}}{% endif %}{% endfor %}{% endfor %} + - script: + description: Drops null/empty values recursively. + tag: script_drop_null_empty_values + lang: painless + {% raw %}source: | + boolean dropEmptyFields(Object object) { + if (object == null || object == "") { + return true; + } else if (object instanceof Map) { + ((Map) object).values().removeIf(value -> dropEmptyFields(value)); + return (((Map) object).size() == 0); + } else if (object instanceof List) { + ((List) object).removeIf(value -> dropEmptyFields(value)); + return (((List) object).length == 0); + } + return false; + } + dropEmptyFields(ctx);{% endraw %} + - geoip: + field: source.ip + tag: geoip_source_ip + target_field: source.geo + ignore_missing: true + - geoip: + ignore_missing: true + database_file: GeoLite2-ASN.mmdb + field: source.ip + tag: geoip_source_asn + target_field: source.as + properties: + - asn + - organization_name + - rename: + field: source.as.asn + tag: rename_source_as_asn + target_field: source.as.number + ignore_missing: true + - rename: + field: source.as.organization_name + tag: rename_source_as_organization_name + target_field: source.as.organization.name + ignore_missing: true + - geoip: + field: destination.ip + tag: geoip_destination_ip + target_field: destination.geo + ignore_missing: true + - geoip: + database_file: GeoLite2-ASN.mmdb + field: destination.ip + tag: geoip_destination_asn + target_field: destination.as + properties: + - asn + - organization_name + ignore_missing: true + - rename: + field: destination.as.asn + tag: rename_destination_as_asn + target_field: destination.as.number + ignore_missing: true + - rename: + field: destination.as.organization_name + tag: rename_destination_as_organization_name + target_field: destination.as.organization.name + ignore_missing: true +{% if fields_to_remove %} + - remove: + field: + {% for field in fields_to_remove %} + - 
{{ field }} + {% endfor %} + ignore_missing: true + tag: remove_fields{% endif %} + - remove: + field: event.original + tag: remove_original_event + if: ctx?.tags == null || !(ctx.tags.contains("preserve_original_event")) + ignore_failure: true + ignore_missing: true +on_failure: + - append: + field: error.message + value: '{% raw %}Processor {{{_ingest.on_failure_processor_type}}} with tag {{{_ingest.on_failure_processor_tag}}} in pipeline {{{_ingest.on_failure_pipeline}}} failed with message: {{{_ingest.on_failure_message}}}{% endraw %}' + - set: + field: event.kind + value: pipeline_error diff --git a/x-pack/plugins/integration_assistant/server/types.ts b/x-pack/plugins/integration_assistant/server/types.ts new file mode 100644 index 0000000000000..c6f9959715383 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/types.ts @@ -0,0 +1,132 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +// eslint-disable-next-line @typescript-eslint/no-empty-interface +export interface IntegrationAssistantPluginSetup {} +// eslint-disable-next-line @typescript-eslint/no-empty-interface +export interface IntegrationAssistantPluginStart {} + +export interface BuildIntegrationAPIRequest { + packageName: string; + packageTitle: string; + packageVersion: string; + dataStreamName: string; + inputTypes: string[]; + formSamples: string[]; + ingestPipeline: object; + docs: object[]; +} + +export interface EcsMappingAPIRequest { + packageName: string; + dataStreamName: string; + formSamples: string[]; +} + +export interface EcsMappingNewPipelineAPIRequest { + packageName: string; + dataStreamName: string; + formSamples: string[]; + mapping: object; +} + +export interface CategorizationAPIRequest { + packageName: string; + dataStreamName: string; + formSamples: string[]; + ingestPipeline: object; +} + +export interface RelatedAPIRequest { + packageName: string; + dataStreamName: string; + formSamples: string[]; + ingestPipeline: object; +} + +export interface CategorizationApiResponse { + results: { + pipeline: object; + docs: object[]; + }; +} + +export interface RelatedApiResponse { + results: { + pipeline: object; + docs: object[]; + }; +} + +export interface EcsMappingApiResponse { + results: { + mapping: object; + current_pipeline: object; + }; +} + +export interface CategorizationState { + rawSamples: string[]; + samples: string[]; + formattedSamples: string; + ecsTypes: string; + ecsCategories: string; + exAnswer: string; + lastExecutedChain: string; + packageName: string; + dataStreamName: string; + errors: object; + pipelineResults: object[]; + finalized: boolean; + reviewed: boolean; + currentMapping: object; + currentPipeline: object; + currentProcessors: object[]; + invalidCategorization: object; + initialPipeline: object; + results: object; +} + +export interface EcsMappingState { + ecs: string; + lastExecutedChain: string; + rawSamples: string[]; + 
samples: string[]; + formattedSamples: string; + exAnswer: string; + packageName: string; + dataStreamName: string; + finalized: boolean; + currentMapping: object; + currentPipeline: object; + duplicateFields: string[]; + missingKeys: string[]; + invalidEcsFields: string[]; + results: object; + logFormat: string; + ecsVersion: string; +} + +export interface RelatedState { + rawSamples: string[]; + samples: string[]; + formattedSamples: string; + ecs: string; + exAnswer: string; + packageName: string; + dataStreamName: string; + errors: object; + pipelineResults: object[]; + finalized: boolean; + reviewed: boolean; + currentMapping: object; + currentPipeline: object; + currentProcessors: object[]; + initialPipeline: object; + results: object; + lastExecutedChain: string; +} diff --git a/x-pack/plugins/integration_assistant/server/util/es.ts b/x-pack/plugins/integration_assistant/server/util/es.ts new file mode 100644 index 0000000000000..c79092ca0c800 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/util/es.ts @@ -0,0 +1,83 @@ +import { EcsMappingState } from "../types/EcsMapping"; +import { CategorizationState } from "../types/Categorization"; +import { RelatedState } from "../types/Related"; +import { Client } from "@elastic/elasticsearch"; + +interface DocTemplate { + _index: string; + _id: string; + _source: { + message: string; + }; +} + +function formatSample(sample: string): DocTemplate { + const docsTemplate: DocTemplate = { + _index: "index", + _id: "id", + _source: { message: "" }, + }; + const formatted: DocTemplate = { ...docsTemplate }; + formatted._source.message = sample; + return formatted; +} + +function newClient(): Client { + const client = new Client({ + node: "https://localhost:9200", + auth: { + username: "elastic", + password: "changeme", + }, + tls: { + rejectUnauthorized: false, + }, + }); + return client; +} + +async function _testPipeline( + samples: string[], + pipeline: object +): Promise<[any[], any[]]> { + const docs = 
samples.map((sample) => formatSample(sample)); + const results: object[] = []; + const errors: object[] = []; + + const client = newClient(); + try { + const output = await client.ingest.simulate({ docs, pipeline }); + for (const doc of output.docs) { + if (doc.doc?._source?.error) { + errors.push(doc.doc._source.error); + } else if (doc.doc?._source) { + results.push(doc.doc._source); + } + } + } catch (e) { + errors.push({ error: (e as Error).message }); + } + + return [errors, results]; +} + +export async function handleValidatePipeline( + state: EcsMappingState | CategorizationState | RelatedState +): Promise< + | Partial + | Partial + | Partial +> { + const [errors, results] = await _testPipeline( + state.rawSamples, + state.currentPipeline + ); + console.log("testing validate pipeline"); + console.log("errors", errors); + //console.log("results", results); + return { + errors, + pipelineResults: results, + lastExecutedChain: "validate_pipeline", + }; +} diff --git a/x-pack/plugins/integration_assistant/server/util/pipeline.ts b/x-pack/plugins/integration_assistant/server/util/pipeline.ts new file mode 100644 index 0000000000000..c481d947a78db --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/util/pipeline.ts @@ -0,0 +1,21 @@ +import { deepCopy } from './util'; + +interface Pipeline { + processors: any[]; +} + +export function combineProcessors(initialPipeline: Pipeline, processors: any[]): Pipeline { + // Create a deep copy of the initialPipeline to avoid modifying the original input + const currentPipeline = deepCopy(initialPipeline); + + // Access and modify the processors list in the copied pipeline + const currentProcessors = currentPipeline.processors; + const combinedProcessors = [ + ...currentProcessors.slice(0, -1), + ...processors, + ...currentProcessors.slice(-1), + ]; + currentPipeline.processors = combinedProcessors; + + return currentPipeline; +} diff --git a/x-pack/plugins/integration_assistant/server/util/samples.ts 
b/x-pack/plugins/integration_assistant/server/util/samples.ts new file mode 100644 index 0000000000000..c1eb41634203d --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/util/samples.ts @@ -0,0 +1,88 @@ +import { CategorizationState, EcsMappingState, RelatedState } from '../types'; + +interface SampleObj { + [key: string]: any; +} + +interface NewObj { + [key: string]: { + [key: string]: SampleObj; + }; +} + +export function modifySamples(state: EcsMappingState | CategorizationState | RelatedState) { + const modifiedSamples: string[] = []; + const rawSamples = state.rawSamples; + const packageName = state.packageName; + const dataStreamName = state.dataStreamName; + + for (const sample of rawSamples) { + const sampleObj: SampleObj = JSON.parse(sample); + const newObj: NewObj = { + [packageName]: { + [dataStreamName]: sampleObj, + }, + }; + const modifiedSample = JSON.stringify(newObj); + modifiedSamples.push(modifiedSample); + } + + return modifiedSamples; +} + +function isEmptyValue(value: any): boolean { + return ( + value === null || + value === undefined || + (typeof value === 'object' && !Array.isArray(value) && Object.keys(value).length === 0) || + (Array.isArray(value) && value.length === 0) + ); +} + +function merge(target: Record, source: Record): Record { + for (const [key, sourceValue] of Object.entries(source)) { + const targetValue = target[key]; + if (Array.isArray(sourceValue)) { + // Directly assign arrays + target[key] = sourceValue; + } else if ( + typeof sourceValue === 'object' && + sourceValue !== null && + !Array.isArray(targetValue) + ) { + if (typeof targetValue !== 'object' || isEmptyValue(targetValue)) { + target[key] = merge({}, sourceValue); + } else { + target[key] = merge(targetValue, sourceValue); + } + } else if (!(key in target) || (isEmptyValue(targetValue) && !isEmptyValue(sourceValue))) { + target[key] = sourceValue; + } + } + return target; +} + +export function mergeSamples(objects: any[]): string { + let result: 
Record = {}; + + for (const obj of objects) { + let sample: Record = obj; + if (typeof obj === 'string') { + sample = JSON.parse(obj); + } + result = merge(result, sample); + } + + return JSON.stringify(result, null, 2); +} + +export function formatSamples(samples: string[]): string { + const formattedSamples: any[] = []; + + for (const sample of samples) { + const sampleObj = JSON.parse(sample); + formattedSamples.push(sampleObj); + } + + return JSON.stringify(formattedSamples, null, 2); +} diff --git a/x-pack/plugins/integration_assistant/server/util/util.ts b/x-pack/plugins/integration_assistant/server/util/util.ts new file mode 100644 index 0000000000000..2376783e514aa --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/util/util.ts @@ -0,0 +1,3 @@ +export function deepCopy(obj: T): T { + return JSON.parse(JSON.stringify(obj)); +} diff --git a/x-pack/plugins/integration_assistant/tsconfig.json b/x-pack/plugins/integration_assistant/tsconfig.json new file mode 100644 index 0000000000000..bc6dac3bea829 --- /dev/null +++ b/x-pack/plugins/integration_assistant/tsconfig.json @@ -0,0 +1,22 @@ +{ + "extends": "../../../tsconfig.base.json", + "compilerOptions": { + "outDir": "target/types" + }, + "include": [ + "index.ts", + "public/**/*.ts", + "public/**/*.tsx", + "server/**/*.ts", + "common/**/*.ts", + "../../typings/**/*", + ], + "exclude": [ + "target/**/*", + ], + "kbn_references": [ + "@kbn/core", + "@kbn/core-http-browser", + "@kbn/config-schema", + ] +} diff --git a/yarn.lock b/yarn.lock index 5f1865d8d5f8a..19c89b83287a9 100644 --- a/yarn.lock +++ b/yarn.lock @@ -35,21 +35,6 @@ "@jridgewell/gen-mapping" "^0.1.0" "@jridgewell/trace-mapping" "^0.3.9" -"@anthropic-ai/sdk@^0.9.1": - version "0.9.1" - resolved "https://registry.yarnpkg.com/@anthropic-ai/sdk/-/sdk-0.9.1.tgz#b2d2b7bf05c90dce502c9a2e869066870f69ba88" - integrity sha512-wa1meQ2WSfoY8Uor3EdrJq0jTiZJoKoSii2ZVWRY1oN4Tlr5s59pADg9T79FTbPe1/se5c3pBeZgJL63wmuoBA== - dependencies: - "@types/node" 
"^18.11.18" - "@types/node-fetch" "^2.6.4" - abort-controller "^3.0.0" - agentkeepalive "^4.2.1" - digest-fetch "^1.3.0" - form-data-encoder "1.7.2" - formdata-node "^4.3.2" - node-fetch "^2.6.7" - web-streams-polyfill "^3.2.1" - "@apidevtools/json-schema-ref-parser@^9.0.6": version "9.0.9" resolved "https://registry.yarnpkg.com/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-9.0.9.tgz#d720f9256e3609621280584f2b47ae165359268b" @@ -5011,6 +4996,10 @@ version "0.0.0" uid "" +"@kbn/integration-assistant-plugin@link:x-pack/plugins/integration_assistant": + version "0.0.0" + uid "" + "@kbn/interactive-setup-plugin@link:src/plugins/interactive_setup": version "0.0.0" uid "" @@ -6739,48 +6728,68 @@ resolved "https://registry.yarnpkg.com/@kwsites/promise-deferred/-/promise-deferred-1.1.1.tgz#8ace5259254426ccef57f3175bc64ed7095ed919" integrity sha512-GaHYm+c0O9MjZRu0ongGBRbinu8gVAMd2UZjji6jVmqKtZluZnptXGWhz1E8j8D2HJ3f/yMxKAUC0b+57wncIw== -"@langchain/community@^0.0.44", "@langchain/community@~0.0.41": - version "0.0.44" - resolved "https://registry.yarnpkg.com/@langchain/community/-/community-0.0.44.tgz#b4f3453e3fd0b7a8c704fc35b004d7d738bd3416" - integrity sha512-II9Hz90jJmfWRICtxTg1auQWzFw0npqacWiiOpaxNhzs6rptdf56gyfC48Z6n1ii4R8FfAlfX6YxhOE7lGGKXg== +"@langchain/community@^0.2.2": + version "0.2.2" + resolved "https://registry.yarnpkg.com/@langchain/community/-/community-0.2.2.tgz#0ce7cd56ff8940fe73983f1853e165d334a2a446" + integrity sha512-TtlZnPBYt7Sujc1hAYvdZKUmV97wuF15O7b4nBX4lBfQeW38N0DwGbhqpitDbpaJqZ2s8DM4rjapECk0kIdAww== dependencies: - "@langchain/core" "~0.1.44" - "@langchain/openai" "~0.0.19" + "@langchain/core" "~0.2.0" + "@langchain/openai" "~0.0.28" + binary-extensions "^2.2.0" expr-eval "^2.0.2" flat "^5.0.2" + js-yaml "^4.1.0" + langchain "~0.2.0" langsmith "~0.1.1" uuid "^9.0.0" zod "^3.22.3" zod-to-json-schema "^3.22.5" -"@langchain/core@0.1.53", "@langchain/core@^0.1.53", "@langchain/core@~0.1.44", "@langchain/core@~0.1.45": - version 
"0.1.53" - resolved "https://registry.yarnpkg.com/@langchain/core/-/core-0.1.53.tgz#40bf273b6d5e1426c60ce9cc259562fe656573f1" - integrity sha512-khfRTu2DSCNMPUmnKx7iH0TpEaunW/4BgR6STTteRRDd0NFtXGfAwUuY9sm0+EKi/XKhdAmpGnfLwSfNg5F0Qw== +"@langchain/core@0.2.0", "@langchain/core@>0.1.0 <0.3.0", "@langchain/core@>0.1.56 <0.3.0", "@langchain/core@>0.1.61 <0.3.0", "@langchain/core@^0.2.0", "@langchain/core@~0.2.0": + version "0.2.0" + resolved "https://registry.yarnpkg.com/@langchain/core/-/core-0.2.0.tgz#19c6374a5ad80daf8e14cb58582bc988109a1403" + integrity sha512-UbCJUp9eh2JXd9AW/vhPbTgtZoMgTqJgSan5Wf/EP27X8JM65lWdCOpJW+gHyBXvabbyrZz3/EGaptTUL5gutw== dependencies: ansi-styles "^5.0.0" camelcase "6" decamelize "1.2.0" - js-tiktoken "^1.0.8" + js-tiktoken "^1.0.12" langsmith "~0.1.7" ml-distance "^4.0.0" + mustache "^4.2.0" p-queue "^6.6.2" p-retry "4" uuid "^9.0.0" zod "^3.22.4" zod-to-json-schema "^3.22.3" -"@langchain/openai@^0.0.25", "@langchain/openai@~0.0.19": - version "0.0.25" - resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-0.0.25.tgz#8332abea1e3acb9b1169f90636e518c0ee90622e" - integrity sha512-cD9xPDDXK2Cjs6yYg27BpdzBnQZvBb1yaNgMoGLWIT27UQVRyT96PLC1OVMQOmMmHaKDBCj/1bW4GQQgX7+d2Q== +"@langchain/langgraph@^0.0.20": + version "0.0.20" + resolved "https://registry.yarnpkg.com/@langchain/langgraph/-/langgraph-0.0.20.tgz#9229af1a79107916910fa65fe185bf66cda7736f" + integrity sha512-/byqz3WDbIQqaPDmC+Bo2n36LBpD42yj8wR7KiDZvrOIJSlMIoqwZeRkONEp9D7o61ZRaAMwoUJWriG8L9xdFg== dependencies: - "@langchain/core" "~0.1.45" - js-tiktoken "^1.0.7" - openai "^4.26.0" + "@langchain/core" ">0.1.61 <0.3.0" + uuid "^9.0.1" + +"@langchain/openai@^0.0.33", "@langchain/openai@~0.0.28": + version "0.0.33" + resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-0.0.33.tgz#af88d815ff0095018c879d3a1a5a32b2795b5c69" + integrity sha512-hTBo9y9bHtFvMT5ySBW7TrmKhLSA91iNahigeqAFBVrLmBDz+6rzzLFc1mpq6JEAR3fZKdaUXqso3nB23jIpTw== + dependencies: + "@langchain/core" 
">0.1.56 <0.3.0" + js-tiktoken "^1.0.12" + openai "^4.41.1" zod "^3.22.4" zod-to-json-schema "^3.22.3" +"@langchain/textsplitters@~0.0.0": + version "0.0.2" + resolved "https://registry.yarnpkg.com/@langchain/textsplitters/-/textsplitters-0.0.2.tgz#500baa8341fb7fc86fca531a4192665a319504a3" + integrity sha512-6bQOuYHTGYlkgPY/8M5WPq4nnXZpEysGzRopQCYjg2WLcEoIPUMMrXsAaNNdvU3BOeMrhin8izvpDPD165hX6Q== + dependencies: + "@langchain/core" ">0.1.0 <0.3.0" + js-tiktoken "^1.0.12" + "@leichtgewicht/ip-codec@^2.0.1": version "2.0.4" resolved "https://registry.yarnpkg.com/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz#b2ac626d6cb9c8718ab459166d4bb405b8ffa78b" @@ -10311,6 +10320,11 @@ resolved "https://registry.yarnpkg.com/@types/numeral/-/numeral-2.0.5.tgz#388e5c4ff4b0e1787f130753cbbe83d3ba770858" integrity sha512-kH8I7OSSwQu9DS9JYdFWbuvhVzvFRoCPCkGxNwoGgaPeDfEPJlcxNvEOypZhQ3XXHsGbfIuYcxcJxKUfJHnRfw== +"@types/nunjucks@^3.2.6": + version "3.2.6" + resolved "https://registry.yarnpkg.com/@types/nunjucks/-/nunjucks-3.2.6.tgz#6d6e0363719545df8b9a024279902edf68b2caa9" + integrity sha512-pHiGtf83na1nCzliuAdq8GowYiXvH5l931xZ0YEHaLMNFgynpEqx+IPStlu7UaDkehfvl01e4x/9Tpwhy7Ue3w== + "@types/object-hash@^1.3.0": version "1.3.0" resolved "https://registry.yarnpkg.com/@types/object-hash/-/object-hash-1.3.0.tgz#b20db2074129f71829d61ff404e618c4ac3d73cf" @@ -11394,6 +11408,11 @@ resolved "https://registry.yarnpkg.com/@yarnpkg/lockfile/-/lockfile-1.1.0.tgz#e77a97fbd345b76d83245edcd17d393b1b41fb31" integrity sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ== +a-sync-waterfall@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/a-sync-waterfall/-/a-sync-waterfall-1.0.1.tgz#75b6b6aa72598b497a125e7a2770f14f4c8a1fa7" + integrity sha512-RYTOHHdWipFUliRFMCS4X2Yn2X8M87V/OpSqWzKKOGhzqyUxzyVmhHDH9sAvG+ZuQf/TAOFsLCpMw09I1ufUnA== + abab@^2.0.6: version "2.0.6" resolved 
"https://registry.yarnpkg.com/abab/-/abab-2.0.6.tgz#41b80f2c871d19686216b82309231cfd3cb3d291" @@ -12054,7 +12073,7 @@ arrify@^2.0.1: resolved "https://registry.yarnpkg.com/arrify/-/arrify-2.0.1.tgz#c9655e9331e0abcd588d2a7cad7e9956f66701fa" integrity sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug== -asap@^2.0.0: +asap@^2.0.0, asap@^2.0.3: version "2.0.6" resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY= @@ -12571,11 +12590,6 @@ bare-path@^2.0.0, bare-path@^2.1.0: dependencies: bare-os "^2.1.0" -base-64@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/base-64/-/base-64-0.1.0.tgz#780a99c84e7d600260361511c4877613bf24f6bb" - integrity sha512-Y5gU45svrR5tI2Vt/X9GPd3L0HNIKzGu202EjxrXMpuc2V2CiKgemAbUUsqYmZJvPtCXoUKjNZwBJzsNScUbXA== - base64-js@1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.3.1.tgz#58ece8cb75dd07e71ed08c736abc5fac4dbf8df1" @@ -14004,7 +14018,7 @@ commander@^4.0.1, commander@^4.1.1: resolved "https://registry.yarnpkg.com/commander/-/commander-4.1.1.tgz#9fd602bd936294e9e9ef46a3f4d6964044b18068" integrity sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA== -commander@^5.0.0: +commander@^5.0.0, commander@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/commander/-/commander-5.1.0.tgz#46abbd1652f8e059bddaef99bbdcb2ad9cf179ae" integrity sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg== @@ -15620,14 +15634,6 @@ diffie-hellman@^5.0.0: miller-rabin "^4.0.0" randombytes "^2.0.0" -digest-fetch@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/digest-fetch/-/digest-fetch-1.3.0.tgz#898e69264d00012a23cf26e8a3e40320143fc661" - integrity sha512-CGJuv6iKNM7QyZlM2T3sPAdZWd/p9zQiRNS9G+9COUCwzWFTs0Xp8NF5iePx7wtvhDykReiRRrSeNb4oMmB8lA== - dependencies: - base-64 
"^0.1.0" - md5 "^2.3.0" - dir-glob@^2.2.2: version "2.2.2" resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-2.2.2.tgz#fa09f0694153c8918b18ba0deafae94769fc50c4" @@ -20912,10 +20918,10 @@ js-string-escape@^1.0.1: resolved "https://registry.yarnpkg.com/js-string-escape/-/js-string-escape-1.0.1.tgz#e2625badbc0d67c7533e9edc1068c587ae4137ef" integrity sha1-4mJbrbwNZ8dTPp7cEGjFh65BN+8= -js-tiktoken@^1.0.7, js-tiktoken@^1.0.8: - version "1.0.10" - resolved "https://registry.yarnpkg.com/js-tiktoken/-/js-tiktoken-1.0.10.tgz#2b343ec169399dcee8f9ef9807dbd4fafd3b30dc" - integrity sha512-ZoSxbGjvGyMT13x6ACo9ebhDha/0FHdKA+OsQcMOWcm1Zs7r90Rhk5lhERLzji+3rA7EKpXCgwXcM5fF3DMpdA== +js-tiktoken@^1.0.12: + version "1.0.12" + resolved "https://registry.yarnpkg.com/js-tiktoken/-/js-tiktoken-1.0.12.tgz#af0f5cf58e5e7318240d050c8413234019424211" + integrity sha512-L7wURW1fH9Qaext0VzaUDpFGVQgjkdE3Dgsy9/+yXyGEpBKnylTd0mU0bfbNkKDlXRb6TEsZkwuflu1B8uQbJQ== dependencies: base64-js "^1.5.1" @@ -21302,17 +21308,16 @@ kuler@^2.0.0: resolved "https://registry.yarnpkg.com/kuler/-/kuler-2.0.0.tgz#e2c570a3800388fb44407e851531c1d670b061b3" integrity sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A== -langchain@^0.1.30: - version "0.1.30" - resolved "https://registry.yarnpkg.com/langchain/-/langchain-0.1.30.tgz#e1adb3f1849fcd5c596c668300afd5dc8cb37a97" - integrity sha512-5h/vNMmutQ98tbB0sPDlAileZVca6A2McFgGa3+D56Dm8mSSCzTQL2DngPA6h09DlKDpSr7+6PdFw5Hoj0ZDSw== +langchain@^0.2.2, langchain@~0.2.0: + version "0.2.2" + resolved "https://registry.yarnpkg.com/langchain/-/langchain-0.2.2.tgz#21605450458c77f022c88fdb7671bb82f4a9a17f" + integrity sha512-4tt2QuwW8AXdIL8CRkQeGOCoYYH3QbLHfQ09yD0iWLV1rwUYJ8mIYFAz/+u6CB8YNEyR/HI105s4xrxFQbWa9g== dependencies: - "@anthropic-ai/sdk" "^0.9.1" - "@langchain/community" "~0.0.41" - "@langchain/core" "~0.1.44" - "@langchain/openai" "~0.0.19" + "@langchain/core" "~0.2.0" + "@langchain/openai" "~0.0.28" + 
"@langchain/textsplitters" "~0.0.0" binary-extensions "^2.2.0" - js-tiktoken "^1.0.7" + js-tiktoken "^1.0.12" js-yaml "^4.1.0" jsonpointer "^5.0.1" langchainhub "~0.0.8" @@ -21330,10 +21335,10 @@ langchainhub@~0.0.8: resolved "https://registry.yarnpkg.com/langchainhub/-/langchainhub-0.0.8.tgz#fd4b96dc795e22e36c1a20bad31b61b0c33d3110" integrity sha512-Woyb8YDHgqqTOZvWIbm2CaFDGfZ4NTSyXV687AG4vXEfoNo7cGQp7nhl7wL3ehenKWmNEmcxCLgOZzW8jE6lOQ== -langsmith@^0.1.14, langsmith@~0.1.1, langsmith@~0.1.7: - version "0.1.14" - resolved "https://registry.yarnpkg.com/langsmith/-/langsmith-0.1.14.tgz#2b889dbcfb49547614df276a4a5a063092a1585d" - integrity sha512-iEzQLLB7/0nRpAwNBAR7B7N64fyByg5UsNjSvLaCCkQ9AS68PSafjB8xQkyI8QXXrGjU1dEqDRoa8m4SUuRdUw== +langsmith@^0.1.28, langsmith@~0.1.1, langsmith@~0.1.7: + version "0.1.28" + resolved "https://registry.yarnpkg.com/langsmith/-/langsmith-0.1.28.tgz#fbe01352d0b993fd11d4085dd337b1cec17ef28d" + integrity sha512-IQUbo7I7rEE6QYBhrcgwqvlkcUsHlia0yTQpDwWdITw/VJx1f7gLPjNdbwWE+jvOZ4HcD7gCf2HR6zFXputu5A== dependencies: "@types/uuid" "^9.0.1" commander "^10.0.1" @@ -23105,6 +23110,11 @@ mustache@^2.3.2: resolved "https://registry.yarnpkg.com/mustache/-/mustache-2.3.2.tgz#a6d4d9c3f91d13359ab889a812954f9230a3d0c5" integrity sha512-KpMNwdQsYz3O/SBS1qJ/o3sqUJ5wSb8gb0pul8CO0S56b9Y2ALm8zCfsjPXsqGFfoNBkDwZuZIAjhsZI03gYVQ== +mustache@^4.2.0: + version "4.2.0" + resolved "https://registry.npmjs.org/mustache/-/mustache-4.2.0.tgz" + integrity sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ== + mutation-observer@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/mutation-observer/-/mutation-observer-1.0.3.tgz#42e9222b101bca82e5ba9d5a7acf4a14c0f263d0" @@ -23595,6 +23605,15 @@ numeral@^2.0.6: resolved "https://registry.yarnpkg.com/numeral/-/numeral-2.0.6.tgz#4ad080936d443c2561aed9f2197efffe25f4e506" integrity sha1-StCAk21EPCVhrtnyGX7//iX05QY= +nunjucks@^3.2.4: + version "3.2.4" + resolved 
"https://registry.yarnpkg.com/nunjucks/-/nunjucks-3.2.4.tgz#f0878eef528ce7b0aa35d67cc6898635fd74649e" + integrity sha512-26XRV6BhkgK0VOxfbU5cQI+ICFUtMLixv1noZn1tGU38kQH5A5nmmbk/O45xdyBhD1esk47nKrY0mvQpZIhRjQ== + dependencies: + a-sync-waterfall "^1.0.0" + asap "^2.0.3" + commander "^5.1.0" + nwsapi@^2.2.2: version "2.2.2" resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.2.tgz#e5418863e7905df67d51ec95938d67bf801f0bb0" @@ -23894,16 +23913,15 @@ open@^8.0.9, open@^8.4.0, open@~8.4.0: is-docker "^2.1.1" is-wsl "^2.2.0" -openai@^4.24.1, openai@^4.26.0: - version "4.26.1" - resolved "https://registry.yarnpkg.com/openai/-/openai-4.26.1.tgz#7b7c0225c09922445f68f3c4cdbd5775ed31108c" - integrity sha512-DvWbjhWbappsFRatOWmu4Dp1/Q4RG9oOz6CfOSjy0/Drb8G+5iAiqWAO4PfpGIkhOOKtvvNfQri2SItl+U7LhQ== +openai@^4.24.1, openai@^4.41.1: + version "4.47.1" + resolved "https://registry.yarnpkg.com/openai/-/openai-4.47.1.tgz#1d23c7a8eb3d7bcdc69709cd905f4c9af0181dba" + integrity sha512-WWSxhC/69ZhYWxH/OBsLEirIjUcfpQ5+ihkXKp06hmeYXgBBIUCa9IptMzYx6NdkiOCsSGYCnTIsxaic3AjRCQ== dependencies: "@types/node" "^18.11.18" "@types/node-fetch" "^2.6.4" abort-controller "^3.0.0" agentkeepalive "^4.2.1" - digest-fetch "^1.3.0" form-data-encoder "1.7.2" formdata-node "^4.3.2" node-fetch "^2.6.7" @@ -30645,7 +30663,7 @@ uuid-browser@^3.1.0: resolved "https://registry.yarnpkg.com/uuid-browser/-/uuid-browser-3.1.0.tgz#0f05a40aef74f9e5951e20efbf44b11871e56410" integrity sha1-DwWkCu90+eWVHiDvv0SxGHHlZBA= -uuid@9.0.0, uuid@^9, uuid@^9.0.0: +uuid@9.0.0: version "9.0.0" resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.0.tgz#592f550650024a38ceb0c562f2f6aa435761efb5" integrity sha512-MXcSTerfPa4uqyzStbRoTgt5XIe3x5+42+q1sDuy3R5MDk66URdLMOZe5aPX/SQd+kuYAh0FdP/pO28IkQyTeg== @@ -30660,6 +30678,11 @@ uuid@^8.0.0, uuid@^8.3.0, uuid@^8.3.2: resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" integrity 
sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== +uuid@^9, uuid@^9.0.0, uuid@^9.0.1: + version "9.0.1" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.1.tgz#e188d4c8853cc722220392c424cd637f32293f30" + integrity sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA== + v8-compile-cache-lib@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf" From 071b3304ad7b6967e2b1a8ae1fe3a54c4701a2fd Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Fri, 24 May 2024 14:14:25 +0200 Subject: [PATCH 02/62] remove unecessary api code and update placeholder services --- .../public/api/services/apiRequest.tsx | 79 ------------------- .../api/services/categorizationService.tsx | 19 ----- .../public/api/services/ecsMappingService.tsx | 62 --------------- .../services/integrationBuilderService.tsx | 35 -------- .../public/api/services/relatedService.tsx | 19 ----- .../public/api/services/runnableClient.tsx | 14 ---- .../integration_assistant/public/services.ts | 53 ++++++------- 7 files changed, 22 insertions(+), 259 deletions(-) delete mode 100644 x-pack/plugins/integration_assistant/public/api/services/apiRequest.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/api/services/categorizationService.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/api/services/ecsMappingService.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/api/services/integrationBuilderService.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/api/services/relatedService.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/api/services/runnableClient.tsx diff --git a/x-pack/plugins/integration_assistant/public/api/services/apiRequest.tsx b/x-pack/plugins/integration_assistant/public/api/services/apiRequest.tsx deleted file mode 100644 
index c82701ed2f130..0000000000000 --- a/x-pack/plugins/integration_assistant/public/api/services/apiRequest.tsx +++ /dev/null @@ -1,79 +0,0 @@ -export const apiPostWithFileResponse = async ( - path: string, - body: string, - filename: string, - customHeaders?: Record, -): Promise => { - try { - const url = `${import.meta.env.VITE_BASE_URL}/api/v1/${path}`; - const response = await fetch(url, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - 'Access-Control-Allow-Origin': '*.sit.estc.dev', - ...customHeaders, - }, - body: body, - }); - - if (!response.ok) { - throw new Error(`HTTP error! status: ${response.status}`); - } - - const blob = await response.blob(); - return new File([blob], filename); - } catch (e) { - console.error(e); - return null; - } -}; - -export const apiPost = async ( - path: string, - body: string | File, - customHeaders?: Record, -): Promise => { - try { - const url = `${import.meta.env.VITE_BASE_URL}/api/v1/${path}`; - const response = await fetch(url, { - method: 'POST', - headers: { - 'Access-Control-Allow-Origin': '*.sit.estc.dev', - 'Content-Type': 'application/json', - ...customHeaders, - }, - body: body, - }); - - if (!response.ok) { - throw new Error(`HTTP error! status: ${response.status}`); - } - - return response.json(); - } catch (e) { - console.error(e); - return null; - } -}; - -export const apiGet = async (path: string, customHeaders?: Record): Promise => { - try { - const url = `${import.meta.env.VITE_BASE_URL}/api/v1/${path}`; - const response = await fetch(url, { - method: 'GET', - headers: { - 'Content-Type': 'application/json', - ...customHeaders, - }, - }); - - if (!response.ok) { - throw new Error(`HTTP error! 
status: ${response.status}`); - } - - return response.json(); - } catch (e) { - console.error(e); - return null; - } -}; diff --git a/x-pack/plugins/integration_assistant/public/api/services/categorizationService.tsx b/x-pack/plugins/integration_assistant/public/api/services/categorizationService.tsx deleted file mode 100644 index 7d51a05473dca..0000000000000 --- a/x-pack/plugins/integration_assistant/public/api/services/categorizationService.tsx +++ /dev/null @@ -1,19 +0,0 @@ -import { newRunnable } from '@api/services/runnableClient'; - -export async function getCategorization(req: CategorizationAPIRequest) { - const client = newRunnable('categorization'); - let response = { results: { pipeline: {}, docs: [] } } as CategorizationApiResponse; - try { - response = (await client.invoke({ - package_name: req.packageName, - data_stream_name: req.dataStreamName, - raw_samples: req.formSamples, - current_pipeline: req.ingestPipeline, - })) as CategorizationApiResponse; - } catch (e) { - console.error(e); - return response; - } - - return response; -} diff --git a/x-pack/plugins/integration_assistant/public/api/services/ecsMappingService.tsx b/x-pack/plugins/integration_assistant/public/api/services/ecsMappingService.tsx deleted file mode 100644 index 8017028358cda..0000000000000 --- a/x-pack/plugins/integration_assistant/public/api/services/ecsMappingService.tsx +++ /dev/null @@ -1,62 +0,0 @@ -import { newRunnable } from '@Api/services/runnableClient'; -import { mergeDeeply, traverseAndMatchFields } from '@Utils/samples'; - -export function formatEcsResponse( - response: EcsMappingApiResponse, - packageName: string, - dataStreamName: string, - rawSamples: string[], -): EcsMappingTableItem[] { - const rawObjects = rawSamples.map((str) => JSON.parse(str)); - const mergedObject = mergeDeeply(rawObjects); - const matches = traverseAndMatchFields( - response.results.mapping[packageName][dataStreamName], - mergedObject, - packageName, - dataStreamName, - ); - // Sorting the 
matches by isEcs then alphabetically on sourceField: - matches.sort((a, b) => { - // First, sort by `isEcs` status, true first - if (a.isEcs && !b.isEcs) return -1; - if (!a.isEcs && b.isEcs) return 1; - - // Then, if `isEcs` status is the same, sort alphabetically by `source_field` - return a.sourceField.localeCompare(b.sourceField); - }); - - return matches; -} - -export async function getEcsMapping(req: EcsMappingAPIRequest) { - let response = { results: { mapping: {}, current_pipeline: {} } } as EcsMappingApiResponse; - const client = newRunnable('ecs'); - try { - response = (await client.invoke({ - package_name: req.packageName, - data_stream_name: req.dataStreamName, - raw_samples: req.formSamples, - })) as EcsMappingApiResponse; - } catch (e) { - console.error(e); - return response; - } - return response; -} - -export async function getUpdatedPipeline(req: EcsMappingNewPipelineAPIRequest) { - const client = newRunnable('ecs'); - let response = { results: { mapping: {}, current_pipeline: {} } } as EcsMappingApiResponse; - try { - response = (await client.invoke({ - package_name: req.packageName, - data_stream_name: req.dataStreamName, - raw_samples: req.formSamples, - current_mapping: req.mapping, - })) as EcsMappingApiResponse; - } catch (e) { - console.error(e); - return response; - } - return response; -} diff --git a/x-pack/plugins/integration_assistant/public/api/services/integrationBuilderService.tsx b/x-pack/plugins/integration_assistant/public/api/services/integrationBuilderService.tsx deleted file mode 100644 index 79aaabf043a1c..0000000000000 --- a/x-pack/plugins/integration_assistant/public/api/services/integrationBuilderService.tsx +++ /dev/null @@ -1,35 +0,0 @@ -import { apiPostWithFileResponse, apiPost } from '@Api/services/apiRequest'; - -export async function buildIntegration(req: BuildIntegrationAPIRequest) { - const requestBody = { - package_name: req.packageName, - title: req.packageTitle, - description: 'test description', - data_stream: [ 
- { - name: req.dataStreamName, - type: req.inputTypes, - title: 'Test data stream title', - description: 'Test data stream description', - format: 'json', - samples: req.formSamples, - pipeline: req.ingestPipeline, - docs: req.docs, - }, - ], - }; - - const response = apiPostWithFileResponse( - 'integration_builder/package', - JSON.stringify(requestBody), - `${req.packageName}-${req.packageVersion}.zip`, - ); - return response; -} - -export async function installIntegration(file: File) { - const path = 'api/fleet/epm/packages'; - - const response = apiPost(path, file); - return response; -} diff --git a/x-pack/plugins/integration_assistant/public/api/services/relatedService.tsx b/x-pack/plugins/integration_assistant/public/api/services/relatedService.tsx deleted file mode 100644 index cf59ee1823471..0000000000000 --- a/x-pack/plugins/integration_assistant/public/api/services/relatedService.tsx +++ /dev/null @@ -1,19 +0,0 @@ -import { newRunnable } from '@api/services/runnableClient'; - -export async function getRelated(req: RelatedAPIRequest) { - const client = newRunnable('related'); - let response = { results: { pipeline: {}, docs: [] } } as RelatedApiResponse; - try { - response = (await client.invoke({ - package_name: req.packageName, - data_stream_name: req.dataStreamName, - raw_samples: req.formSamples, - current_pipeline: req.ingestPipeline, - })) as RelatedApiResponse; - } catch (e) { - console.error(e); - return response; - } - - return response; -} diff --git a/x-pack/plugins/integration_assistant/public/api/services/runnableClient.tsx b/x-pack/plugins/integration_assistant/public/api/services/runnableClient.tsx deleted file mode 100644 index 16a943dc55bcf..0000000000000 --- a/x-pack/plugins/integration_assistant/public/api/services/runnableClient.tsx +++ /dev/null @@ -1,14 +0,0 @@ -import { RemoteRunnable } from '@langchain/core/runnables/remote'; - -export const newRunnable = (path: string) => { - const remoteChain = new RemoteRunnable({ - url: 
`${import.meta.env.VITE_BASE_URL}/api/v1/${path}`, - options: { - timeout: 2000000, - headers: { - 'Access-Control-Allow-Origin': '*.sit.estc.dev', - }, - }, - }); - return remoteChain; -}; diff --git a/x-pack/plugins/integration_assistant/public/services.ts b/x-pack/plugins/integration_assistant/public/services.ts index d98791ff5c240..9f8e8afec8b06 100644 --- a/x-pack/plugins/integration_assistant/public/services.ts +++ b/x-pack/plugins/integration_assistant/public/services.ts @@ -1,65 +1,56 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ import type { CoreStart } from '@kbn/core/public'; import type { IHttpFetchError } from '@kbn/core-http-browser'; import { - RANDOM_NUMBER_ROUTE_PATH, - RANDOM_NUMBER_BETWEEN_ROUTE_PATH, - POST_MESSAGE_ROUTE_PATH, - INTERNAL_GET_MESSAGE_BY_ID_ROUTE, + ECS_GRAPH_PATH, + CATEGORZATION_GRAPH_PATH, + RELATED_GRAPH_PATH, + INTEGRATION_BUILDER_PATH, } from '../common'; export interface Services { - fetchRandomNumber: () => Promise; - fetchRandomNumberBetween: (max: number) => Promise; - postMessage: (message: string, id: string) => Promise; - getMessageById: (id: string) => Promise; - addSuccessToast: (message: string) => void; + runEcsGraph: () => Promise; + runCategorizationGraph: () => Promise; + runRelatedGraph: () => Promise; + runIntegrationBuilder: () => Promise; } export function getServices(core: CoreStart): Services { return { - addSuccessToast: (message: string) => core.notifications.toasts.addSuccess(message), - fetchRandomNumber: async () => { + runEcsGraph: async () => { try { - const response = await core.http.fetch<{ 
randomNumber: number }>(RANDOM_NUMBER_ROUTE_PATH); - return response.randomNumber; + const response = await core.http.fetch<{}>(ECS_GRAPH_PATH); + return response; } catch (e) { return e; } }, - fetchRandomNumberBetween: async (max: number) => { + runCategorizationGraph: async () => { try { - const response = await core.http.fetch<{ randomNumber: number }>( - RANDOM_NUMBER_BETWEEN_ROUTE_PATH, - { query: { max } } - ); - return response.randomNumber; + const response = await core.http.fetch<{}>(CATEGORZATION_GRAPH_PATH); + return response; } catch (e) { return e; } }, - postMessage: async (message: string, id: string) => { + runRelatedGraph: async () => { try { - await core.http.post(`${POST_MESSAGE_ROUTE_PATH}/${id}`, { - body: JSON.stringify({ message }), - }); + const response = await core.http.fetch<{}>(RELATED_GRAPH_PATH); + return response; } catch (e) { return e; } }, - getMessageById: async (id: string) => { + runIntegrationBuilder: async () => { try { - const response = await core.http.get<{ message: string }>( - `${INTERNAL_GET_MESSAGE_BY_ID_ROUTE}/${id}` - ); - return response.message; + const response = await core.http.fetch<{}>(INTEGRATION_BUILDER_PATH); + return response; } catch (e) { return e; } From d102b28c3f790579853b2298bada1225f243075d Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Fri, 24 May 2024 14:22:13 +0200 Subject: [PATCH 03/62] update types, remove more code that is not needed anymore --- .../integration_assistant/common/index.ts | 59 ++++++ .../public/get_message_example.tsx | 85 -------- .../integration_assistant/public/index.ts | 10 +- .../integration_assistant/public/plugin.tsx | 17 +- .../public/post_message_example.tsx | 92 --------- .../public/random_number_between_example.tsx | 87 --------- .../public/random_number_example.tsx | 67 ------- .../public/stores/integrationBuilderStore.tsx | 184 ------------------ .../public/stores/sideNavStore.tsx | 10 - .../public/stores/useGlobalStore.tsx | 35 ---- 
.../integration_assistant/public/types.ts | 14 +- .../public/types/ApiRequests.tsx | 37 ---- .../public/types/ApiResponses.tsx | 20 -- .../public/types/IntegrationBuilder.tsx | 83 -------- .../public/types/SideNav.tsx | 4 - 15 files changed, 94 insertions(+), 710 deletions(-) delete mode 100644 x-pack/plugins/integration_assistant/public/get_message_example.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/post_message_example.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/random_number_between_example.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/random_number_example.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/stores/integrationBuilderStore.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/stores/sideNavStore.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/stores/useGlobalStore.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/types/ApiRequests.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/types/ApiResponses.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/types/IntegrationBuilder.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/types/SideNav.tsx diff --git a/x-pack/plugins/integration_assistant/common/index.ts b/x-pack/plugins/integration_assistant/common/index.ts index 40004e801c99a..6047a20b9951a 100644 --- a/x-pack/plugins/integration_assistant/common/index.ts +++ b/x-pack/plugins/integration_assistant/common/index.ts @@ -14,3 +14,62 @@ export const CATEGORZATION_GRAPH_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/cate export const RELATED_GRAPH_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/related`; export const INTEGRATION_BUILDER_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/build`; + +export interface BuildIntegrationAPIRequest { + packageName: string; + packageTitle: string; + packageVersion: string; + dataStreamName: string; + inputTypes: string[]; + formSamples: string[]; + 
ingestPipeline: object; + docs: object[]; +} + +export interface EcsMappingAPIRequest { + packageName: string; + dataStreamName: string; + formSamples: string[]; +} + +export interface EcsMappingNewPipelineAPIRequest { + packageName: string; + dataStreamName: string; + formSamples: string[]; + mapping: object; +} + +export interface CategorizationAPIRequest { + packageName: string; + dataStreamName: string; + formSamples: string[]; + ingestPipeline: object; +} + +export interface RelatedAPIRequest { + packageName: string; + dataStreamName: string; + formSamples: string[]; + ingestPipeline: object; +} + +export interface CategorizationApiResponse { + results: { + pipeline: object; + docs: object[]; + }; +} + +export interface RelatedApiResponse { + results: { + pipeline: object; + docs: object[]; + }; +} + +export interface EcsMappingApiResponse { + results: { + mapping: object; + current_pipeline: object; + }; +} diff --git a/x-pack/plugins/integration_assistant/public/get_message_example.tsx b/x-pack/plugins/integration_assistant/public/get_message_example.tsx deleted file mode 100644 index 47d12a6fed491..0000000000000 --- a/x-pack/plugins/integration_assistant/public/get_message_example.tsx +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -import React, { useCallback } from 'react'; -import { useState } from 'react'; -import { - EuiText, - EuiButton, - EuiLoadingSpinner, - EuiFieldText, - EuiCallOut, - EuiFormRow, -} from '@elastic/eui'; -import { type IHttpFetchError, isHttpFetchError } from '@kbn/core-http-browser'; -import { Services } from './services'; - -interface Props { - getMessageById: Services['getMessageById']; -} - -export function GetMessageRouteExample({ getMessageById }: Props) { - const [error, setError] = useState(); - const [isFetching, setIsFetching] = useState(false); - const [message, setMessage] = useState(''); - const [id, setId] = useState(''); - - const doFetch = useCallback(async () => { - if (isFetching) return; - setIsFetching(true); - const response = await getMessageById(id); - - if (isHttpFetchError(response)) { - setError(response); - setMessage(''); - } else { - setError(undefined); - setMessage(response); - } - - setIsFetching(false); - }, [isFetching, getMessageById, setMessage, id]); - - return ( - - -

GET example with param

- -

This examples uses a simple GET route that takes an id as a param in the route path.

- - setId(e.target.value)} - data-test-subj="routingExampleGetMessageId" - /> - - - - doFetch()} - > - {isFetching ? : 'Get message'} - - - - {error !== undefined ? ( - - {error.message} - - ) : null} - {message !== '' ? ( -

- Message is:

{message}
-

- ) : null} -
-
- ); -} diff --git a/x-pack/plugins/integration_assistant/public/index.ts b/x-pack/plugins/integration_assistant/public/index.ts index df8a9d532f4a7..128c4ff1b1546 100644 --- a/x-pack/plugins/integration_assistant/public/index.ts +++ b/x-pack/plugins/integration_assistant/public/index.ts @@ -5,7 +5,11 @@ * 2.0. */ -import { PluginInitializer } from '@kbn/core/public'; -import { RoutingExamplePlugin } from './plugin'; +import { IntegrationAssistantPlugin } from './plugin'; -export const plugin: PluginInitializer<{}, {}> = () => new RoutingExamplePlugin(); +// This exports static code and TypeScript types, +// as well as, Kibana Platform `plugin()` initializer. +export function plugin() { + return new IntegrationAssistantPlugin(); +} +export type { IntegrationAssistantPluginSetup, IntegrationAssistantPluginStart } from './types'; diff --git a/x-pack/plugins/integration_assistant/public/plugin.tsx b/x-pack/plugins/integration_assistant/public/plugin.tsx index dbfa414e48681..2a8e072399eac 100644 --- a/x-pack/plugins/integration_assistant/public/plugin.tsx +++ b/x-pack/plugins/integration_assistant/public/plugin.tsx @@ -24,12 +24,25 @@ export class IntegrationAssistantPlugin return renderApp(startServices, params.element); }, }); - return {}; + return { + runEcsGraph() { + return 'test'; + }, + runCategorizationGraph() { + return 'test'; + }, + runRelatedGraph() { + return 'test'; + }, + runIntegrationBuilder() { + return 'test'; + }, + }; } public start(core: CoreStart) { return {}; } - public stop() { } + public stop() {} } diff --git a/x-pack/plugins/integration_assistant/public/post_message_example.tsx b/x-pack/plugins/integration_assistant/public/post_message_example.tsx deleted file mode 100644 index c60a41ca6fe81..0000000000000 --- a/x-pack/plugins/integration_assistant/public/post_message_example.tsx +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -import React, { useCallback } from 'react'; -import { useState } from 'react'; -import { - EuiText, - EuiButton, - EuiLoadingSpinner, - EuiFieldText, - EuiCallOut, - EuiFormRow, - EuiTextArea, -} from '@elastic/eui'; -import { type IHttpFetchError, isHttpFetchError } from '@kbn/core-http-browser'; -import { Services } from './services'; - -interface Props { - postMessage: Services['postMessage']; - addSuccessToast: Services['addSuccessToast']; -} - -export function PostMessageRouteExample({ postMessage, addSuccessToast }: Props) { - const [error, setError] = useState(); - const [isPosting, setIsPosting] = useState(false); - const [message, setMessage] = useState(''); - const [id, setId] = useState(''); - - const doFetch = useCallback(async () => { - if (isPosting) return; - setIsPosting(true); - const response = await postMessage(message, id); - - if (response && isHttpFetchError(response)) { - setError(response); - } else { - setError(undefined); - addSuccessToast('Message was added!'); - setMessage(''); - setId(''); - } - - setIsPosting(false); - }, [isPosting, postMessage, addSuccessToast, setMessage, message, id]); - - return ( - - -

POST example with body

-

- This examples uses a simple POST route that takes a body parameter and an id as a param in - the route path. -

- - setId(e.target.value)} - data-test-subj="routingExampleSetMessageId" - /> - - - setMessage(e.target.value)} - /> - - - - doFetch()} - > - {isPosting ? : 'Post message'} - - - - {error !== undefined ? ( - - {error.message} - - ) : null} -
-
- ); -} diff --git a/x-pack/plugins/integration_assistant/public/random_number_between_example.tsx b/x-pack/plugins/integration_assistant/public/random_number_between_example.tsx deleted file mode 100644 index 68bea90fd88ff..0000000000000 --- a/x-pack/plugins/integration_assistant/public/random_number_between_example.tsx +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -import React, { useCallback } from 'react'; -import { useState } from 'react'; -import { - EuiText, - EuiButton, - EuiLoadingSpinner, - EuiFieldText, - EuiCallOut, - EuiFormRow, -} from '@elastic/eui'; -import { type IHttpFetchError, isHttpFetchError } from '@kbn/core-http-browser'; -import { Services } from './services'; - -interface Props { - fetchRandomNumberBetween: Services['fetchRandomNumberBetween']; -} - -export function RandomNumberBetweenRouteExample({ fetchRandomNumberBetween }: Props) { - const [error, setError] = useState(); - const [randomNumber, setRandomNumber] = useState(0); - const [isFetching, setIsFetching] = useState(false); - const [maxInput, setMaxInput] = useState('10'); - - const doFetch = useCallback(async () => { - if (isFetching) return; - setIsFetching(true); - const response = await fetchRandomNumberBetween(Number.parseInt(maxInput, 10)); - - if (isHttpFetchError(response)) { - setError(response); - } else { - setRandomNumber(response); - } - - setIsFetching(false); - }, [isFetching, maxInput, fetchRandomNumberBetween]); - - return ( - - -

GET example with query

-

- This examples uses a simple GET route that takes a query parameter in the request and - returns a single number. -

- - setMaxInput(e.target.value)} - isInvalid={isNaN(Number(maxInput))} - /> - - - - doFetch()} - > - {isFetching ? : 'Generate random number'} - - - - {error !== undefined ? ( - - {error.message} - - ) : null} - {randomNumber > -1 ? ( -

- Random number is -
{randomNumber}
-

- ) : null} -
-
- ); -} diff --git a/x-pack/plugins/integration_assistant/public/random_number_example.tsx b/x-pack/plugins/integration_assistant/public/random_number_example.tsx deleted file mode 100644 index 8ae118a722b99..0000000000000 --- a/x-pack/plugins/integration_assistant/public/random_number_example.tsx +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -import React, { useCallback } from 'react'; -import { useState } from 'react'; -import { EuiText, EuiButton, EuiLoadingSpinner, EuiCallOut } from '@elastic/eui'; -import { type IHttpFetchError, isHttpFetchError } from '@kbn/core-http-browser'; -import { Services } from './services'; - -interface Props { - fetchRandomNumber: Services['fetchRandomNumber']; -} - -export function RandomNumberRouteExample({ fetchRandomNumber }: Props) { - const [error, setError] = useState(undefined); - const [randomNumber, setRandomNumber] = useState(0); - const [isFetching, setIsFetching] = useState(false); - - const doFetch = useCallback(async () => { - if (isFetching) return; - setIsFetching(true); - const response = await fetchRandomNumber(); - - if (isHttpFetchError(response)) { - setError(response); - } else { - setRandomNumber(response); - } - - setIsFetching(false); - }, [isFetching, fetchRandomNumber]); - - return ( - - -

GET example

-

- This examples uses a simple GET route that takes no parameters or body in the request and - returns a single number. -

- doFetch()} - > - {isFetching ? : 'Generate a random number'} - - - {error !== undefined ? ( - - {error} - - ) : null} - {randomNumber > -1 ? ( -

- Random number is
{randomNumber}
-

- ) : null} -
-
- ); -} diff --git a/x-pack/plugins/integration_assistant/public/stores/integrationBuilderStore.tsx b/x-pack/plugins/integration_assistant/public/stores/integrationBuilderStore.tsx deleted file mode 100644 index 909ef95706dd0..0000000000000 --- a/x-pack/plugins/integration_assistant/public/stores/integrationBuilderStore.tsx +++ /dev/null @@ -1,184 +0,0 @@ -import { StateCreator } from 'zustand'; -import { mergeDeeply } from '@Utils/samples'; - -const initialIntegrationBuilderStepsState = { - integrationBuilderStep1: 'current', - integrationBuilderStep2: 'incomplete', - integrationBuilderStep3: 'incomplete', - integrationBuilderStep4: 'incomplete', - integrationBuilderStep5: 'incomplete', -}; - -export const integrationBuilderStepsState: StateCreator< - IntegrationBuilderStepsState, - [['zustand/devtools', never]], - [], - IntegrationBuilderStepsState -> = (set): IntegrationBuilderStepsState => ({ - ...initialIntegrationBuilderStepsState, - setIntegrationBuilderStepsState: (key, value) => set(() => ({ [key]: value })), - resetIntegrationBuilderStepsState: () => set(() => ({ ...initialIntegrationBuilderStepsState })), -}); - -const initialEcsMappingFormState = { - packageName: '', - packageTitle: '', - packageVersion: '0.1.0', - dataStreamName: '', - dataStreamTitle: '', - logFormat: 'json', - inputTypes: [], - formSamples: [], - sampleCount: 0, - uniqueKeysCount: 0, -}; - -export const ecsMappingFormState: StateCreator< - EcsMappingFormState, - [['zustand/devtools', never]], - [], - EcsMappingFormState -> = (set): EcsMappingFormState => ({ - ...initialEcsMappingFormState, - addFormSamples: (value) => - set((state) => { - // New formSamples after adding the new valid samples - const newFormSamples = [...state.formSamples, ...value]; - - // Calculate sampleCount as the length of newFormSamples - const newSampleCount = newFormSamples.length; - const rawObjects = newFormSamples.map((line) => JSON.parse(line)); - // Calculate uniqueKeysCount by merging all objects and 
counting the keys - const mergedObject = mergeDeeply(rawObjects); - const newUniqueKeysCount = Object.keys(mergedObject).length; - - return { - formSamples: newFormSamples, - sampleCount: newSampleCount, - uniqueKeysCount: newUniqueKeysCount, - }; - }), - setSampleCount: (value) => set(() => ({ sampleCount: value })), - setUniqueKeysCount: (value) => set(() => ({ uniqueKeysCount: value })), - setEcsMappingFormValue: (key, value) => set(() => ({ [key]: value })), - setEcsMappingFormArrayValue: (key, value) => set(() => ({ [key]: value })), - resetEcsMappingFormState: () => set(() => ({ ...initialEcsMappingFormState })), -}); - -const initialIntegrationBuilderChainItemsState = { - mapping: {}, - ingestPipeline: {}, - docs: [], - integrationBuilderZipFile: null, -}; - -export const integrationBuilderChainItemsState: StateCreator< - IntegrationBuilderChainItemsState, - [['zustand/devtools', never]], - [], - IntegrationBuilderChainItemsState -> = (set): IntegrationBuilderChainItemsState => ({ - ...initialIntegrationBuilderChainItemsState, - setIntegrationBuilderZipFile: (file) => set(() => ({ integrationBuilderZipFile: file })), - setIntegrationBuilderChainItemsState(key, value) { - set(() => ({ [key]: value })); - }, - updateChainItem: (path, newValue, itemType) => - set((state) => { - const keys = path.split('.'); - const lastKey = keys.pop(); - const lastObj = keys.reduce((acc, key) => (acc[key] = acc[key] || {}), state[itemType]); - if (lastKey) { - lastObj[lastKey] = newValue; - } - return { [itemType]: { ...state[itemType] } }; - }), - resetChainItemsState: () => set(() => ({ ...initialIntegrationBuilderChainItemsState })), -}); - -const initialEcsMappingTableState = { - ecsMappingTablePopoverState: {}, - ecsMappingTableState: [], - ecsMappingTableItemsWithEcs: 0, -}; - -export const ecsMappingTableState: StateCreator< - EcsMappingTableState, - [['zustand/devtools', never]], - [], - EcsMappingTableState -> = (set): EcsMappingTableState => ({ - 
...initialEcsMappingTableState, - setEcsMappingTableItemsWithEcs: (value) => set(() => ({ ecsMappingTableItemsWithEcs: value })), - setEcsMappingTablePopoverState: (identifier) => - set((state) => ({ - ecsMappingTablePopoverState: { - ...state.ecsMappingTablePopoverState, - [identifier]: !state.ecsMappingTablePopoverState[identifier], - }, - })), - setEcsMappingTableState: (value) => set(() => ({ ecsMappingTableState: value })), - updateEcsMappingTableItem: (id, newDestinationField) => - set((state) => { - const updatedTableState = state.ecsMappingTableState.map((item) => { - if (item.id === id) { - return { ...item, destinationField: newDestinationField }; - } - return item; - }); - - return { ecsMappingTableState: updatedTableState }; - }), - resetEcsMappingTableState: () => set(() => ({ ...initialEcsMappingTableState })), -}); - -const initialIntegrationBuilderContinueState = { - ecsButtonContinue: false, - categorizationButtonContinue: false, - relatedButtonContinue: false, -}; - -export const integrationBuilderContinueState: StateCreator< - IntegrationBuilderContinueState, - [['zustand/devtools', never]], - [], - IntegrationBuilderContinueState -> = (set): IntegrationBuilderContinueState => ({ - ...initialIntegrationBuilderContinueState, - setContinueButtonState: (key, value) => set(() => ({ [key]: value })), - resetContinueButtonState: () => set(() => ({ ...initialIntegrationBuilderContinueState })), -}); - -const initialIntegrationBuilderIsLoadingState = { - relatedIsLoading: false, - categorizationIsLoading: false, - ecsMappingIsLoading: false, -}; - -export const integrationBuilderIsLoadingState: StateCreator< - IntegrationBuilderIsLoadingState, - [['zustand/devtools', never]], - [], - IntegrationBuilderIsLoadingState -> = (set): IntegrationBuilderIsLoadingState => ({ - ...initialIntegrationBuilderIsLoadingState, - setIsLoadingState: (key, value) => set(() => ({ [key]: value })), - resetIsLoadingState: () => set(() => ({ 
...initialIntegrationBuilderIsLoadingState })), -}); - -const initialIntegrationBuilderHeaderState = { - isPortalLoading: false, - integrationBuilderHeaderTitle: "", -}; - -export const integrationBuilderHeaderState: StateCreator< - IntegrationBuilderHeaderState, - [['zustand/devtools', never]], - [], - IntegrationBuilderHeaderState -> = (set): IntegrationBuilderHeaderState => ({ - ...initialIntegrationBuilderHeaderState, - setIsPortalLoadingState: (value) => set(() => ({ isPortalLoading: value })), - setIntegrationBuilderHeaderTitle: (value) => set(() => ({ integrationBuilderHeaderTitle: value })), - resetIntegrationBuilderHeaderState: () => set(() => ({ ...initialIntegrationBuilderHeaderState })), -}); \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/public/stores/sideNavStore.tsx b/x-pack/plugins/integration_assistant/public/stores/sideNavStore.tsx deleted file mode 100644 index f2b8d78f5c2ce..0000000000000 --- a/x-pack/plugins/integration_assistant/public/stores/sideNavStore.tsx +++ /dev/null @@ -1,10 +0,0 @@ -import { StateCreator } from 'zustand'; - -export const sideNavState: StateCreator = ( - set, -): SideNavState => ({ - selected: '', - setSelected: (value) => { - set(() => ({ selected: value })); - }, -}); diff --git a/x-pack/plugins/integration_assistant/public/stores/useGlobalStore.tsx b/x-pack/plugins/integration_assistant/public/stores/useGlobalStore.tsx deleted file mode 100644 index 086c42a94133f..0000000000000 --- a/x-pack/plugins/integration_assistant/public/stores/useGlobalStore.tsx +++ /dev/null @@ -1,35 +0,0 @@ -import { devtools } from 'zustand/middleware'; -import { create } from 'zustand'; -import { - integrationBuilderStepsState, - ecsMappingFormState, - integrationBuilderChainItemsState, - ecsMappingTableState, - integrationBuilderContinueState, - integrationBuilderIsLoadingState, - integrationBuilderHeaderState, -} from '@Stores/integrationBuilderStore'; - -import { sideNavState } from '@Stores/sideNavStore'; 
- -export const useGlobalStore = create< - IntegrationBuilderStepsState & - EcsMappingFormState & - IntegrationBuilderChainItemsState & - EcsMappingTableState & - IntegrationBuilderContinueState & - IntegrationBuilderIsLoadingState & - IntegrationBuilderHeaderState & - SideNavState ->()( - devtools((...a) => ({ - ...integrationBuilderStepsState(...a), - ...ecsMappingFormState(...a), - ...integrationBuilderChainItemsState(...a), - ...ecsMappingTableState(...a), - ...integrationBuilderContinueState(...a), - ...integrationBuilderIsLoadingState(...a), - ...integrationBuilderHeaderState(...a), - ...sideNavState(...a), - })), -); diff --git a/x-pack/plugins/integration_assistant/public/types.ts b/x-pack/plugins/integration_assistant/public/types.ts index 8c984dee240bb..886b5852beef2 100644 --- a/x-pack/plugins/integration_assistant/public/types.ts +++ b/x-pack/plugins/integration_assistant/public/types.ts @@ -7,7 +7,10 @@ import { NavigationPublicPluginStart } from '@kbn/navigation-plugin/public'; export interface IntegrationAssistantPluginSetup { - getGreeting: () => string; + runEcsGraph: () => string; + runRelatedGraph: () => string; + runCategorizationGraph: () => string; + runIntegrationBuilder: () => string; } // eslint-disable-next-line @typescript-eslint/no-empty-interface export interface IntegrationAssistantPluginStart {} @@ -15,3 +18,12 @@ export interface IntegrationAssistantPluginStart {} export interface AppPluginStartDependencies { navigation: NavigationPublicPluginStart; } + +export interface EcsMappingTableItem { + sourceField: string; + destinationField: string; + isEcs: boolean; + description: string; + id: string; + exampleValue: any; +} diff --git a/x-pack/plugins/integration_assistant/public/types/ApiRequests.tsx b/x-pack/plugins/integration_assistant/public/types/ApiRequests.tsx deleted file mode 100644 index 71052ae5f92ab..0000000000000 --- a/x-pack/plugins/integration_assistant/public/types/ApiRequests.tsx +++ /dev/null @@ -1,37 +0,0 @@ 
-interface BuildIntegrationAPIRequest { - packageName: string; - packageTitle: string; - packageVersion: string; - dataStreamName: string; - inputTypes: string[]; - formSamples: string[]; - ingestPipeline: object; - docs: Array; -} - -interface EcsMappingAPIRequest { - packageName: string; - dataStreamName: string; - formSamples: string[]; -} - -interface EcsMappingNewPipelineAPIRequest { - packageName: string; - dataStreamName: string; - formSamples: string[]; - mapping: object; -} - -interface CategorizationAPIRequest { - packageName: string; - dataStreamName: string; - formSamples: string[]; - ingestPipeline: object; -} - -interface RelatedAPIRequest { - packageName: string; - dataStreamName: string; - formSamples: string[]; - ingestPipeline: object; -} \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/public/types/ApiResponses.tsx b/x-pack/plugins/integration_assistant/public/types/ApiResponses.tsx deleted file mode 100644 index f3b0c02395a3a..0000000000000 --- a/x-pack/plugins/integration_assistant/public/types/ApiResponses.tsx +++ /dev/null @@ -1,20 +0,0 @@ -type CategorizationApiResponse = { - results: { - pipeline: object; - docs: Array; - }; -}; - -type RelatedApiResponse = { - results: { - pipeline: object; - docs: Array; - }; -}; - -type EcsMappingApiResponse = { - results: { - mapping: object; - current_pipeline: object; - }; -}; \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/public/types/IntegrationBuilder.tsx b/x-pack/plugins/integration_assistant/public/types/IntegrationBuilder.tsx deleted file mode 100644 index 34e70fbdbbeee..0000000000000 --- a/x-pack/plugins/integration_assistant/public/types/IntegrationBuilder.tsx +++ /dev/null @@ -1,83 +0,0 @@ -type EcsMappingTableItem = { - sourceField: string; - destinationField: string; - isEcs: boolean; - description: string; - id: string; - exampleValue: any; -}; - -interface EcsMappingFormState { - packageName: string; - packageTitle: string; - 
packageVersion: string; - dataStreamName: string; - dataStreamTitle: string; - logFormat: string; - inputTypes: string[]; - formSamples: string[]; - sampleCount: number; - uniqueKeysCount: number; - addFormSamples: (samples: string[]) => void; - setSampleCount: (value: number) => void; - setUniqueKeysCount: (value: number) => void; - setEcsMappingFormValue: (key: string, value: string) => void; - setEcsMappingFormArrayValue: (key: string, value: string[]) => void; - resetEcsMappingFormState: () => void; -} - -interface EcsMappingTableState { - ecsMappingTableState: EcsMappingTableItem[]; - ecsMappingTablePopoverState: {}; - ecsMappingTableItemsWithEcs: number; - setEcsMappingTableItemsWithEcs: (value: number) => void; - setEcsMappingTablePopoverState: (identifier: string) => void; - setEcsMappingTableState: (value: EcsMappingTableItem[]) => void; - updateEcsMappingTableItem: (id: string, newDestinationField: string) => void; - resetEcsMappingTableState: () => void; -} - -interface IntegrationBuilderStepsState { - integrationBuilderStep1: string; - integrationBuilderStep2: string; - integrationBuilderStep3: string; - integrationBuilderStep4: string; - integrationBuilderStep5: string; - setIntegrationBuilderStepsState: (key: string, value: string) => void; - resetIntegrationBuilderStepsState: () => void; -} - -interface IntegrationBuilderContinueState { - ecsButtonContinue: boolean, - relatedButtonContinue: boolean, - categorizationButtonContinue: boolean, - setContinueButtonState: (key: string, value: boolean) => void; - resetContinueButtonState: () => void; -} - -interface IntegrationBuilderIsLoadingState { - relatedIsLoading: boolean, - ecsMappingIsLoading: boolean, - categorizationIsLoading: boolean, - setIsLoadingState: (key: string, value: boolean) => void; - resetIsLoadingState: () => void; -} - -interface IntegrationBuilderChainItemsState { - ingestPipeline: object; - docs: Array; - mapping: object; - integrationBuilderZipFile: File | null; - 
setIntegrationBuilderZipFile: (file: File) => void; - setIntegrationBuilderChainItemsState: (key: string, value: object) => void; - updateChainItem: (path: string, newValue: object, itemType: string) => void; - resetChainItemsState: () => void; -} - -interface IntegrationBuilderHeaderState { - integrationBuilderHeaderTitle: string; - isPortalLoading: boolean; - setIsPortalLoadingState: (value: boolean) => void; - setIntegrationBuilderHeaderTitle: (value: string) => void; - resetIntegrationBuilderHeaderState: () => void; -} \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/public/types/SideNav.tsx b/x-pack/plugins/integration_assistant/public/types/SideNav.tsx deleted file mode 100644 index cbca821ad1be6..0000000000000 --- a/x-pack/plugins/integration_assistant/public/types/SideNav.tsx +++ /dev/null @@ -1,4 +0,0 @@ -interface SideNavState { - selected: string; - setSelected: (value: string) => void; -} From b89cf96bc7e7ff048d3843f96bf7df7e0150d6c2 Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Fri, 24 May 2024 14:55:22 +0200 Subject: [PATCH 04/62] cleaning up a lot of old code, reformat for kibana etc --- .../BuildIntegrationButtons.tsx | 32 +- .../components/Buttons/ActionButton.tsx | 25 +- .../Buttons/ContinueButton.test.tsx | 21 +- .../components/Buttons/ContinueButton.tsx | 23 +- .../components/Buttons/GoBackButton.tsx | 14 +- .../public/components/Buttons/ResetButton.tsx | 16 +- .../Categorization/CategorizationButtons.tsx | 32 +- .../public/components/Ecs/EcsButtons.tsx | 42 +- .../public/components/Ecs/EcsFileUpload.tsx | 12 +- .../public/components/Ecs/EcsForm.tsx | 33 +- .../public/components/Ecs/EcsFormStats.tsx | 22 +- .../public/components/Ecs/EcsTable.tsx | 33 +- .../components/EmptyPrompt/EmptyPrompt.tsx | 21 +- .../Headers/IntegrationBuilderHeader.tsx | 30 +- .../IntegrationBuilderSteps.tsx | 14 +- .../IntegrationResults/DocsResults.tsx | 14 +- .../IntegrationResults/PipelineResults.tsx | 12 +- 
.../public/components/Links/CustomLink.tsx | 31 - .../components/Portal/ProgressPortal.tsx | 12 +- .../components/Related/RelatedButtons.tsx | 28 +- .../public/components/SideNav/SideNav.tsx | 79 --- .../ViewResults/ViewResultsButtons.tsx | 25 +- .../public/constants/ecsFields.tsx | 541 ++++++++++++------ .../public/constants/headerTitles.tsx | 25 +- .../public/constants/routePaths.tsx | 11 +- .../integration_assistant/public/index.ts | 2 - .../BuildIntegrationPage.tsx | 19 +- .../IntegrationBuilder/CategorizationPage.tsx | 32 +- .../IntegrationBuilder/EcsMapperPage.tsx | 45 +- .../pages/IntegrationBuilder/RelatedPage.tsx | 30 +- .../IntegrationBuilder/ViewResultsPage.tsx | 36 +- .../public/pages/Main/MainPage.tsx | 12 +- .../integration_assistant/public/plugin.tsx | 16 +- .../public/utils/samples.tsx | 18 +- 34 files changed, 850 insertions(+), 508 deletions(-) delete mode 100644 x-pack/plugins/integration_assistant/public/components/Links/CustomLink.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/components/SideNav/SideNav.tsx diff --git a/x-pack/plugins/integration_assistant/public/components/BuildIntegration/BuildIntegrationButtons.tsx b/x-pack/plugins/integration_assistant/public/components/BuildIntegration/BuildIntegrationButtons.tsx index 27db590329bbb..485892f4da71e 100644 --- a/x-pack/plugins/integration_assistant/public/components/BuildIntegration/BuildIntegrationButtons.tsx +++ b/x-pack/plugins/integration_assistant/public/components/BuildIntegration/BuildIntegrationButtons.tsx @@ -1,12 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React from 'react'; import { EuiFlexGroup } from '@elastic/eui'; import { useGlobalStore } from '@Stores/useGlobalStore'; import { buildIntegration, installIntegration } from '@api/services/integrationBuilderService'; -import RoutePaths from '@Constants/routePaths'; -import ActionButton from '@Components/Buttons/ActionButton'; -import GoBackButton from '@Components/Buttons/GoBackButton'; +import RoutePaths from '../../constants/routePaths'; +import ActionButton from '../Buttons/ActionButton'; +import GoBackButton from '../Buttons/GoBackButton'; -const BuildIntegrationButtons = () => { +export const BuildIntegrationButtons = () => { const integrationBuilderZipFile = useGlobalStore((state) => state.integrationBuilderZipFile); const packageName = useGlobalStore((state) => state.packageName); const packageTitle = useGlobalStore((state) => state.packageTitle); @@ -17,8 +25,12 @@ const BuildIntegrationButtons = () => { const ingestPipeline = useGlobalStore((state) => state.ingestPipeline); const docs = useGlobalStore((state) => state.docs); - const setIntegrationBuilderZipFile = useGlobalStore((state) => state.setIntegrationBuilderZipFile); - const setIntegrationBuilderStepsState = useGlobalStore((state) => state.setIntegrationBuilderStepsState); + const setIntegrationBuilderZipFile = useGlobalStore( + (state) => state.setIntegrationBuilderZipFile + ); + const setIntegrationBuilderStepsState = useGlobalStore( + (state) => state.setIntegrationBuilderStepsState + ); const onBuildClick = async () => { const req = { @@ -63,10 +75,12 @@ const BuildIntegrationButtons = () => { - + ); }; - -export default BuildIntegrationButtons; diff --git a/x-pack/plugins/integration_assistant/public/components/Buttons/ActionButton.tsx b/x-pack/plugins/integration_assistant/public/components/Buttons/ActionButton.tsx index 83f6f4be3735a..3ac6a4c69c15c 100644 --- a/x-pack/plugins/integration_assistant/public/components/Buttons/ActionButton.tsx +++ 
b/x-pack/plugins/integration_assistant/public/components/Buttons/ActionButton.tsx @@ -1,3 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import React from 'react'; import { EuiButton } from '@elastic/eui'; import { MouseEventHandler } from 'react'; @@ -8,12 +16,21 @@ interface ActionButtonProps { isDisabled?: boolean; } -const ActionButton = ({ text, onActionClick, isLoading = false, isDisabled = false }: ActionButtonProps) => { +export const ActionButton = ({ + text, + onActionClick, + isLoading = false, + isDisabled = false, +}: ActionButtonProps) => { return ( - + {text} ); }; - -export default ActionButton; diff --git a/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.test.tsx b/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.test.tsx index 3bdfeeefb520b..79ea1bcad2d20 100644 --- a/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.test.tsx +++ b/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.test.tsx @@ -1,9 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React from 'react'; import { beforeEach, describe, expect, it } from 'vitest'; import { act, render, screen } from '@testing-library/react'; import { useGlobalStore } from '@Stores/useGlobalStore'; import { userEvent } from '@testing-library/user-event'; -import ContinueButton from '@Components/Buttons/ContinueButton'; -import RoutePaths from '@Constants/routePaths'; +import ContinueButton from './ContinueButton'; +import RoutePaths from '../../constants/routePaths'; import { BrowserRouter as Router } from 'react-router-dom'; describe('ContinueButton Tests', () => { @@ -17,7 +25,7 @@ describe('ContinueButton Tests', () => { render( - , + ); const user = userEvent.setup(); await act(async () => { @@ -31,8 +39,11 @@ describe('ContinueButton Tests', () => { const ecsButtonContinue = useGlobalStore.getState().ecsButtonContinue; render( - - , + + ); expect(screen.getByLabelText('continue-button')).toBeDefined(); expect(screen.getByLabelText('continue-button')).toBeDisabled(); diff --git a/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.tsx b/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.tsx index 8370c76f95bbf..10bd9217810b1 100644 --- a/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.tsx +++ b/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.tsx @@ -1,3 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React from 'react'; import { EuiButton } from '@elastic/eui'; import { useGlobalStore } from '@Stores/useGlobalStore'; import { useNavigate } from 'react-router-dom'; @@ -6,12 +14,19 @@ interface ContinueButtonProps { continuePath: string; isDisabled: boolean; currentStep: string; - completeStep: string + completeStep: string; } -const ContinueButton = ({ continuePath, isDisabled, currentStep, completeStep }: ContinueButtonProps) => { +export const ContinueButton = ({ + continuePath, + isDisabled, + currentStep, + completeStep, +}: ContinueButtonProps) => { const setSelected = useGlobalStore((state) => state.setSelected); - const setIntegrationBuilderStepsState = useGlobalStore((state) => state.setIntegrationBuilderStepsState); + const setIntegrationBuilderStepsState = useGlobalStore( + (state) => state.setIntegrationBuilderStepsState + ); const navigate = useNavigate(); const selectAndNavigate = (path) => { @@ -37,5 +52,3 @@ const ContinueButton = ({ continuePath, isDisabled, currentStep, completeStep }: ); }; - -export default ContinueButton; diff --git a/x-pack/plugins/integration_assistant/public/components/Buttons/GoBackButton.tsx b/x-pack/plugins/integration_assistant/public/components/Buttons/GoBackButton.tsx index a5c3209dec9dc..23760f8ac646a 100644 --- a/x-pack/plugins/integration_assistant/public/components/Buttons/GoBackButton.tsx +++ b/x-pack/plugins/integration_assistant/public/components/Buttons/GoBackButton.tsx @@ -1,13 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React from 'react'; import { EuiButton } from '@elastic/eui'; import { useGlobalStore } from '@Stores/useGlobalStore'; -import RoutePaths from '@Constants/routePaths'; +import RoutePaths from '../../constants/routePaths'; import { useNavigate } from 'react-router-dom'; interface GoBackButtonProps { path: RoutePaths; } -const GoBackButton = ({ path }: GoBackButtonProps) => { +export const GoBackButton = ({ path }: GoBackButtonProps) => { const setSelected = useGlobalStore((state) => state.setSelected); const navigate = useNavigate(); @@ -22,5 +30,3 @@ const GoBackButton = ({ path }: GoBackButtonProps) => { ); }; - -export default GoBackButton; diff --git a/x-pack/plugins/integration_assistant/public/components/Buttons/ResetButton.tsx b/x-pack/plugins/integration_assistant/public/components/Buttons/ResetButton.tsx index 9a8f77445a70f..74eaedf805d35 100644 --- a/x-pack/plugins/integration_assistant/public/components/Buttons/ResetButton.tsx +++ b/x-pack/plugins/integration_assistant/public/components/Buttons/ResetButton.tsx @@ -1,11 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React from 'react'; import { EuiButton } from '@elastic/eui'; import { useGlobalStore } from '@Stores/useGlobalStore'; -const ResetButton = () => { +export const ResetButton = () => { const resetEcsMappingFormState = useGlobalStore((state) => state.resetEcsMappingFormState); const resetChainItemsState = useGlobalStore((state) => state.resetChainItemsState); const resetEcsMappingTableState = useGlobalStore((state) => state.resetEcsMappingTableState); - const resetIntegrationBuilderStepsState = useGlobalStore((state) => state.resetIntegrationBuilderStepsState); + const resetIntegrationBuilderStepsState = useGlobalStore( + (state) => state.resetIntegrationBuilderStepsState + ); const resetContinueButtonState = useGlobalStore((state) => state.resetContinueButtonState); const resetIsLoadingState = useGlobalStore((state) => state.resetIsLoadingState); @@ -24,5 +34,3 @@ const ResetButton = () => { ); }; - -export default ResetButton; diff --git a/x-pack/plugins/integration_assistant/public/components/Categorization/CategorizationButtons.tsx b/x-pack/plugins/integration_assistant/public/components/Categorization/CategorizationButtons.tsx index 32ad72c34d9c5..ff41e488651f5 100644 --- a/x-pack/plugins/integration_assistant/public/components/Categorization/CategorizationButtons.tsx +++ b/x-pack/plugins/integration_assistant/public/components/Categorization/CategorizationButtons.tsx @@ -1,21 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React from 'react'; import { useGlobalStore } from '@Stores/useGlobalStore'; import { EuiFlexGroup } from '@elastic/eui'; import { getCategorization } from '@Api/services/categorizationService'; -import RoutePaths from '@Constants/routePaths'; +import RoutePaths from '../../constants/routePaths'; -import ContinueButton from '@Components/Buttons/ContinueButton'; -import ActionButton from '@Components/Buttons/ActionButton'; -import GoBackButton from '@Components/Buttons/GoBackButton'; +import { ContinueButton } from '../Buttons/ContinueButton'; +import { ActionButton } from '../Buttons/ActionButton'; +import { GoBackButton } from '../Buttons/GoBackButton'; -const CategorizationButtons = () => { +export const CategorizationButtons = () => { const packageName = useGlobalStore((state) => state.packageName); const dataStreamName = useGlobalStore((state) => state.dataStreamName); const formSamples = useGlobalStore((state) => state.formSamples); const categorizationIsLoading = useGlobalStore((state) => state.categorizationIsLoading); - const categorizationButtonContinue = useGlobalStore((state) => state.categorizationButtonContinue); + const categorizationButtonContinue = useGlobalStore( + (state) => state.categorizationButtonContinue + ); const ingestPipeline = useGlobalStore((state) => state.ingestPipeline); const setIsLoadingState = useGlobalStore((state) => state.setIsLoadingState); - const setIntegrationBuilderChainItemsState = useGlobalStore((state) => state.setIntegrationBuilderChainItemsState); + const setIntegrationBuilderChainItemsState = useGlobalStore( + (state) => state.setIntegrationBuilderChainItemsState + ); const setContinueButtonState = useGlobalStore((state) => state.setContinueButtonState); const setIsPortalLoadingState = useGlobalStore((state) => state.setIsPortalLoadingState); @@ -49,12 +61,10 @@ const CategorizationButtons = () => { ); }; - -export default CategorizationButtons; diff --git 
a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsButtons.tsx b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsButtons.tsx index 3b464b3e1aae9..0dd42e9cb75a2 100644 --- a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsButtons.tsx +++ b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsButtons.tsx @@ -1,20 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import React from 'react'; import { EuiFlexGroup } from '@elastic/eui'; import { useGlobalStore } from '@Stores/useGlobalStore'; import { getEcsMapping, formatEcsResponse } from '@Api/services/ecsMappingService'; -import RoutePaths from '@Constants/routePaths'; -import ContinueButton from '@Components/Buttons/ContinueButton'; -import ActionButton from '@Components/Buttons/ActionButton'; -import ResetButton from '@components/Buttons/ResetButton'; +import RoutePaths from '../../constants/routePaths'; +import { ContinueButton } from '../Buttons/ContinueButton'; +import { ActionButton } from '../Buttons/ActionButton'; +import { ResetButton } from '../Buttons/ResetButton'; -const EcsButtons = () => { +export const EcsButtons = () => { const packageName = useGlobalStore((state) => state.packageName); const dataStreamName = useGlobalStore((state) => state.dataStreamName); const formSamples = useGlobalStore((state) => state.formSamples); const ecsMappingIsLoading = useGlobalStore((state) => state.ecsMappingIsLoading); const ecsButtonContinue = useGlobalStore((state) => state.ecsButtonContinue); const setEcsMappingTableState = useGlobalStore((state) => state.setEcsMappingTableState); - const setEcsMappingTableItemsWithEcs = useGlobalStore((state) => state.setEcsMappingTableItemsWithEcs); - const setIntegrationBuilderChainItemsState = 
useGlobalStore((state) => state.setIntegrationBuilderChainItemsState); + const setEcsMappingTableItemsWithEcs = useGlobalStore( + (state) => state.setEcsMappingTableItemsWithEcs + ); + const setIntegrationBuilderChainItemsState = useGlobalStore( + (state) => state.setIntegrationBuilderChainItemsState + ); const setIsLoadingState = useGlobalStore((state) => state.setIsLoadingState); const setContinueButtonState = useGlobalStore((state) => state.setContinueButtonState); const setIsPortalLoadingState = useGlobalStore((state) => state.setIsPortalLoadingState); @@ -28,7 +40,12 @@ const EcsButtons = () => { setIntegrationBuilderChainItemsState('mapping', response.results.mapping); setIntegrationBuilderChainItemsState('ingestPipeline', response.results.current_pipeline); - const formatedEcsTableData = formatEcsResponse(response, packageName, dataStreamName, formSamples); + const formatedEcsTableData = formatEcsResponse( + response, + packageName, + dataStreamName, + formSamples + ); setEcsMappingTableState(formatedEcsTableData); const count = formatedEcsTableData.filter((item) => item.isEcs === true).length; @@ -47,10 +64,13 @@ const EcsButtons = () => { isDisabled={ecsButtonContinue} onActionClick={onCreateEcsMappingClick} /> - + ); }; - -export default EcsButtons; diff --git a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsFileUpload.tsx b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsFileUpload.tsx index af0d942ac139b..8f71d37166f88 100644 --- a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsFileUpload.tsx +++ b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsFileUpload.tsx @@ -1,7 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React from 'react'; import { EuiFilePicker, useGeneratedHtmlId } from '@elastic/eui'; import { useGlobalStore } from '@Stores/useGlobalStore'; -const EcsFileUpload = () => { +export const EcsFileUpload = () => { const filePickerId = useGeneratedHtmlId({ prefix: 'filePicker' }); const addFormSamples = useGlobalStore((state) => state.addFormSamples); @@ -63,5 +71,3 @@ const EcsFileUpload = () => { /> ); }; - -export default EcsFileUpload; diff --git a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsForm.tsx b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsForm.tsx index fa97e8a07129b..7f0bb26bbe9dd 100644 --- a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsForm.tsx +++ b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsForm.tsx @@ -1,3 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React from 'react'; import { EuiForm, EuiFieldText, @@ -9,11 +17,11 @@ import { EuiSelect, useGeneratedHtmlId, } from '@elastic/eui'; -import EcsButtons from '@components/Ecs/EcsButtons'; +import { EcsButtons } from './EcsButtons'; import { useGlobalStore } from '@Stores/useGlobalStore'; -import EcsFileUpload from '@Components/Ecs/EcsFileUpload'; +import { EcsFileUpload } from './EcsFileUpload'; -const EcsForm = () => { +export const EcsForm = () => { const packageName = useGlobalStore((state) => state.packageName); const packageTitle = useGlobalStore((state) => state.packageTitle); const packageVersion = useGlobalStore((state) => state.packageVersion); @@ -30,7 +38,7 @@ const EcsForm = () => { const onInputTypeChange = (selected) => { setEcsMappingFormArrayValue( 'inputTypes', - selected.map((item) => item.label), + selected.map((item) => item.label) ); }; @@ -57,7 +65,10 @@ const EcsForm = () => { onChange={(e) => handleFormStateChange('packageTitle', e.target.value)} /> - + { onChange={(e) => handleFormStateChange('packageVersion', e.target.value)} /> - + { onChange={(e) => handleFormStateChange('dataStreamName', e.target.value)} /> - + { ); }; - -export default EcsForm; diff --git a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsFormStats.tsx b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsFormStats.tsx index 34e383584a7e2..08d114cc991b4 100644 --- a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsFormStats.tsx +++ b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsFormStats.tsx @@ -1,7 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React from 'react'; import { EuiStat, EuiFlexGroup, EuiFlexItem, EuiPanel } from '@elastic/eui'; import { useGlobalStore } from '@Stores/useGlobalStore'; -const EcsFormStats = () => { +export const EcsFormStats = () => { const sampleCount = useGlobalStore((state) => state.sampleCount); const uniqueKeysCount = useGlobalStore((state) => state.uniqueKeysCount); const ecsMappingTableItemsWithEcs = useGlobalStore((state) => state.ecsMappingTableItemsWithEcs); @@ -16,11 +24,17 @@ const EcsFormStats = () => { - + @@ -29,5 +43,3 @@ const EcsFormStats = () => { ); }; - -export default EcsFormStats; diff --git a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsTable.tsx b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsTable.tsx index db954ea90423a..c3430ebac810d 100644 --- a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsTable.tsx +++ b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsTable.tsx @@ -1,3 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React from 'react'; import { EuiBasicTable, EuiFlexItem, @@ -12,8 +20,9 @@ import { import { useState } from 'react'; import { getUpdatedPipeline } from '@Api/services/ecsMappingService'; import { useGlobalStore } from '@Stores/useGlobalStore'; +import { EcsMappingTableItem } from '../../types'; -const EcsTable = () => { +export const EcsTable = () => { const packageName = useGlobalStore((state) => state.packageName); const dataStreamName = useGlobalStore((state) => state.dataStreamName); const formSamples = useGlobalStore((state) => state.formSamples); @@ -21,8 +30,12 @@ const EcsTable = () => { const ecsMappingTablePopoverState = useGlobalStore((state) => state.ecsMappingTablePopoverState); const ecsMappingTableState = useGlobalStore((state) => state.ecsMappingTableState); const mapping = useGlobalStore((state) => state.mapping); - const setEcsMappingTablePopoverState = useGlobalStore((state) => state.setEcsMappingTablePopoverState); - const setIntegrationBuilderChainItemsState = useGlobalStore((state) => state.setIntegrationBuilderChainItemsState); + const setEcsMappingTablePopoverState = useGlobalStore( + (state) => state.setEcsMappingTablePopoverState + ); + const setIntegrationBuilderChainItemsState = useGlobalStore( + (state) => state.setIntegrationBuilderChainItemsState + ); const setContinueButtonState = useGlobalStore((state) => state.setContinueButtonState); const setIsLoadingState = useGlobalStore((state) => state.setIsLoadingState); const updateEcsMappingTableItem = useGlobalStore((state) => state.updateEcsMappingTableItem); @@ -37,7 +50,11 @@ const EcsTable = () => { setIsLoadingState('ecsMappingIsLoading', true); setContinueButtonState('ecsButtonContinue', false); updateEcsMappingTableItem(id, newDestinationField); - updateChainItem(`${packageName}.${dataStreamName}.${sourceField}`, newDestinationField, 'mapping'); + updateChainItem( + `${packageName}.${dataStreamName}.${sourceField}`, + newDestinationField, + 'mapping' + ); const req 
= { packageName, dataStreamName, formSamples, mapping }; const response = await getUpdatedPipeline(req); @@ -79,7 +96,11 @@ const EcsTable = () => { }; }; - const { pageOfItems, totalItemCount } = getEcsTablePage(ecsMappingTableState, pageIndex, pageSize); + const { pageOfItems, totalItemCount } = getEcsTablePage( + ecsMappingTableState, + pageIndex, + pageSize + ); const pagination = { pageIndex, @@ -176,5 +197,3 @@ const EcsTable = () => { ); }; - -export default EcsTable; diff --git a/x-pack/plugins/integration_assistant/public/components/EmptyPrompt/EmptyPrompt.tsx b/x-pack/plugins/integration_assistant/public/components/EmptyPrompt/EmptyPrompt.tsx index e17be1430d695..9f0a767890937 100644 --- a/x-pack/plugins/integration_assistant/public/components/EmptyPrompt/EmptyPrompt.tsx +++ b/x-pack/plugins/integration_assistant/public/components/EmptyPrompt/EmptyPrompt.tsx @@ -1,6 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React from 'react'; import { EuiPageTemplate } from '@elastic/eui'; -import GoBackButton from '@Components/Buttons/GoBackButton'; -import RoutePaths from '@Constants/routePaths'; +import { GoBackButton } from '../Buttons/GoBackButton'; +import RoutePaths from '../../constants/routePaths'; interface EmptyPromptProps { title: string; @@ -8,12 +16,13 @@ interface EmptyPromptProps { goBackPath: RoutePaths; } -const EmptyPrompt = ({ title, description, goBackPath }: EmptyPromptProps) => { +export const EmptyPrompt = ({ title, description, goBackPath }: EmptyPromptProps) => { return ( - {title}} actions={}> + {title}} + actions={} + > {description} ); }; - -export default EmptyPrompt; diff --git a/x-pack/plugins/integration_assistant/public/components/Headers/IntegrationBuilderHeader.tsx b/x-pack/plugins/integration_assistant/public/components/Headers/IntegrationBuilderHeader.tsx index bd86b792c3a85..d2660e2d4010c 100644 --- a/x-pack/plugins/integration_assistant/public/components/Headers/IntegrationBuilderHeader.tsx +++ b/x-pack/plugins/integration_assistant/public/components/Headers/IntegrationBuilderHeader.tsx @@ -1,21 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React from 'react'; import { EuiPageTemplate } from '@elastic/eui'; import { useLocation } from 'react-router-dom'; -import IntegrationBuilderSteps from '@Components/IntegrationBuilderSteps/IntegrationBuilderSteps'; -import ProgressPortal from '@Components/Portal/ProgressPortal'; +import IntegrationBuilderSteps from '../IntegrationBuilderSteps/IntegrationBuilderSteps'; +import ProgressPortal from '../Portal/ProgressPortal'; import { useGlobalStore } from '@Stores/useGlobalStore'; -import HeaderTitles from '@Constants/headerTitles'; +import HeaderTitles from '../../constants/headerTitles'; -const IntegrationBuilderHeader = () => { - const location = useLocation(); - const isPortalLoading = useGlobalStore((state) => state.isPortalLoading); - const pageTitle = HeaderTitles[location.pathname as keyof typeof HeaderTitles] || 'Unknown Page'; +export const IntegrationBuilderHeader = () => { + const location = useLocation(); + const isPortalLoading = useGlobalStore((state) => state.isPortalLoading); + const pageTitle = HeaderTitles[location.pathname as keyof typeof HeaderTitles] || 'Unknown Page'; return ( <> - - {pageTitle && pageTitle != 'Base Page' && } + + {pageTitle && pageTitle !== 'Base Page' && } {isPortalLoading && } - + ); }; - -export default IntegrationBuilderHeader; diff --git a/x-pack/plugins/integration_assistant/public/components/IntegrationBuilderSteps/IntegrationBuilderSteps.tsx b/x-pack/plugins/integration_assistant/public/components/IntegrationBuilderSteps/IntegrationBuilderSteps.tsx index 470c23e1717a3..6ea2b0fd2bfd8 100644 --- a/x-pack/plugins/integration_assistant/public/components/IntegrationBuilderSteps/IntegrationBuilderSteps.tsx +++ b/x-pack/plugins/integration_assistant/public/components/IntegrationBuilderSteps/IntegrationBuilderSteps.tsx @@ -1,10 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import React from 'react'; import { EuiStepsHorizontal, EuiStepsHorizontalProps } from '@elastic/eui'; import { useGlobalStore } from '@Stores/useGlobalStore'; import { useNavigate } from 'react-router-dom'; -import RoutePaths from '@Constants/routePaths'; +import RoutePaths from '../../constants/routePaths'; -const IntegrationBuilderSteps = () => { +export const IntegrationBuilderSteps = () => { const step1 = useGlobalStore((state) => state.integrationBuilderStep1); const step2 = useGlobalStore((state) => state.integrationBuilderStep2); const step3 = useGlobalStore((state) => state.integrationBuilderStep3); @@ -60,5 +68,3 @@ const IntegrationBuilderSteps = () => { return ; }; - -export default IntegrationBuilderSteps; diff --git a/x-pack/plugins/integration_assistant/public/components/IntegrationResults/DocsResults.tsx b/x-pack/plugins/integration_assistant/public/components/IntegrationResults/DocsResults.tsx index f896b96cd0af4..f34083213efe5 100644 --- a/x-pack/plugins/integration_assistant/public/components/IntegrationResults/DocsResults.tsx +++ b/x-pack/plugins/integration_assistant/public/components/IntegrationResults/DocsResults.tsx @@ -1,11 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React from 'react'; import { EuiCodeBlock, EuiAccordion, EuiPanel, useGeneratedHtmlId } from '@elastic/eui'; import { css } from '@emotion/react'; interface DocsResultsProps { - docs: Array; + docs: object[]; } -const DocResults = ({ docs }: DocsResultsProps) => { +export const DocResults = ({ docs }: DocsResultsProps) => { const simpleAccordionId = useGeneratedHtmlId({ prefix: 'docs_results' }); return ( @@ -28,5 +36,3 @@ const DocResults = ({ docs }: DocsResultsProps) => { ); }; - -export default DocResults; diff --git a/x-pack/plugins/integration_assistant/public/components/IntegrationResults/PipelineResults.tsx b/x-pack/plugins/integration_assistant/public/components/IntegrationResults/PipelineResults.tsx index 395926f4e3e39..073bb102d9491 100644 --- a/x-pack/plugins/integration_assistant/public/components/IntegrationResults/PipelineResults.tsx +++ b/x-pack/plugins/integration_assistant/public/components/IntegrationResults/PipelineResults.tsx @@ -1,3 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React from 'react'; import { EuiCodeBlock, EuiAccordion, EuiPanel, useGeneratedHtmlId } from '@elastic/eui'; import { css } from '@emotion/react'; @@ -5,7 +13,7 @@ interface PipelineResultsProps { pipeline: object; } -const PipelineResults = ({ pipeline }: PipelineResultsProps) => { +export const PipelineResults = ({ pipeline }: PipelineResultsProps) => { const simpleAccordionId = useGeneratedHtmlId({ prefix: 'ingest_pipeline_results' }); return ( @@ -28,5 +36,3 @@ const PipelineResults = ({ pipeline }: PipelineResultsProps) => { ); }; - -export default PipelineResults; diff --git a/x-pack/plugins/integration_assistant/public/components/Links/CustomLink.tsx b/x-pack/plugins/integration_assistant/public/components/Links/CustomLink.tsx deleted file mode 100644 index a78ea50f8e5c4..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/Links/CustomLink.tsx +++ /dev/null @@ -1,31 +0,0 @@ -import { EuiLink } from '@elastic/eui'; -import { useNavigate } from 'react-router'; - -const isModifiedEvent = (event) => !!(event.metaKey || event.altKey || event.ctrlKey || event.shiftKey); - -const isLeftClickEvent = (event) => event.button === 0; - -export default function CustomLink({ to, ...props }) { - const navigate = useNavigate(); - - function onClick(event) { - if (event.defaultPrevented) { - return; - } - - // If target prop is set (e.g. to "_blank"), let browser handle link. - if (event.target.getAttribute('target')) { - return; - } - - if (isModifiedEvent(event) || !isLeftClickEvent(event)) { - return; - } - - // Prevent regular link behavior, which causes a browser refresh. 
- event.preventDefault(); - navigate(to); - } - - return ; -} diff --git a/x-pack/plugins/integration_assistant/public/components/Portal/ProgressPortal.tsx b/x-pack/plugins/integration_assistant/public/components/Portal/ProgressPortal.tsx index df81bd15a0e85..0b4e7d46ce140 100644 --- a/x-pack/plugins/integration_assistant/public/components/Portal/ProgressPortal.tsx +++ b/x-pack/plugins/integration_assistant/public/components/Portal/ProgressPortal.tsx @@ -1,11 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import React from 'react'; import { EuiPortal, EuiProgress } from '@elastic/eui'; -const ProgressPortal = () => { +export const ProgressPortal = () => { return ( ); }; - -export default ProgressPortal; diff --git a/x-pack/plugins/integration_assistant/public/components/Related/RelatedButtons.tsx b/x-pack/plugins/integration_assistant/public/components/Related/RelatedButtons.tsx index 26971736e969f..d925f890c2e38 100644 --- a/x-pack/plugins/integration_assistant/public/components/Related/RelatedButtons.tsx +++ b/x-pack/plugins/integration_assistant/public/components/Related/RelatedButtons.tsx @@ -1,13 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React from 'react'; import { useGlobalStore } from '@Stores/useGlobalStore'; import { EuiFlexGroup } from '@elastic/eui'; import { getCategorization } from '@Api/services/categorizationService'; -import RoutePaths from '@Constants/routePaths'; +import RoutePaths from '../../constants/routePaths'; -import ContinueButton from '@Components/Buttons/ContinueButton'; -import ActionButton from '@Components/Buttons/ActionButton'; -import GoBackButton from '@Components/Buttons/GoBackButton'; +import { ContinueButton } from '../Buttons/ContinueButton'; +import { ActionButton } from '../Buttons/ActionButton'; +import { GoBackButton } from '../Buttons/GoBackButton'; -const RelatedButtons = () => { +export const RelatedButtons = () => { const packageName = useGlobalStore((state) => state.packageName); const dataStreamName = useGlobalStore((state) => state.dataStreamName); const formSamples = useGlobalStore((state) => state.formSamples); @@ -15,7 +23,9 @@ const RelatedButtons = () => { const relatedButtonContinue = useGlobalStore((state) => state.relatedButtonContinue); const ingestPipeline = useGlobalStore((state) => state.ingestPipeline); const setIsLoadingState = useGlobalStore((state) => state.setIsLoadingState); - const setIntegrationBuilderChainItemsState = useGlobalStore((state) => state.setIntegrationBuilderChainItemsState); + const setIntegrationBuilderChainItemsState = useGlobalStore( + (state) => state.setIntegrationBuilderChainItemsState + ); const setContinueButtonState = useGlobalStore((state) => state.setContinueButtonState); const setIsPortalLoadingState = useGlobalStore((state) => state.setIsPortalLoadingState); @@ -49,12 +59,10 @@ const RelatedButtons = () => { ); }; - -export default RelatedButtons; diff --git a/x-pack/plugins/integration_assistant/public/components/SideNav/SideNav.tsx b/x-pack/plugins/integration_assistant/public/components/SideNav/SideNav.tsx deleted file mode 100644 index 6e33e7dbe7b21..0000000000000 --- 
a/x-pack/plugins/integration_assistant/public/components/SideNav/SideNav.tsx +++ /dev/null @@ -1,79 +0,0 @@ -import { useNavigate, useLocation } from 'react-router-dom'; -import { useEffect } from 'react'; -import { EuiSideNav, EuiIcon } from '@elastic/eui'; -import { useGlobalStore } from '@Stores/useGlobalStore'; -import RoutePaths from '@Constants/routePaths'; - -const SideNav = () => { - const navigate = useNavigate(); - const location = useLocation(); - const selected = useGlobalStore((state) => state.selected); - const setSelected = useGlobalStore((state) => state.setSelected); - const selectItem = (name) => { - setSelected(name); - navigate(name); - }; - useEffect(() => { - if (!selected) { - setSelected(location.pathname); - } - }, []); - - return ( - , - id: '0', - href: '/', - items: [ - { - name: 'Integration Builder', - icon: , - id: '0.1', - items: [ - { - name: 'ECS Mapping', - id: '0.1.0', - isSelected: selected === RoutePaths.ECS_MAPPING_PATH, - onClick: () => selectItem(RoutePaths.ECS_MAPPING_PATH), - }, - { - name: 'Add Categorization', - id: '0.1.1', - isSelected: selected === RoutePaths.CATEGORIZATION_PATH, - onClick: () => selectItem(RoutePaths.CATEGORIZATION_PATH), - href: '#', - }, - { - name: 'Add Related Fields', - id: '0.1.2', - isSelected: selected === RoutePaths.RELATED_PATH, - onClick: () => selectItem(RoutePaths.RELATED_PATH), - href: '#', - }, - { - name: 'View Results', - id: '0.1.3', - isSelected: selected === RoutePaths.INTEGRATION_BUILDER_RESULTS_PATH, - onClick: () => selectItem(RoutePaths.INTEGRATION_BUILDER_RESULTS_PATH), - href: '#', - }, - { - name: 'Build & Deploy', - id: '0.1.4', - isSelected: selected === RoutePaths.INTEGRATION_BUILDER_BUILD_PATH, - onClick: () => selectItem(RoutePaths.INTEGRATION_BUILDER_BUILD_PATH), - href: '#', - }, - ], - }, - ], - }, - ]} - /> - ); -}; - -export default SideNav; diff --git a/x-pack/plugins/integration_assistant/public/components/ViewResults/ViewResultsButtons.tsx 
b/x-pack/plugins/integration_assistant/public/components/ViewResults/ViewResultsButtons.tsx index c1ea0d1becb29..5f57af2be69c2 100644 --- a/x-pack/plugins/integration_assistant/public/components/ViewResults/ViewResultsButtons.tsx +++ b/x-pack/plugins/integration_assistant/public/components/ViewResults/ViewResultsButtons.tsx @@ -1,15 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import React from 'react'; import { EuiFlexGroup } from '@elastic/eui'; -import ContinueButton from '@Components/Buttons/ContinueButton'; -import GoBackButton from '@Components/Buttons/GoBackButton'; -import RoutePaths from '@Constants/routePaths'; +import { ContinueButton } from '../Buttons/ContinueButton'; +import { GoBackButton } from '../Buttons/GoBackButton'; +import RoutePaths from '../../constants/routePaths'; -const ViewResults = () => { +export const ViewResults = () => { return ( - + ); }; - -export default ViewResults; diff --git a/x-pack/plugins/integration_assistant/public/constants/ecsFields.tsx b/x-pack/plugins/integration_assistant/public/constants/ecsFields.tsx index 73cacd4b6a863..4e5bfe8b9994b 100644 --- a/x-pack/plugins/integration_assistant/public/constants/ecsFields.tsx +++ b/x-pack/plugins/integration_assistant/public/constants/ecsFields.tsx @@ -1,3 +1,10 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + export const ECSFIELDS = { '@timestamp': 'Date/time when the event originated.\nThis is the date/time extracted from the event, typically representing when the event was generated by the source.\nIf the event source has no original timestamp, this value is typically populated by the first time the event was received by the pipeline.\nRequired field for all events.', @@ -5,7 +12,8 @@ export const ECSFIELDS = { 'Extended build information for the agent.\nThis field is intended to contain any build information that a data source may provide, no specific formatting is required.', 'agent.ephemeral_id': 'Ephemeral identifier of this agent (if one exists).\nThis id normally changes across restarts, but `agent.id` does not.', - 'agent.id': 'Unique identifier of this agent (if one exists).\nExample: For Beats this would be beat.id.', + 'agent.id': + 'Unique identifier of this agent (if one exists).\nExample: For Beats this would be beat.id.', 'agent.name': 'Custom name of the agent.\nThis is a name that can be given to an agent. 
This can be helpful if for example two Filebeat instances are running on the same host but a human readable separation is needed on which Filebeat instance data is coming from.', 'agent.type': @@ -64,7 +72,8 @@ export const ECSFIELDS = { 'The cloud account or organization id used to identify different entities in a multi-tenant environment.\nExamples: AWS account id, Google Cloud ORG Id, or other unique identifier.', 'cloud.account.name': 'The cloud account name or alias used to identify different entities in a multi-tenant environment.\nExamples: AWS account name, Google Cloud ORG display name.', - 'cloud.availability_zone': 'Availability zone in which this host, resource, or service is located.', + 'cloud.availability_zone': + 'Availability zone in which this host, resource, or service is located.', 'cloud.instance.id': 'Instance ID of the host machine.', 'cloud.instance.name': 'Instance name of the host machine.', 'cloud.machine.type': 'Machine type of the host machine.', @@ -72,19 +81,26 @@ export const ECSFIELDS = { 'The cloud account or organization id used to identify different entities in a multi-tenant environment.\nExamples: AWS account id, Google Cloud ORG Id, or other unique identifier.', 'cloud.origin.account.name': 'The cloud account name or alias used to identify different entities in a multi-tenant environment.\nExamples: AWS account name, Google Cloud ORG display name.', - 'cloud.origin.availability_zone': 'Availability zone in which this host, resource, or service is located.', + 'cloud.origin.availability_zone': + 'Availability zone in which this host, resource, or service is located.', 'cloud.origin.instance.id': 'Instance ID of the host machine.', 'cloud.origin.instance.name': 'Instance name of the host machine.', 'cloud.origin.machine.type': 'Machine type of the host machine.', - 'cloud.origin.project.id': 'The cloud project identifier.\nExamples: Google Cloud Project id, Azure Project id.', - 'cloud.origin.project.name': 'The cloud project 
name.\nExamples: Google Cloud Project name, Azure Project name.', - 'cloud.origin.provider': 'Name of the cloud provider. Example values are aws, azure, gcp, or digitalocean.', + 'cloud.origin.project.id': + 'The cloud project identifier.\nExamples: Google Cloud Project id, Azure Project id.', + 'cloud.origin.project.name': + 'The cloud project name.\nExamples: Google Cloud Project name, Azure Project name.', + 'cloud.origin.provider': + 'Name of the cloud provider. Example values are aws, azure, gcp, or digitalocean.', 'cloud.origin.region': 'Region in which this host, resource, or service is located.', 'cloud.origin.service.name': 'The cloud service name is intended to distinguish services running on different platforms within a provider, eg AWS EC2 vs Lambda, GCP GCE vs App Engine, Azure VM vs App Server.\nExamples: app engine, app service, cloud run, fargate, lambda.', - 'cloud.project.id': 'The cloud project identifier.\nExamples: Google Cloud Project id, Azure Project id.', - 'cloud.project.name': 'The cloud project name.\nExamples: Google Cloud Project name, Azure Project name.', - 'cloud.provider': 'Name of the cloud provider. Example values are aws, azure, gcp, or digitalocean.', + 'cloud.project.id': + 'The cloud project identifier.\nExamples: Google Cloud Project id, Azure Project id.', + 'cloud.project.name': + 'The cloud project name.\nExamples: Google Cloud Project name, Azure Project name.', + 'cloud.provider': + 'Name of the cloud provider. 
Example values are aws, azure, gcp, or digitalocean.', 'cloud.region': 'Region in which this host, resource, or service is located.', 'cloud.service.name': 'The cloud service name is intended to distinguish services running on different platforms within a provider, eg AWS EC2 vs Lambda, GCP GCE vs App Engine, Azure VM vs App Server.\nExamples: app engine, app service, cloud run, fargate, lambda.', @@ -92,13 +108,17 @@ export const ECSFIELDS = { 'The cloud account or organization id used to identify different entities in a multi-tenant environment.\nExamples: AWS account id, Google Cloud ORG Id, or other unique identifier.', 'cloud.target.account.name': 'The cloud account name or alias used to identify different entities in a multi-tenant environment.\nExamples: AWS account name, Google Cloud ORG display name.', - 'cloud.target.availability_zone': 'Availability zone in which this host, resource, or service is located.', + 'cloud.target.availability_zone': + 'Availability zone in which this host, resource, or service is located.', 'cloud.target.instance.id': 'Instance ID of the host machine.', 'cloud.target.instance.name': 'Instance name of the host machine.', 'cloud.target.machine.type': 'Machine type of the host machine.', - 'cloud.target.project.id': 'The cloud project identifier.\nExamples: Google Cloud Project id, Azure Project id.', - 'cloud.target.project.name': 'The cloud project name.\nExamples: Google Cloud Project name, Azure Project name.', - 'cloud.target.provider': 'Name of the cloud provider. Example values are aws, azure, gcp, or digitalocean.', + 'cloud.target.project.id': + 'The cloud project identifier.\nExamples: Google Cloud Project id, Azure Project id.', + 'cloud.target.project.name': + 'The cloud project name.\nExamples: Google Cloud Project name, Azure Project name.', + 'cloud.target.provider': + 'Name of the cloud provider. 
Example values are aws, azure, gcp, or digitalocean.', 'cloud.target.region': 'Region in which this host, resource, or service is located.', 'cloud.target.service.name': 'The cloud service name is intended to distinguish services running on different platforms within a provider, eg AWS EC2 vs Lambda, GCP GCE vs App Engine, Azure VM vs App Server.\nExamples: app engine, app service, cloud run, fargate, lambda.', @@ -114,7 +134,8 @@ export const ECSFIELDS = { 'container.image.name': 'Name of the image the container was built on.', 'container.image.tag': 'Container image tags.', 'container.labels': 'Image labels.', - 'container.memory.usage': 'Memory usage percentage and it ranges from 0 to 1. Scaling factor: 1000.', + 'container.memory.usage': + 'Memory usage percentage and it ranges from 0 to 1. Scaling factor: 1000.', 'container.name': 'Container name.', 'container.network.egress.bytes': 'The number of bytes (gauge) sent out on all network interfaces by the container since the last metric collection.', @@ -213,7 +234,8 @@ export const ECSFIELDS = { 'A hash of the Go language imports in a PE file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', 'dll.pe.go_imports': 'List of imported Go language element names and types.', 'dll.pe.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', - 'dll.pe.go_imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of Go imports.', + 'dll.pe.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', 'dll.pe.go_stripped': 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', 'dll.pe.imphash': @@ -221,7 +243,8 @@ export const ECSFIELDS = { 'dll.pe.import_hash': 'A hash of the imports in a PE file. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a synonym for imphash.', 'dll.pe.imports': 'List of imported element names and types.', - 'dll.pe.imports_names_entropy': 'Shannon entropy calculation from the list of imported element names and types.', + 'dll.pe.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', 'dll.pe.imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of imported element names and types.', 'dll.pe.original_file_name': 'Internal name of the file, provided at compile-time.', @@ -234,7 +257,8 @@ export const ECSFIELDS = { 'dll.pe.sections.name': 'PE Section List name.', 'dll.pe.sections.physical_size': 'PE Section List physical size.', 'dll.pe.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', - 'dll.pe.sections.virtual_size': 'PE Section List virtual size. This is always the same as `physical_size`.', + 'dll.pe.sections.virtual_size': + 'PE Section List virtual size. This is always the same as `physical_size`.', 'dns.answers': 'An array containing an object for each answer section returned by the server.\nThe main keys that should be present in these objects are defined by ECS. Records that have more information may contain more keys than what ECS defines.\nNot all DNS data sources give all details about DNS answers. At minimum, answer objects must contain the `data` key. 
If more information is available, map as much of it to ECS as possible, and add any additional fields to the answer objects as custom fields.', 'dns.answers.class': 'The class of DNS data contained in this resource record.', @@ -267,7 +291,8 @@ export const ECSFIELDS = { 'The type of DNS event captured, query or answer.\nIf your source of DNS events only gives you DNS queries, you should only create dns events of type `dns.type:query`.\nIf your source of DNS events gives you answers as well, you should create one event per query (optionally as soon as the query is seen). And a second event containing all query details as well as an array of answers.', 'ecs.version': 'ECS version this event conforms to. `ecs.version` is a required field and must exist in all events.\nWhen querying across multiple indices -- which may conform to slightly different ECS versions -- this field lets integrations adjust to the schema version of the events.', - 'email.attachments': 'A list of objects describing the attachment files sent along with an email message.', + 'email.attachments': + 'A list of objects describing the attachment files sent along with an email message.', 'email.attachments.file.extension': 'Attachment file extension, excluding the leading dot.', 'email.attachments.file.hash.md5': 'MD5 hash.', 'email.attachments.file.hash.sha1': 'SHA1 hash.', @@ -282,10 +307,13 @@ export const ECSFIELDS = { 'email.attachments.file.size': 'Attachment file size in bytes.', 'email.bcc.address': 'The email address of BCC recipient', 'email.cc.address': 'The email address of CC recipient', - 'email.content_type': 'Information about how the message is to be displayed.\nTypically a MIME type.', - 'email.delivery_timestamp': 'The date and time when the email message was received by the service or client.', + 'email.content_type': + 'Information about how the message is to be displayed.\nTypically a MIME type.', + 'email.delivery_timestamp': + 'The date and time when the email message was 
received by the service or client.', 'email.direction': 'The direction of the message based on the sending and receiving domains.', - 'email.from.address': 'The email address of the sender, typically from the RFC 5322 `From:` header field.', + 'email.from.address': + 'The email address of the sender, typically from the RFC 5322 `From:` header field.', 'email.local_id': 'Unique identifier given to the email by the source that created the event.\nIdentifier is not persistent across hops.', 'email.message_id': @@ -294,10 +322,12 @@ export const ECSFIELDS = { 'The date and time the email message was composed. Many email clients will fill in this value automatically when the message is sent by a user.', 'email.reply_to.address': 'The address that replies should be delivered to based on the value in the RFC 5322 `Reply-To:` header.', - 'email.sender.address': 'Per RFC 5322, specifies the address responsible for the actual transmission of the message.', + 'email.sender.address': + 'Per RFC 5322, specifies the address responsible for the actual transmission of the message.', 'email.subject': 'A brief summary of the topic of the message.', 'email.to.address': 'The email address of recipient', - 'email.x_mailer': 'The name of the application that was used to draft and send the original email message.', + 'email.x_mailer': + 'The name of the application that was used to draft and send the original email message.', 'error.code': 'Error code describing the error.', 'error.id': 'Unique identifier for the error.', 'error.message': 'Error message.', @@ -317,8 +347,10 @@ export const ECSFIELDS = { "Name of the dataset.\nIf an event source publishes more than one type of log or events (e.g. 
access log, error log), the dataset is used to specify which one the event comes from.\nIt's recommended but not required to start the dataset name with the module name, followed by a dot, then the dataset name.", 'event.duration': 'Duration of the event in nanoseconds.\nIf event.start and event.end are known this value should be the difference between the end and start time.', - 'event.end': 'event.end contains the date when the event ended or when the activity was last observed.', - 'event.hash': 'Hash (perhaps logstash fingerprint) of raw field to be able to demonstrate log integrity.', + 'event.end': + 'event.end contains the date when the event ended or when the activity was last observed.', + 'event.hash': + 'Hash (perhaps logstash fingerprint) of raw field to be able to demonstrate log integrity.', 'event.id': 'Unique ID to describe the event.', 'event.ingested': "Timestamp when an event arrived in the central data store.\nThis is different from `@timestamp`, which is when the event originally occurred. It's also different from `event.created`, which is meant to capture the first time an agent saw the event.\nIn normal conditions, assuming no tampering, the timestamps should chronologically look like this: `@timestamp` < `event.created` < `event.ingested`.", @@ -344,7 +376,8 @@ export const ECSFIELDS = { 'Sequence number of the event.\nThe sequence number is a value published by some event sources, to make the exact ordering of events unambiguous, regardless of the timestamp precision.', 'event.severity': "The numeric severity of the event according to your event source.\nWhat the different severity values mean can be different between sources and use cases. It's up to the implementer to make sure severities are consistent across events from the same source.\nThe Syslog severity belongs in `log.syslog.severity.code`. `event.severity` is meant to represent the severity according to the event source (e.g. firewall, IDS). 
If the event source does not publish its own severity, you may optionally copy the `log.syslog.severity.code` to `event.severity`.", - 'event.start': 'event.start contains the date when the event started or when the activity was first observed.', + 'event.start': + 'event.start contains the date when the event started or when the activity was first observed.', 'event.timezone': 'This field should be populated when the event\'s timestamp does not include timezone information already (e.g. default Syslog timestamps). It\'s optional otherwise.\nAcceptable timezone formats are: a canonical ID (e.g. "Europe/Amsterdam"), abbreviated (e.g. "EST") or an HH:mm differential (e.g. "-05:00").', 'event.type': @@ -359,7 +392,8 @@ export const ECSFIELDS = { 'faas.trigger.request_id': 'The ID of the trigger request , message, event, etc.', 'faas.trigger.type': 'The trigger for the function execution.', 'faas.version': 'The version of a serverless function.', - 'file.accessed': 'Last time the file was accessed.\nNote that not all filesystems keep track of access time.', + 'file.accessed': + 'Last time the file was accessed.\nNote that not all filesystems keep track of access time.', 'file.attributes': "Array of file attributes.\nAttributes names will vary by platform. Here's a non-exhaustive list of values that are expected in this field: archive, compressed, directory, encrypted, execute, hidden, read, readonly, system, write.", 'file.code_signature.digest_algorithm': @@ -372,7 +406,8 @@ export const ECSFIELDS = { 'file.code_signature.subject_name': 'Subject name of the code signer', 'file.code_signature.team_id': 'The team identifier used to sign the process.\nThis is used to identify the team or vendor of a software product. 
The field is relevant to Apple *OS only.', - 'file.code_signature.timestamp': 'Date and time when the code signature was generated and signed.', + 'file.code_signature.timestamp': + 'Date and time when the code signature was generated and signed.', 'file.code_signature.trusted': 'Stores the trust status of the certificate chain.\nValidating the trust of the certificate chain may be complicated, and this field should only be populated by tools that actively check the status.', 'file.code_signature.valid': @@ -381,7 +416,8 @@ export const ECSFIELDS = { 'file.ctime': 'Last time the file attributes or metadata changed.\nNote that changes to the file content will update `mtime`. This implies `ctime` will be adjusted at the same time, since `mtime` is an attribute of the file.', 'file.device': 'Device that is the source of the file.', - 'file.directory': 'Directory where the file is located. It should include the drive letter, when appropriate.', + 'file.directory': + 'Directory where the file is located. It should include the drive letter, when appropriate.', 'file.drive_letter': 'Drive letter where the file is located. This field is only relevant on Windows.\nThe value should be uppercase, and not include the colon.', 'file.elf.architecture': 'Machine architecture of the ELF file.', @@ -394,7 +430,8 @@ export const ECSFIELDS = { 'A hash of the Go language imports in an ELF file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', 'file.elf.go_imports': 'List of imported Go language element names and types.', 'file.elf.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', - 'file.elf.go_imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of Go imports.', + 'file.elf.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', 'file.elf.go_stripped': 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', 'file.elf.header.abi_version': 'Version of the ELF Application Binary Interface (ABI).', @@ -408,7 +445,8 @@ export const ECSFIELDS = { 'file.elf.import_hash': 'A hash of the imports in an ELF file. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is an ELF implementation of the Windows PE imphash.', 'file.elf.imports': 'List of imported element names and types.', - 'file.elf.imports_names_entropy': 'Shannon entropy calculation from the list of imported element names and types.', + 'file.elf.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', 'file.elf.imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of imported element names and types.', 'file.elf.sections': @@ -447,13 +485,15 @@ export const ECSFIELDS = { 'A hash of the Go language imports in a Mach-O file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', 'file.macho.go_imports': 'List of imported Go language element names and types.', 'file.macho.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', - 'file.macho.go_imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of Go imports.', + 'file.macho.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', 'file.macho.go_stripped': 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', 'file.macho.import_hash': 'A hash of the imports in a Mach-O file. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a synonym for symhash.', 'file.macho.imports': 'List of imported element names and types.', - 'file.macho.imports_names_entropy': 'Shannon entropy calculation from the list of imported element names and types.', + 'file.macho.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', 'file.macho.imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of imported element names and types.', 'file.macho.sections': @@ -462,7 +502,8 @@ export const ECSFIELDS = { 'file.macho.sections.name': 'Mach-O Section List name.', 'file.macho.sections.physical_size': 'Mach-O Section List physical size.', 'file.macho.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', - 'file.macho.sections.virtual_size': 'Mach-O Section List 
virtual size. This is always the same as `physical_size`.', + 'file.macho.sections.virtual_size': + 'Mach-O Section List virtual size. This is always the same as `physical_size`.', 'file.macho.symhash': 'A hash of the imports in a Mach-O file. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a Mach-O implementation of the Windows PE imphash', 'file.mime_type': @@ -471,7 +512,8 @@ export const ECSFIELDS = { 'file.mtime': 'Last time the file content was modified.', 'file.name': 'Name of the file including the extension, without the directory.', 'file.owner': "File owner's username.", - 'file.path': 'Full path to the file, including the file name. It should include the drive letter, when appropriate.', + 'file.path': + 'Full path to the file, including the file name. It should include the drive letter, when appropriate.', 'file.pe.architecture': 'CPU architecture target for the file.', 'file.pe.company': 'Internal company name of the file, provided at compile-time.', 'file.pe.description': 'Internal description of the file, provided at compile-time.', @@ -480,7 +522,8 @@ export const ECSFIELDS = { 'A hash of the Go language imports in a PE file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', 'file.pe.go_imports': 'List of imported Go language element names and types.', 'file.pe.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', - 'file.pe.go_imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of Go imports.', + 'file.pe.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', 'file.pe.go_stripped': 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', 'file.pe.imphash': @@ -488,7 +531,8 @@ export const ECSFIELDS = { 'file.pe.import_hash': 'A hash of the imports in a PE file. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a synonym for imphash.', 'file.pe.imports': 'List of imported element names and types.', - 'file.pe.imports_names_entropy': 'Shannon entropy calculation from the list of imported element names and types.', + 'file.pe.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', 'file.pe.imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of imported element names and types.', 'file.pe.original_file_name': 'Internal name of the file, provided at compile-time.', @@ -501,7 +545,8 @@ export const ECSFIELDS = { 'file.pe.sections.name': 'PE Section List name.', 'file.pe.sections.physical_size': 'PE Section List physical size.', 'file.pe.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', - 'file.pe.sections.virtual_size': 'PE Section List virtual size. This is always the same as `physical_size`.', + 'file.pe.sections.virtual_size': + 'PE Section List virtual size. This is always the same as `physical_size`.', 'file.size': 'File size in bytes.\nOnly relevant when `file.type` is "file".', 'file.target_path': 'Target path for symlinks.', 'file.type': 'File type (file, dir, or symlink).', @@ -510,17 +555,20 @@ export const ECSFIELDS = { 'List of subject alternative names (SAN). 
Name types vary by certificate authority and certificate type but commonly contain IP addresses, DNS names (and wildcards), and email addresses.', 'file.x509.issuer.common_name': 'List of common name (CN) of issuing certificate authority.', 'file.x509.issuer.country': 'List of country \\(C) codes', - 'file.x509.issuer.distinguished_name': 'Distinguished name (DN) of issuing certificate authority.', + 'file.x509.issuer.distinguished_name': + 'Distinguished name (DN) of issuing certificate authority.', 'file.x509.issuer.locality': 'List of locality names (L)', 'file.x509.issuer.organization': 'List of organizations (O) of issuing certificate authority.', - 'file.x509.issuer.organizational_unit': 'List of organizational units (OU) of issuing certificate authority.', + 'file.x509.issuer.organizational_unit': + 'List of organizational units (OU) of issuing certificate authority.', 'file.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', 'file.x509.not_after': 'Time at which the certificate is no longer considered valid.', 'file.x509.not_before': 'Time at which the certificate is first considered valid.', 'file.x509.public_key_algorithm': 'Algorithm used to generate the public key.', 'file.x509.public_key_curve': 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', - 'file.x509.public_key_exponent': 'Exponent used to derive the public key. This is algorithm specific.', + 'file.x509.public_key_exponent': + 'Exponent used to derive the public key. This is algorithm specific.', 'file.x509.public_key_size': 'The size of the public key space in bits.', 'file.x509.serial_number': 'Unique serial number issued by the certificate authority. For consistency, if this value is alphanumeric, it should be formatted without colons and uppercase characters.', @@ -528,7 +576,8 @@ export const ECSFIELDS = { 'Identifier for certificate signature algorithm. We recommend using names found in Go Lang Crypto library. 
See https://github.com/golang/go/blob/go1.14/src/crypto/x509/x509.go#L337-L353.', 'file.x509.subject.common_name': 'List of common names (CN) of subject.', 'file.x509.subject.country': 'List of country \\(C) code', - 'file.x509.subject.distinguished_name': 'Distinguished name (DN) of the certificate subject entity.', + 'file.x509.subject.distinguished_name': + 'Distinguished name (DN) of the certificate subject entity.', 'file.x509.subject.locality': 'List of locality names (L)', 'file.x509.subject.organization': 'List of organizations (O) of subject.', 'file.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', @@ -629,13 +678,15 @@ export const ECSFIELDS = { "Original log level of the log event.\nIf the source of the event provides a log level or textual severity, this is the one that goes in `log.level`. If your source doesn't specify one, you may put your event transport's severity here (e.g. Syslog severity).\nSome examples are `warn`, `err`, `i`, `informational`.", 'log.logger': 'The name of the logger inside an application. This is usually the name of the class which initialized the logger, or can be a custom name.', - 'log.origin.file.line': 'The line number of the file containing the source code which originated the log event.', + 'log.origin.file.line': + 'The line number of the file containing the source code which originated the log event.', 'log.origin.file.name': 'The name of the file containing the source code which originated the log event.\nNote that this field is not meant to capture the log file. The correct field to capture the log file is `log.file.path`.', 'log.origin.function': 'The name of the function or method which originated the log event.', 'log.syslog': 'The Syslog metadata of the event, if the event was transmitted via Syslog. 
Please see RFCs 5424 or 3164.', - 'log.syslog.appname': 'The device or application that originated the Syslog message, if available.', + 'log.syslog.appname': + 'The device or application that originated the Syslog message, if available.', 'log.syslog.facility.code': 'The Syslog numeric facility of the log event, if available.\nAccording to RFCs 5424 and 3164, this value should be an integer between 0 and 23.', 'log.syslog.facility.name': 'The Syslog text-based facility of the log event, if available.', @@ -652,7 +703,8 @@ export const ECSFIELDS = { "The Syslog numeric severity of the log event, if available.\nIf the event source publishing via Syslog provides a different severity value (e.g. firewall, IDS), your source's text severity should go to `log.level`. If the event source does not specify a distinct severity, you can optionally copy the Syslog severity to `log.level`.", 'log.syslog.structured_data': 'Structured data expressed in RFC 5424 messages, if available. These are key-value pairs formed from the structured data portion of the syslog message, as defined in RFC 5424 Section 6.3.', - 'log.syslog.version': 'The version of the Syslog protocol specification. Only applicable for RFC 5424 messages.', + 'log.syslog.version': + 'The version of the Syslog protocol specification. Only applicable for RFC 5424 messages.', message: 'For log events the message field contains the log message, optimized for viewing in a log viewer.\nFor structured logs without an original message field, other fields can be concatenated to form a human-readable summary of the event.\nIf multiple messages exist, they can be combined into one message.', 'network.application': @@ -685,7 +737,8 @@ export const ECSFIELDS = { 'Observer.egress holds information like interface number and name, vlan, and zone information to classify egress traffic. 
Single armed monitoring such as a network sensor on a span port should only use observer.ingress to categorize traffic.', 'observer.egress.interface.alias': 'Interface alias as reported by the system, typically used in firewall implementations for e.g. inside, outside, or dmz logical interface naming.', - 'observer.egress.interface.id': 'Interface ID as reported by an observer (typically SNMP interface ID).', + 'observer.egress.interface.id': + 'Interface ID as reported by an observer (typically SNMP interface ID).', 'observer.egress.interface.name': 'Interface name as reported by the system.', 'observer.egress.vlan.id': 'VLAN ID as reported by the observer.', 'observer.egress.vlan.name': 'Optional VLAN name as reported by the observer.', @@ -709,7 +762,8 @@ export const ECSFIELDS = { 'Observer.ingress holds information like interface number and name, vlan, and zone information to classify ingress traffic. Single armed monitoring such as a network sensor on a span port should only use observer.ingress to categorize traffic.', 'observer.ingress.interface.alias': 'Interface alias as reported by the system, typically used in firewall implementations for e.g. 
inside, outside, or dmz logical interface naming.', - 'observer.ingress.interface.id': 'Interface ID as reported by an observer (typically SNMP interface ID).', + 'observer.ingress.interface.id': + 'Interface ID as reported by an observer (typically SNMP interface ID).', 'observer.ingress.interface.name': 'Interface name as reported by the system.', 'observer.ingress.vlan.id': 'VLAN ID as reported by the observer.', 'observer.ingress.vlan.name': 'Optional VLAN name as reported by the observer.', @@ -740,7 +794,8 @@ export const ECSFIELDS = { 'orchestrator.cluster.url': 'URL of the API used to manage the cluster.', 'orchestrator.cluster.version': 'The version of the cluster.', 'orchestrator.namespace': 'Namespace in which the action is taking place.', - 'orchestrator.organization': 'Organization affected by the event (for multi-tenant orchestrator setups).', + 'orchestrator.organization': + 'Organization affected by the event (for multi-tenant orchestrator setups).', 'orchestrator.resource.annotation': 'The list of annotations added to the resource.', 'orchestrator.resource.id': 'Unique ID of the resource being acted upon.', 'orchestrator.resource.ip': @@ -783,7 +838,8 @@ export const ECSFIELDS = { 'process.code_signature.subject_name': 'Subject name of the code signer', 'process.code_signature.team_id': 'The team identifier used to sign the process.\nThis is used to identify the team or vendor of a software product. 
The field is relevant to Apple *OS only.', - 'process.code_signature.timestamp': 'Date and time when the code signature was generated and signed.', + 'process.code_signature.timestamp': + 'Date and time when the code signature was generated and signed.', 'process.code_signature.trusted': 'Stores the trust status of the certificate chain.\nValidating the trust of the certificate chain may be complicated, and this field should only be populated by tools that actively check the status.', 'process.code_signature.valid': @@ -799,8 +855,10 @@ export const ECSFIELDS = { 'process.elf.go_import_hash': 'A hash of the Go language imports in an ELF file excluding standard library imports. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', 'process.elf.go_imports': 'List of imported Go language element names and types.', - 'process.elf.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', - 'process.elf.go_imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of Go imports.', + 'process.elf.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', + 'process.elf.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', 'process.elf.go_stripped': 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', 'process.elf.header.abi_version': 'Version of the ELF Application Binary Interface (ABI).', @@ -814,7 +872,8 @@ export const ECSFIELDS = { 'process.elf.import_hash': 'A hash of the imports in an ELF file. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is an ELF implementation of the Windows PE imphash.', 'process.elf.imports': 'List of imported element names and types.', - 'process.elf.imports_names_entropy': 'Shannon entropy calculation from the list of imported element names and types.', + 'process.elf.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', 'process.elf.imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of imported element names and types.', 'process.elf.sections': @@ -878,7 +937,8 @@ export const ECSFIELDS = { 'process.entry_leader.saved_user.id': 'Unique identifier of the user.', 'process.entry_leader.saved_user.name': 'Short name or login of the user.', 'process.entry_leader.start': 'The time the process started.', - 'process.entry_leader.supplemental_groups.id': 'Unique identifier for the group on the system/platform.', + 'process.entry_leader.supplemental_groups.id': + 'Unique identifier for the group on the system/platform.', 'process.entry_leader.supplemental_groups.name': 'Name of the group.', 'process.entry_leader.tty': 'Information about the controlling TTY device. 
If set, the process belongs to an interactive session.', @@ -920,7 +980,8 @@ export const ECSFIELDS = { 'process.group_leader.saved_user.id': 'Unique identifier of the user.', 'process.group_leader.saved_user.name': 'Short name or login of the user.', 'process.group_leader.start': 'The time the process started.', - 'process.group_leader.supplemental_groups.id': 'Unique identifier for the group on the system/platform.', + 'process.group_leader.supplemental_groups.id': + 'Unique identifier for the group on the system/platform.', 'process.group_leader.supplemental_groups.name': 'Name of the group.', 'process.group_leader.tty': 'Information about the controlling TTY device. If set, the process belongs to an interactive session.', @@ -942,7 +1003,8 @@ export const ECSFIELDS = { 'Whether the process is connected to an interactive shell.\nProcess interactivity is inferred from the processes file descriptors. If the character device for the controlling tty is the same as stdin and stderr for the process, the process is considered interactive.\nNote: A non-interactive process can belong to an interactive session and is simply one that does not have open file descriptors reading the controlling TTY on FD 0 (stdin) or writing to the controlling TTY on FD 2 (stderr). 
A backgrounded process is still considered interactive if stdin and stderr are connected to the controlling TTY.', 'process.io': 'A chunk of input or output (IO) from a single process.\nThis field only appears on the top level process object, which is the process that wrote the output or read the input.', - 'process.io.bytes_skipped': 'An array of byte offsets and lengths denoting where IO data has been skipped.', + 'process.io.bytes_skipped': + 'An array of byte offsets and lengths denoting where IO data has been skipped.', 'process.io.bytes_skipped.length': 'The length of bytes skipped.', 'process.io.bytes_skipped.offset': "The byte offset into this event's io.text (or io.bytes in the future) where length bytes were skipped.", @@ -958,8 +1020,10 @@ export const ECSFIELDS = { 'process.macho.go_import_hash': 'A hash of the Go language imports in a Mach-O file excluding standard library imports. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', 'process.macho.go_imports': 'List of imported Go language element names and types.', - 'process.macho.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', - 'process.macho.go_imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of Go imports.', + 'process.macho.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', + 'process.macho.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', 'process.macho.go_stripped': 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', 'process.macho.import_hash': @@ -974,7 +1038,8 @@ 
export const ECSFIELDS = { 'process.macho.sections.entropy': 'Shannon entropy calculation from the section.', 'process.macho.sections.name': 'Mach-O Section List name.', 'process.macho.sections.physical_size': 'Mach-O Section List physical size.', - 'process.macho.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'process.macho.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', 'process.macho.sections.virtual_size': 'Mach-O Section List virtual size. This is always the same as `physical_size`.', 'process.macho.symhash': @@ -994,7 +1059,8 @@ export const ECSFIELDS = { 'process.parent.code_signature.subject_name': 'Subject name of the code signer', 'process.parent.code_signature.team_id': 'The team identifier used to sign the process.\nThis is used to identify the team or vendor of a software product. The field is relevant to Apple *OS only.', - 'process.parent.code_signature.timestamp': 'Date and time when the code signature was generated and signed.', + 'process.parent.code_signature.timestamp': + 'Date and time when the code signature was generated and signed.', 'process.parent.code_signature.trusted': 'Stores the trust status of the certificate chain.\nValidating the trust of the certificate chain may be complicated, and this field should only be populated by tools that actively check the status.', 'process.parent.code_signature.valid': @@ -1010,7 +1076,8 @@ export const ECSFIELDS = { 'process.parent.elf.go_import_hash': 'A hash of the Go language imports in an ELF file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', 'process.parent.elf.go_imports': 'List of imported Go language element names and types.', - 'process.parent.elf.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'process.parent.elf.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', 'process.parent.elf.go_imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of Go imports.', 'process.parent.elf.go_stripped': @@ -1039,7 +1106,8 @@ export const ECSFIELDS = { 'process.parent.elf.sections.physical_offset': 'ELF Section List offset.', 'process.parent.elf.sections.physical_size': 'ELF Section List physical size.', 'process.parent.elf.sections.type': 'ELF Section List type.', - 'process.parent.elf.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'process.parent.elf.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', 'process.parent.elf.sections.virtual_address': 'ELF Section List virtual address.', 'process.parent.elf.sections.virtual_size': 'ELF Section List virtual size.', 'process.parent.elf.segments': @@ -1072,7 +1140,8 @@ export const ECSFIELDS = { 'process.parent.macho.go_import_hash': 'A hash of the Go language imports in a Mach-O file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', 'process.parent.macho.go_imports': 'List of imported Go language element names and types.', - 'process.parent.macho.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'process.parent.macho.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', 'process.parent.macho.go_imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of Go imports.', 'process.parent.macho.go_stripped': @@ -1089,7 +1158,8 @@ export const ECSFIELDS = { 'process.parent.macho.sections.entropy': 'Shannon entropy calculation from the section.', 'process.parent.macho.sections.name': 'Mach-O Section List name.', 'process.parent.macho.sections.physical_size': 'Mach-O Section List physical size.', - 'process.parent.macho.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'process.parent.macho.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', 'process.parent.macho.sections.virtual_size': 'Mach-O Section List virtual size. This is always the same as `physical_size`.', 'process.parent.macho.symhash': @@ -1102,7 +1172,8 @@ export const ECSFIELDS = { 'process.parent.pe.go_import_hash': 'A hash of the Go language imports in a PE file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', 'process.parent.pe.go_imports': 'List of imported Go language element names and types.', - 'process.parent.pe.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'process.parent.pe.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', 'process.parent.pe.go_imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of Go imports.', 'process.parent.pe.go_stripped': @@ -1125,7 +1196,8 @@ export const ECSFIELDS = { 'process.parent.pe.sections.entropy': 'Shannon entropy calculation from the section.', 'process.parent.pe.sections.name': 'PE Section List name.', 'process.parent.pe.sections.physical_size': 'PE Section List physical size.', - 'process.parent.pe.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'process.parent.pe.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', 'process.parent.pe.sections.virtual_size': 'PE Section List virtual size. 
This is always the same as `physical_size`.', 'process.parent.pgid': @@ -1140,7 +1212,8 @@ export const ECSFIELDS = { 'process.parent.saved_user.id': 'Unique identifier of the user.', 'process.parent.saved_user.name': 'Short name or login of the user.', 'process.parent.start': 'The time the process started.', - 'process.parent.supplemental_groups.id': 'Unique identifier for the group on the system/platform.', + 'process.parent.supplemental_groups.id': + 'Unique identifier for the group on the system/platform.', 'process.parent.supplemental_groups.name': 'Name of the group.', 'process.parent.thread.id': 'Thread ID.', 'process.parent.thread.name': 'Thread name.', @@ -1164,7 +1237,8 @@ export const ECSFIELDS = { 'A hash of the Go language imports in a PE file excluding standard library imports. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', 'process.pe.go_imports': 'List of imported Go language element names and types.', 'process.pe.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', - 'process.pe.go_imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of Go imports.', + 'process.pe.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', 'process.pe.go_stripped': 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', 'process.pe.imphash': @@ -1172,7 +1246,8 @@ export const ECSFIELDS = { 'process.pe.import_hash': 'A hash of the imports in a PE file. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a synonym for imphash.', 'process.pe.imports': 'List of imported element names and types.', - 'process.pe.imports_names_entropy': 'Shannon entropy calculation from the list of imported element names and types.', + 'process.pe.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', 'process.pe.imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of imported element names and types.', 'process.pe.original_file_name': 'Internal name of the file, provided at compile-time.', @@ -1185,7 +1260,8 @@ export const ECSFIELDS = { 'process.pe.sections.name': 'PE Section List name.', 'process.pe.sections.physical_size': 'PE Section List physical size.', 'process.pe.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', - 'process.pe.sections.virtual_size': 'PE Section List virtual size. This is always the same as `physical_size`.', + 'process.pe.sections.virtual_size': + 'PE Section List virtual size. This is always the same as `physical_size`.', 'process.pgid': 'Deprecated for removal in next major version release. This field is superseded by `process.group_leader.pid`.\nIdentifier of the group of processes the process belongs to.', 'process.pid': 'Process id.', @@ -1231,12 +1307,14 @@ export const ECSFIELDS = { 'process.session_leader.real_user.name': 'Short name or login of the user.', 'process.session_leader.same_as_process': "This boolean is used to identify if a leader process is the same as the top level process.\nFor example, if `process.group_leader.same_as_process = true`, it means the process event in question is the leader of its process group. 
Details under `process.*` like `pid` would be the same under `process.group_leader.*` The same applies for both `process.session_leader` and `process.entry_leader`.\nThis field exists to the benefit of EQL and other rule engines since it's not possible to compare equality between two fields in a single document. e.g `process.entity_id` = `process.group_leader.entity_id` (top level process is the process group leader) OR `process.entity_id` = `process.entry_leader.entity_id` (top level process is the entry session leader)\nInstead these rules could be written like: `process.group_leader.same_as_process: true` OR `process.entry_leader.same_as_process: true`\nNote: This field is only set on `process.entry_leader`, `process.session_leader` and `process.group_leader`.", - 'process.session_leader.saved_group.id': 'Unique identifier for the group on the system/platform.', + 'process.session_leader.saved_group.id': + 'Unique identifier for the group on the system/platform.', 'process.session_leader.saved_group.name': 'Name of the group.', 'process.session_leader.saved_user.id': 'Unique identifier of the user.', 'process.session_leader.saved_user.name': 'Short name or login of the user.', 'process.session_leader.start': 'The time the process started.', - 'process.session_leader.supplemental_groups.id': 'Unique identifier for the group on the system/platform.', + 'process.session_leader.supplemental_groups.id': + 'Unique identifier for the group on the system/platform.', 'process.session_leader.supplemental_groups.name': 'Name of the group.', 'process.session_leader.tty': 'Information about the controlling TTY device. If set, the process belongs to an interactive session.', @@ -1254,7 +1332,8 @@ export const ECSFIELDS = { 'process.thread.name': 'Thread name.', 'process.title': 'Process title.\nThe proctitle, some times the same as process name. 
Can also be different: for example a browser setting its title to the web page currently opened.', - 'process.tty': 'Information about the controlling TTY device. If set, the process belongs to an interactive session.', + 'process.tty': + 'Information about the controlling TTY device. If set, the process belongs to an interactive session.', 'process.tty.char_device.major': 'The major number identifies the driver associated with the device. The character device\'s major and minor numbers can be algorithmically combined to produce the more familiar terminal identifiers such as "ttyS0" and "pts/0". For more details, please refer to the Linux kernel documentation.', 'process.tty.char_device.minor': @@ -1284,11 +1363,13 @@ export const ECSFIELDS = { 'related.user': 'All the user names or other user identifiers seen on the event.', 'rule.author': 'Name, organization, or pseudonym of the author or authors who created the rule used to generate this event.', - 'rule.category': 'A categorization value keyword used by the entity using the rule for detection of this event.', + 'rule.category': + 'A categorization value keyword used by the entity using the rule for detection of this event.', 'rule.description': 'The description of the rule generating the event.', 'rule.id': 'A rule ID that is unique within the scope of an agent, observer, or other entity using the rule for detection of this event.', - 'rule.license': 'Name of the license under which the rule used to generate this event is made available.', + 'rule.license': + 'Name of the license under which the rule used to generate this event is made available.', 'rule.name': 'The name of the rule or signature generating the event.', 'rule.reference': "Reference URL to additional information about the rule used to generate this event.\nThe URL can point to the vendor's documentation about the rule. 
If that's not available, it can also be a link to a more general page describing this type of alert.", @@ -1469,7 +1550,8 @@ export const ECSFIELDS = { 'threat.enrichments.indicator.as.organization.name': 'Organization name.', 'threat.enrichments.indicator.confidence': 'Identifies the vendor-neutral confidence rating using the None/Low/Medium/High scale defined in Appendix A of the STIX 2.1 framework. Vendor-specific confidence scales may be added as custom fields.', - 'threat.enrichments.indicator.description': 'Describes the type of action conducted by the threat.', + 'threat.enrichments.indicator.description': + 'Describes the type of action conducted by the threat.', 'threat.enrichments.indicator.email.address': 'Identifies a threat indicator as an email address (irrespective of direction).', 'threat.enrichments.indicator.file.accessed': @@ -1478,12 +1560,14 @@ export const ECSFIELDS = { "Array of file attributes.\nAttributes names will vary by platform. Here's a non-exhaustive list of values that are expected in this field: archive, compressed, directory, encrypted, execute, hidden, read, readonly, system, write.", 'threat.enrichments.indicator.file.code_signature.digest_algorithm': 'The hashing algorithm used to sign the process.\nThis value can distinguish signatures when a file is signed multiple times by the same signer but with a different digest algorithm.', - 'threat.enrichments.indicator.file.code_signature.exists': 'Boolean to capture if a signature is present.', + 'threat.enrichments.indicator.file.code_signature.exists': + 'Boolean to capture if a signature is present.', 'threat.enrichments.indicator.file.code_signature.signing_id': 'The identifier used to sign the process.\nThis is used to identify the application manufactured by a software vendor. 
The field is relevant to Apple *OS only.', 'threat.enrichments.indicator.file.code_signature.status': 'Additional information about the certificate status.\nThis is useful for logging cryptographic errors with the certificate validity or trust status. Leave unpopulated if the validity or trust of the certificate was unchecked.', - 'threat.enrichments.indicator.file.code_signature.subject_name': 'Subject name of the code signer', + 'threat.enrichments.indicator.file.code_signature.subject_name': + 'Subject name of the code signer', 'threat.enrichments.indicator.file.code_signature.team_id': 'The team identifier used to sign the process.\nThis is used to identify the team or vendor of a software product. The field is relevant to Apple *OS only.', 'threat.enrichments.indicator.file.code_signature.timestamp': @@ -1509,19 +1593,22 @@ export const ECSFIELDS = { 'threat.enrichments.indicator.file.elf.exports': 'List of exported element names and types.', 'threat.enrichments.indicator.file.elf.go_import_hash': 'A hash of the Go language imports in an ELF file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', - 'threat.enrichments.indicator.file.elf.go_imports': 'List of imported Go language element names and types.', + 'threat.enrichments.indicator.file.elf.go_imports': + 'List of imported Go language element names and types.', 'threat.enrichments.indicator.file.elf.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', 'threat.enrichments.indicator.file.elf.go_imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of Go imports.', 'threat.enrichments.indicator.file.elf.go_stripped': 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', - 'threat.enrichments.indicator.file.elf.header.abi_version': 'Version of the ELF Application Binary Interface (ABI).', + 'threat.enrichments.indicator.file.elf.header.abi_version': + 'Version of the ELF Application Binary Interface (ABI).', 'threat.enrichments.indicator.file.elf.header.class': 'Header class of the ELF file.', 'threat.enrichments.indicator.file.elf.header.data': 'Data table of the ELF header.', 'threat.enrichments.indicator.file.elf.header.entrypoint': 'Header entrypoint of the ELF file.', 'threat.enrichments.indicator.file.elf.header.object_version': '"0x1" for original ELF files.', - 'threat.enrichments.indicator.file.elf.header.os_abi': 'Application Binary Interface (ABI) of the Linux OS.', + 'threat.enrichments.indicator.file.elf.header.os_abi': + 'Application Binary Interface (ABI) of the Linux OS.', 'threat.enrichments.indicator.file.elf.header.type': 'Header type of the ELF file.', 'threat.enrichments.indicator.file.elf.header.version': 'Version 
of the ELF header.', 'threat.enrichments.indicator.file.elf.import_hash': @@ -1533,8 +1620,10 @@ export const ECSFIELDS = { 'Variance for Shannon entropy calculation from the list of imported element names and types.', 'threat.enrichments.indicator.file.elf.sections': 'An array containing an object for each section of the ELF file.\nThe keys that should be present in these objects are defined by sub-fields underneath `elf.sections.*`.', - 'threat.enrichments.indicator.file.elf.sections.chi2': 'Chi-square probability distribution of the section.', - 'threat.enrichments.indicator.file.elf.sections.entropy': 'Shannon entropy calculation from the section.', + 'threat.enrichments.indicator.file.elf.sections.chi2': + 'Chi-square probability distribution of the section.', + 'threat.enrichments.indicator.file.elf.sections.entropy': + 'Shannon entropy calculation from the section.', 'threat.enrichments.indicator.file.elf.sections.flags': 'ELF Section List flags.', 'threat.enrichments.indicator.file.elf.sections.name': 'ELF Section List name.', 'threat.enrichments.indicator.file.elf.sections.physical_offset': 'ELF Section List offset.', @@ -1542,13 +1631,15 @@ export const ECSFIELDS = { 'threat.enrichments.indicator.file.elf.sections.type': 'ELF Section List type.', 'threat.enrichments.indicator.file.elf.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', - 'threat.enrichments.indicator.file.elf.sections.virtual_address': 'ELF Section List virtual address.', + 'threat.enrichments.indicator.file.elf.sections.virtual_address': + 'ELF Section List virtual address.', 'threat.enrichments.indicator.file.elf.sections.virtual_size': 'ELF Section List virtual size.', 'threat.enrichments.indicator.file.elf.segments': 'An array containing an object for each segment of the ELF file.\nThe keys that should be present in these objects are defined by sub-fields underneath `elf.segments.*`.', 'threat.enrichments.indicator.file.elf.segments.sections': 'ELF 
object segment sections.', 'threat.enrichments.indicator.file.elf.segments.type': 'ELF object segment type.', - 'threat.enrichments.indicator.file.elf.shared_libraries': 'List of shared libraries used by this ELF object.', + 'threat.enrichments.indicator.file.elf.shared_libraries': + 'List of shared libraries used by this ELF object.', 'threat.enrichments.indicator.file.elf.telfhash': 'telfhash symbol hash for ELF file.', 'threat.enrichments.indicator.file.extension': 'File extension, excluding the leading dot.\nNote that when the file name has multiple extensions (example.tar.gz), only the last one should be captured ("gz", not "tar.gz").', @@ -1568,17 +1659,22 @@ export const ECSFIELDS = { 'MIME type should identify the format of the file or stream of bytes using https://www.iana.org/assignments/media-types/media-types.xhtml[IANA official types], where possible. When more than one type is applicable, the most specific type should be used.', 'threat.enrichments.indicator.file.mode': 'Mode of the file in octal representation.', 'threat.enrichments.indicator.file.mtime': 'Last time the file content was modified.', - 'threat.enrichments.indicator.file.name': 'Name of the file including the extension, without the directory.', + 'threat.enrichments.indicator.file.name': + 'Name of the file including the extension, without the directory.', 'threat.enrichments.indicator.file.owner': "File owner's username.", 'threat.enrichments.indicator.file.path': 'Full path to the file, including the file name. 
It should include the drive letter, when appropriate.', 'threat.enrichments.indicator.file.pe.architecture': 'CPU architecture target for the file.', - 'threat.enrichments.indicator.file.pe.company': 'Internal company name of the file, provided at compile-time.', - 'threat.enrichments.indicator.file.pe.description': 'Internal description of the file, provided at compile-time.', - 'threat.enrichments.indicator.file.pe.file_version': 'Internal version of the file, provided at compile-time.', + 'threat.enrichments.indicator.file.pe.company': + 'Internal company name of the file, provided at compile-time.', + 'threat.enrichments.indicator.file.pe.description': + 'Internal description of the file, provided at compile-time.', + 'threat.enrichments.indicator.file.pe.file_version': + 'Internal version of the file, provided at compile-time.', 'threat.enrichments.indicator.file.pe.go_import_hash': 'A hash of the Go language imports in a PE file excluding standard library imports. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', - 'threat.enrichments.indicator.file.pe.go_imports': 'List of imported Go language element names and types.', + 'threat.enrichments.indicator.file.pe.go_imports': + 'List of imported Go language element names and types.', 'threat.enrichments.indicator.file.pe.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', 'threat.enrichments.indicator.file.pe.go_imports_names_var_entropy': @@ -1594,23 +1690,28 @@ export const ECSFIELDS = { 'Shannon entropy calculation from the list of imported element names and types.', 'threat.enrichments.indicator.file.pe.imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of imported 
element names and types.', - 'threat.enrichments.indicator.file.pe.original_file_name': 'Internal name of the file, provided at compile-time.', + 'threat.enrichments.indicator.file.pe.original_file_name': + 'Internal name of the file, provided at compile-time.', 'threat.enrichments.indicator.file.pe.pehash': 'A hash of the PE header and data from one or more PE sections. An pehash can be used to cluster files by transforming structural information about a file into a hash value.\nLearn more at https://www.usenix.org/legacy/events/leet09/tech/full_papers/wicherski/wicherski_html/index.html.', - 'threat.enrichments.indicator.file.pe.product': 'Internal product name of the file, provided at compile-time.', + 'threat.enrichments.indicator.file.pe.product': + 'Internal product name of the file, provided at compile-time.', 'threat.enrichments.indicator.file.pe.sections': 'An array containing an object for each section of the PE file.\nThe keys that should be present in these objects are defined by sub-fields underneath `pe.sections.*`.', - 'threat.enrichments.indicator.file.pe.sections.entropy': 'Shannon entropy calculation from the section.', + 'threat.enrichments.indicator.file.pe.sections.entropy': + 'Shannon entropy calculation from the section.', 'threat.enrichments.indicator.file.pe.sections.name': 'PE Section List name.', 'threat.enrichments.indicator.file.pe.sections.physical_size': 'PE Section List physical size.', 'threat.enrichments.indicator.file.pe.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', 'threat.enrichments.indicator.file.pe.sections.virtual_size': 'PE Section List virtual size. 
This is always the same as `physical_size`.', - 'threat.enrichments.indicator.file.size': 'File size in bytes.\nOnly relevant when `file.type` is "file".', + 'threat.enrichments.indicator.file.size': + 'File size in bytes.\nOnly relevant when `file.type` is "file".', 'threat.enrichments.indicator.file.target_path': 'Target path for symlinks.', 'threat.enrichments.indicator.file.type': 'File type (file, dir, or symlink).', - 'threat.enrichments.indicator.file.uid': 'The user ID (UID) or security identifier (SID) of the file owner.', + 'threat.enrichments.indicator.file.uid': + 'The user ID (UID) or security identifier (SID) of the file owner.', 'threat.enrichments.indicator.file.x509.alternative_names': 'List of subject alternative names (SAN). Name types vary by certificate authority and certificate type but commonly contain IP addresses, DNS names (and wildcards), and email addresses.', 'threat.enrichments.indicator.file.x509.issuer.common_name': @@ -1623,32 +1724,42 @@ export const ECSFIELDS = { 'List of organizations (O) of issuing certificate authority.', 'threat.enrichments.indicator.file.x509.issuer.organizational_unit': 'List of organizational units (OU) of issuing certificate authority.', - 'threat.enrichments.indicator.file.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', - 'threat.enrichments.indicator.file.x509.not_after': 'Time at which the certificate is no longer considered valid.', - 'threat.enrichments.indicator.file.x509.not_before': 'Time at which the certificate is first considered valid.', - 'threat.enrichments.indicator.file.x509.public_key_algorithm': 'Algorithm used to generate the public key.', + 'threat.enrichments.indicator.file.x509.issuer.state_or_province': + 'List of state or province names (ST, S, or P)', + 'threat.enrichments.indicator.file.x509.not_after': + 'Time at which the certificate is no longer considered valid.', + 'threat.enrichments.indicator.file.x509.not_before': + 'Time at which the 
certificate is first considered valid.', + 'threat.enrichments.indicator.file.x509.public_key_algorithm': + 'Algorithm used to generate the public key.', 'threat.enrichments.indicator.file.x509.public_key_curve': 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', 'threat.enrichments.indicator.file.x509.public_key_exponent': 'Exponent used to derive the public key. This is algorithm specific.', - 'threat.enrichments.indicator.file.x509.public_key_size': 'The size of the public key space in bits.', + 'threat.enrichments.indicator.file.x509.public_key_size': + 'The size of the public key space in bits.', 'threat.enrichments.indicator.file.x509.serial_number': 'Unique serial number issued by the certificate authority. For consistency, if this value is alphanumeric, it should be formatted without colons and uppercase characters.', 'threat.enrichments.indicator.file.x509.signature_algorithm': 'Identifier for certificate signature algorithm. We recommend using names found in Go Lang Crypto library. 
See https://github.com/golang/go/blob/go1.14/src/crypto/x509/x509.go#L337-L353.', - 'threat.enrichments.indicator.file.x509.subject.common_name': 'List of common names (CN) of subject.', + 'threat.enrichments.indicator.file.x509.subject.common_name': + 'List of common names (CN) of subject.', 'threat.enrichments.indicator.file.x509.subject.country': 'List of country \\(C) code', 'threat.enrichments.indicator.file.x509.subject.distinguished_name': 'Distinguished name (DN) of the certificate subject entity.', 'threat.enrichments.indicator.file.x509.subject.locality': 'List of locality names (L)', - 'threat.enrichments.indicator.file.x509.subject.organization': 'List of organizations (O) of subject.', - 'threat.enrichments.indicator.file.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', - 'threat.enrichments.indicator.file.x509.subject.state_or_province': 'List of state or province names (ST, S, or P)', + 'threat.enrichments.indicator.file.x509.subject.organization': + 'List of organizations (O) of subject.', + 'threat.enrichments.indicator.file.x509.subject.organizational_unit': + 'List of organizational units (OU) of subject.', + 'threat.enrichments.indicator.file.x509.subject.state_or_province': + 'List of state or province names (ST, S, or P)', 'threat.enrichments.indicator.file.x509.version_number': 'Version of x509 format.', 'threat.enrichments.indicator.first_seen': 'The date and time when intelligence source first reported sighting this indicator.', 'threat.enrichments.indicator.geo.city_name': 'City name.', - 'threat.enrichments.indicator.geo.continent_code': "Two-letter code representing continent's name.", + 'threat.enrichments.indicator.geo.continent_code': + "Two-letter code representing continent's name.", 'threat.enrichments.indicator.geo.continent_name': 'Name of the continent.', 'threat.enrichments.indicator.geo.country_iso_code': 'Country ISO code.', 'threat.enrichments.indicator.geo.country_name': 'Country 
name.', @@ -1659,8 +1770,10 @@ export const ECSFIELDS = { 'Postal code associated with the location.\nValues appropriate for this field may also be known as a postcode or ZIP code and will vary widely from country to country.', 'threat.enrichments.indicator.geo.region_iso_code': 'Region ISO code.', 'threat.enrichments.indicator.geo.region_name': 'Region name.', - 'threat.enrichments.indicator.geo.timezone': 'The time zone of the location, such as IANA time zone name.', - 'threat.enrichments.indicator.ip': 'Identifies a threat indicator as an IP address (irrespective of direction).', + 'threat.enrichments.indicator.geo.timezone': + 'The time zone of the location, such as IANA time zone name.', + 'threat.enrichments.indicator.ip': + 'Identifies a threat indicator as an IP address (irrespective of direction).', 'threat.enrichments.indicator.last_seen': 'The date and time when intelligence source last reported sighting this indicator.', 'threat.enrichments.indicator.marking.tlp': 'Traffic Light Protocol sharing markings.', @@ -1668,9 +1781,11 @@ export const ECSFIELDS = { 'threat.enrichments.indicator.modified_at': 'The date and time when intelligence source last modified information for this indicator.', 'threat.enrichments.indicator.name': 'The display name indicator in an UI friendly format', - 'threat.enrichments.indicator.port': 'Identifies a threat indicator as a port number (irrespective of direction).', + 'threat.enrichments.indicator.port': + 'Identifies a threat indicator as a port number (irrespective of direction).', 'threat.enrichments.indicator.provider': "The name of the indicator's provider.", - 'threat.enrichments.indicator.reference': 'Reference URL linking to additional information about this indicator.', + 'threat.enrichments.indicator.reference': + 'Reference URL linking to additional information about this indicator.', 'threat.enrichments.indicator.registry.data.bytes': 'Original bytes written with base64 encoding.\nFor Windows registry 
operations, such as SetValueEx and RegQueryValueEx, this corresponds to the data pointed by `lp_data`. This is optional but provides better recoverability and should be populated for REG_BINARY encoded values.', 'threat.enrichments.indicator.registry.data.strings': @@ -1682,8 +1797,10 @@ export const ECSFIELDS = { 'threat.enrichments.indicator.registry.value': 'Name of the value written.', 'threat.enrichments.indicator.scanner_stats': 'Count of AV/EDR vendors that successfully detected malicious file or URL.', - 'threat.enrichments.indicator.sightings': 'Number of times this indicator was observed conducting threat activity.', - 'threat.enrichments.indicator.type': 'Type of indicator as represented by Cyber Observable in STIX 2.0.', + 'threat.enrichments.indicator.sightings': + 'Number of times this indicator was observed conducting threat activity.', + 'threat.enrichments.indicator.type': + 'Type of indicator as represented by Cyber Observable in STIX 2.0.', 'threat.enrichments.indicator.url.domain': 'Domain of the url, such as "www.elastic.co".\nIn some cases a URL may refer to an IP and/or port directly, without a domain name. In this case, the IP address would go to the `domain` field.\nIf the URL contains a literal IPv6 address enclosed by `[` and `]` (IETF RFC 2732), the `[` and `]` characters should also be captured in the `domain` field.', 'threat.enrichments.indicator.url.extension': @@ -1710,7 +1827,8 @@ export const ECSFIELDS = { 'threat.enrichments.indicator.url.username': 'Username of the request.', 'threat.enrichments.indicator.x509.alternative_names': 'List of subject alternative names (SAN). 
Name types vary by certificate authority and certificate type but commonly contain IP addresses, DNS names (and wildcards), and email addresses.', - 'threat.enrichments.indicator.x509.issuer.common_name': 'List of common name (CN) of issuing certificate authority.', + 'threat.enrichments.indicator.x509.issuer.common_name': + 'List of common name (CN) of issuing certificate authority.', 'threat.enrichments.indicator.x509.issuer.country': 'List of country \\(C) codes', 'threat.enrichments.indicator.x509.issuer.distinguished_name': 'Distinguished name (DN) of issuing certificate authority.', @@ -1719,10 +1837,14 @@ export const ECSFIELDS = { 'List of organizations (O) of issuing certificate authority.', 'threat.enrichments.indicator.x509.issuer.organizational_unit': 'List of organizational units (OU) of issuing certificate authority.', - 'threat.enrichments.indicator.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', - 'threat.enrichments.indicator.x509.not_after': 'Time at which the certificate is no longer considered valid.', - 'threat.enrichments.indicator.x509.not_before': 'Time at which the certificate is first considered valid.', - 'threat.enrichments.indicator.x509.public_key_algorithm': 'Algorithm used to generate the public key.', + 'threat.enrichments.indicator.x509.issuer.state_or_province': + 'List of state or province names (ST, S, or P)', + 'threat.enrichments.indicator.x509.not_after': + 'Time at which the certificate is no longer considered valid.', + 'threat.enrichments.indicator.x509.not_before': + 'Time at which the certificate is first considered valid.', + 'threat.enrichments.indicator.x509.public_key_algorithm': + 'Algorithm used to generate the public key.', 'threat.enrichments.indicator.x509.public_key_curve': 'The curve used by the elliptic curve public key algorithm. 
This is algorithm specific.', 'threat.enrichments.indicator.x509.public_key_exponent': @@ -1738,15 +1860,19 @@ export const ECSFIELDS = { 'Distinguished name (DN) of the certificate subject entity.', 'threat.enrichments.indicator.x509.subject.locality': 'List of locality names (L)', 'threat.enrichments.indicator.x509.subject.organization': 'List of organizations (O) of subject.', - 'threat.enrichments.indicator.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', - 'threat.enrichments.indicator.x509.subject.state_or_province': 'List of state or province names (ST, S, or P)', + 'threat.enrichments.indicator.x509.subject.organizational_unit': + 'List of organizational units (OU) of subject.', + 'threat.enrichments.indicator.x509.subject.state_or_province': + 'List of state or province names (ST, S, or P)', 'threat.enrichments.indicator.x509.version_number': 'Version of x509 format.', 'threat.enrichments.matched.atomic': 'Identifies the atomic indicator value that matched a local environment endpoint or network event.', 'threat.enrichments.matched.field': 'Identifies the field of the atomic indicator that matched a local environment endpoint or network event.', - 'threat.enrichments.matched.id': 'Identifies the _id of the indicator document enriching the event.', - 'threat.enrichments.matched.index': 'Identifies the _index of the indicator document enriching the event.', + 'threat.enrichments.matched.id': + 'Identifies the _id of the indicator document enriching the event.', + 'threat.enrichments.matched.index': + 'Identifies the _index of the indicator document enriching the event.', 'threat.enrichments.matched.occurred': 'Indicates when the indicator match was generated', 'threat.enrichments.matched.type': 'Identifies the type of match that caused the event to be enriched with the given indicator', @@ -1771,7 +1897,8 @@ export const ECSFIELDS = { 'threat.indicator.confidence': 'Identifies the vendor-neutral confidence rating using the 
None/Low/Medium/High scale defined in Appendix A of the STIX 2.1 framework. Vendor-specific confidence scales may be added as custom fields.', 'threat.indicator.description': 'Describes the type of action conducted by the threat.', - 'threat.indicator.email.address': 'Identifies a threat indicator as an email address (irrespective of direction).', + 'threat.indicator.email.address': + 'Identifies a threat indicator as an email address (irrespective of direction).', 'threat.indicator.file.accessed': 'Last time the file was accessed.\nNote that not all filesystems keep track of access time.', 'threat.indicator.file.attributes': @@ -1786,12 +1913,14 @@ export const ECSFIELDS = { 'threat.indicator.file.code_signature.subject_name': 'Subject name of the code signer', 'threat.indicator.file.code_signature.team_id': 'The team identifier used to sign the process.\nThis is used to identify the team or vendor of a software product. The field is relevant to Apple *OS only.', - 'threat.indicator.file.code_signature.timestamp': 'Date and time when the code signature was generated and signed.', + 'threat.indicator.file.code_signature.timestamp': + 'Date and time when the code signature was generated and signed.', 'threat.indicator.file.code_signature.trusted': 'Stores the trust status of the certificate chain.\nValidating the trust of the certificate chain may be complicated, and this field should only be populated by tools that actively check the status.', 'threat.indicator.file.code_signature.valid': 'Boolean to capture if the digital signature is verified against the binary content.\nLeave unpopulated if a certificate was unchecked.', - 'threat.indicator.file.created': 'File creation time.\nNote that not all filesystems store the creation time.', + 'threat.indicator.file.created': + 'File creation time.\nNote that not all filesystems store the creation time.', 'threat.indicator.file.ctime': 'Last time the file attributes or metadata changed.\nNote that changes to the file 
content will update `mtime`. This implies `ctime` will be adjusted at the same time, since `mtime` is an attribute of the file.', 'threat.indicator.file.device': 'Device that is the source of the file.', @@ -1808,12 +1937,14 @@ export const ECSFIELDS = { 'threat.indicator.file.elf.go_import_hash': 'A hash of the Go language imports in an ELF file excluding standard library imports. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', 'threat.indicator.file.elf.go_imports': 'List of imported Go language element names and types.', - 'threat.indicator.file.elf.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'threat.indicator.file.elf.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', 'threat.indicator.file.elf.go_imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of Go imports.', 'threat.indicator.file.elf.go_stripped': 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', - 'threat.indicator.file.elf.header.abi_version': 'Version of the ELF Application Binary Interface (ABI).', + 'threat.indicator.file.elf.header.abi_version': + 'Version of the ELF Application Binary Interface (ABI).', 'threat.indicator.file.elf.header.class': 'Header class of the ELF file.', 'threat.indicator.file.elf.header.data': 'Data table of the ELF header.', 'threat.indicator.file.elf.header.entrypoint': 'Header entrypoint of the ELF file.', @@ -1837,7 +1968,8 @@ export const ECSFIELDS = { 'threat.indicator.file.elf.sections.physical_offset': 'ELF Section List offset.', 'threat.indicator.file.elf.sections.physical_size': 'ELF 
Section List physical size.', 'threat.indicator.file.elf.sections.type': 'ELF Section List type.', - 'threat.indicator.file.elf.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'threat.indicator.file.elf.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', 'threat.indicator.file.elf.sections.virtual_address': 'ELF Section List virtual address.', 'threat.indicator.file.elf.sections.virtual_size': 'ELF Section List virtual size.', 'threat.indicator.file.elf.segments': @@ -1869,13 +2001,17 @@ export const ECSFIELDS = { 'threat.indicator.file.path': 'Full path to the file, including the file name. It should include the drive letter, when appropriate.', 'threat.indicator.file.pe.architecture': 'CPU architecture target for the file.', - 'threat.indicator.file.pe.company': 'Internal company name of the file, provided at compile-time.', - 'threat.indicator.file.pe.description': 'Internal description of the file, provided at compile-time.', - 'threat.indicator.file.pe.file_version': 'Internal version of the file, provided at compile-time.', + 'threat.indicator.file.pe.company': + 'Internal company name of the file, provided at compile-time.', + 'threat.indicator.file.pe.description': + 'Internal description of the file, provided at compile-time.', + 'threat.indicator.file.pe.file_version': + 'Internal version of the file, provided at compile-time.', 'threat.indicator.file.pe.go_import_hash': 'A hash of the Go language imports in a PE file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', 'threat.indicator.file.pe.go_imports': 'List of imported Go language element names and types.', - 'threat.indicator.file.pe.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'threat.indicator.file.pe.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', 'threat.indicator.file.pe.go_imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of Go imports.', 'threat.indicator.file.pe.go_stripped': @@ -1889,16 +2025,19 @@ export const ECSFIELDS = { 'Shannon entropy calculation from the list of imported element names and types.', 'threat.indicator.file.pe.imports_names_var_entropy': 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'threat.indicator.file.pe.original_file_name': 'Internal name of the file, provided at compile-time.', + 'threat.indicator.file.pe.original_file_name': + 'Internal name of the file, provided at compile-time.', 'threat.indicator.file.pe.pehash': 'A hash of the PE header and data from one or more PE sections. 
An pehash can be used to cluster files by transforming structural information about a file into a hash value.\nLearn more at https://www.usenix.org/legacy/events/leet09/tech/full_papers/wicherski/wicherski_html/index.html.', - 'threat.indicator.file.pe.product': 'Internal product name of the file, provided at compile-time.', + 'threat.indicator.file.pe.product': + 'Internal product name of the file, provided at compile-time.', 'threat.indicator.file.pe.sections': 'An array containing an object for each section of the PE file.\nThe keys that should be present in these objects are defined by sub-fields underneath `pe.sections.*`.', 'threat.indicator.file.pe.sections.entropy': 'Shannon entropy calculation from the section.', 'threat.indicator.file.pe.sections.name': 'PE Section List name.', 'threat.indicator.file.pe.sections.physical_size': 'PE Section List physical size.', - 'threat.indicator.file.pe.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'threat.indicator.file.pe.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', 'threat.indicator.file.pe.sections.virtual_size': 'PE Section List virtual size. This is always the same as `physical_size`.', 'threat.indicator.file.size': 'File size in bytes.\nOnly relevant when `file.type` is "file".', @@ -1907,16 +2046,22 @@ export const ECSFIELDS = { 'threat.indicator.file.uid': 'The user ID (UID) or security identifier (SID) of the file owner.', 'threat.indicator.file.x509.alternative_names': 'List of subject alternative names (SAN). 
Name types vary by certificate authority and certificate type but commonly contain IP addresses, DNS names (and wildcards), and email addresses.', - 'threat.indicator.file.x509.issuer.common_name': 'List of common name (CN) of issuing certificate authority.', + 'threat.indicator.file.x509.issuer.common_name': + 'List of common name (CN) of issuing certificate authority.', 'threat.indicator.file.x509.issuer.country': 'List of country \\(C) codes', - 'threat.indicator.file.x509.issuer.distinguished_name': 'Distinguished name (DN) of issuing certificate authority.', + 'threat.indicator.file.x509.issuer.distinguished_name': + 'Distinguished name (DN) of issuing certificate authority.', 'threat.indicator.file.x509.issuer.locality': 'List of locality names (L)', - 'threat.indicator.file.x509.issuer.organization': 'List of organizations (O) of issuing certificate authority.', + 'threat.indicator.file.x509.issuer.organization': + 'List of organizations (O) of issuing certificate authority.', 'threat.indicator.file.x509.issuer.organizational_unit': 'List of organizational units (OU) of issuing certificate authority.', - 'threat.indicator.file.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', - 'threat.indicator.file.x509.not_after': 'Time at which the certificate is no longer considered valid.', - 'threat.indicator.file.x509.not_before': 'Time at which the certificate is first considered valid.', + 'threat.indicator.file.x509.issuer.state_or_province': + 'List of state or province names (ST, S, or P)', + 'threat.indicator.file.x509.not_after': + 'Time at which the certificate is no longer considered valid.', + 'threat.indicator.file.x509.not_before': + 'Time at which the certificate is first considered valid.', 'threat.indicator.file.x509.public_key_algorithm': 'Algorithm used to generate the public key.', 'threat.indicator.file.x509.public_key_curve': 'The curve used by the elliptic curve public key algorithm. 
This is algorithm specific.', @@ -1929,13 +2074,17 @@ export const ECSFIELDS = { 'Identifier for certificate signature algorithm. We recommend using names found in Go Lang Crypto library. See https://github.com/golang/go/blob/go1.14/src/crypto/x509/x509.go#L337-L353.', 'threat.indicator.file.x509.subject.common_name': 'List of common names (CN) of subject.', 'threat.indicator.file.x509.subject.country': 'List of country \\(C) code', - 'threat.indicator.file.x509.subject.distinguished_name': 'Distinguished name (DN) of the certificate subject entity.', + 'threat.indicator.file.x509.subject.distinguished_name': + 'Distinguished name (DN) of the certificate subject entity.', 'threat.indicator.file.x509.subject.locality': 'List of locality names (L)', 'threat.indicator.file.x509.subject.organization': 'List of organizations (O) of subject.', - 'threat.indicator.file.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', - 'threat.indicator.file.x509.subject.state_or_province': 'List of state or province names (ST, S, or P)', + 'threat.indicator.file.x509.subject.organizational_unit': + 'List of organizational units (OU) of subject.', + 'threat.indicator.file.x509.subject.state_or_province': + 'List of state or province names (ST, S, or P)', 'threat.indicator.file.x509.version_number': 'Version of x509 format.', - 'threat.indicator.first_seen': 'The date and time when intelligence source first reported sighting this indicator.', + 'threat.indicator.first_seen': + 'The date and time when intelligence source first reported sighting this indicator.', 'threat.indicator.geo.city_name': 'City name.', 'threat.indicator.geo.continent_code': "Two-letter code representing continent's name.", 'threat.indicator.geo.continent_name': 'Name of the continent.', @@ -1949,16 +2098,20 @@ export const ECSFIELDS = { 'threat.indicator.geo.region_iso_code': 'Region ISO code.', 'threat.indicator.geo.region_name': 'Region name.', 
'threat.indicator.geo.timezone': 'The time zone of the location, such as IANA time zone name.', - 'threat.indicator.ip': 'Identifies a threat indicator as an IP address (irrespective of direction).', - 'threat.indicator.last_seen': 'The date and time when intelligence source last reported sighting this indicator.', + 'threat.indicator.ip': + 'Identifies a threat indicator as an IP address (irrespective of direction).', + 'threat.indicator.last_seen': + 'The date and time when intelligence source last reported sighting this indicator.', 'threat.indicator.marking.tlp': 'Traffic Light Protocol sharing markings.', 'threat.indicator.marking.tlp_version': 'Traffic Light Protocol version.', 'threat.indicator.modified_at': 'The date and time when intelligence source last modified information for this indicator.', 'threat.indicator.name': 'The display name indicator in an UI friendly format', - 'threat.indicator.port': 'Identifies a threat indicator as a port number (irrespective of direction).', + 'threat.indicator.port': + 'Identifies a threat indicator as a port number (irrespective of direction).', 'threat.indicator.provider': "The name of the indicator's provider.", - 'threat.indicator.reference': 'Reference URL linking to additional information about this indicator.', + 'threat.indicator.reference': + 'Reference URL linking to additional information about this indicator.', 'threat.indicator.registry.data.bytes': 'Original bytes written with base64 encoding.\nFor Windows registry operations, such as SetValueEx and RegQueryValueEx, this corresponds to the data pointed by `lp_data`. 
This is optional but provides better recoverability and should be populated for REG_BINARY encoded values.', 'threat.indicator.registry.data.strings': @@ -1968,8 +2121,10 @@ export const ECSFIELDS = { 'threat.indicator.registry.key': 'Hive-relative path of keys.', 'threat.indicator.registry.path': 'Full path, including hive, key and value', 'threat.indicator.registry.value': 'Name of the value written.', - 'threat.indicator.scanner_stats': 'Count of AV/EDR vendors that successfully detected malicious file or URL.', - 'threat.indicator.sightings': 'Number of times this indicator was observed conducting threat activity.', + 'threat.indicator.scanner_stats': + 'Count of AV/EDR vendors that successfully detected malicious file or URL.', + 'threat.indicator.sightings': + 'Number of times this indicator was observed conducting threat activity.', 'threat.indicator.type': 'Type of indicator as represented by Cyber Observable in STIX 2.0.', 'threat.indicator.url.domain': 'Domain of the url, such as "www.elastic.co".\nIn some cases a URL may refer to an IP and/or port directly, without a domain name. In this case, the IP address would go to the `domain` field.\nIf the URL contains a literal IPv6 address enclosed by `[` and `]` (IETF RFC 2732), the `[` and `]` characters should also be captured in the `domain` field.', @@ -1988,7 +2143,8 @@ export const ECSFIELDS = { 'The query field describes the query string of the request, such as "q=elasticsearch".\nThe `?` is excluded from the query string. If a URL contains no `?`, there is no query field. If there is a `?` but no query, the query field exists with an empty string. The `exists` query can be used to differentiate between the two cases.', 'threat.indicator.url.registered_domain': 'The highest registered url domain, stripped of the subdomain.\nFor example, the registered domain for "foo.example.com" is "example.com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). 
Trying to approximate this by simply taking the last two labels will not work well for TLDs such as "co.uk".', - 'threat.indicator.url.scheme': 'Scheme of the request, such as "https".\nNote: The `:` is not part of the scheme.', + 'threat.indicator.url.scheme': + 'Scheme of the request, such as "https".\nNote: The `:` is not part of the scheme.', 'threat.indicator.url.subdomain': 'The subdomain portion of a fully qualified domain name includes all of the names except the host name under the registered_domain. In a partially qualified domain, or if the the qualification level of the full name cannot be determined, subdomain contains all of the names below the registered domain.\nFor example the subdomain portion of "www.east.mydomain.co.uk" is "east". If the domain has multiple levels of subdomain, such as "sub2.sub1.example.com", the subdomain field should contain "sub2.sub1", with no trailing period.', 'threat.indicator.url.top_level_domain': @@ -1996,11 +2152,14 @@ export const ECSFIELDS = { 'threat.indicator.url.username': 'Username of the request.', 'threat.indicator.x509.alternative_names': 'List of subject alternative names (SAN). 
Name types vary by certificate authority and certificate type but commonly contain IP addresses, DNS names (and wildcards), and email addresses.', - 'threat.indicator.x509.issuer.common_name': 'List of common name (CN) of issuing certificate authority.', + 'threat.indicator.x509.issuer.common_name': + 'List of common name (CN) of issuing certificate authority.', 'threat.indicator.x509.issuer.country': 'List of country \\(C) codes', - 'threat.indicator.x509.issuer.distinguished_name': 'Distinguished name (DN) of issuing certificate authority.', + 'threat.indicator.x509.issuer.distinguished_name': + 'Distinguished name (DN) of issuing certificate authority.', 'threat.indicator.x509.issuer.locality': 'List of locality names (L)', - 'threat.indicator.x509.issuer.organization': 'List of organizations (O) of issuing certificate authority.', + 'threat.indicator.x509.issuer.organization': + 'List of organizations (O) of issuing certificate authority.', 'threat.indicator.x509.issuer.organizational_unit': 'List of organizational units (OU) of issuing certificate authority.', 'threat.indicator.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', @@ -2009,7 +2168,8 @@ export const ECSFIELDS = { 'threat.indicator.x509.public_key_algorithm': 'Algorithm used to generate the public key.', 'threat.indicator.x509.public_key_curve': 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', - 'threat.indicator.x509.public_key_exponent': 'Exponent used to derive the public key. This is algorithm specific.', + 'threat.indicator.x509.public_key_exponent': + 'Exponent used to derive the public key. This is algorithm specific.', 'threat.indicator.x509.public_key_size': 'The size of the public key space in bits.', 'threat.indicator.x509.serial_number': 'Unique serial number issued by the certificate authority. 
For consistency, if this value is alphanumeric, it should be formatted without colons and uppercase characters.', @@ -2017,11 +2177,14 @@ export const ECSFIELDS = { 'Identifier for certificate signature algorithm. We recommend using names found in Go Lang Crypto library. See https://github.com/golang/go/blob/go1.14/src/crypto/x509/x509.go#L337-L353.', 'threat.indicator.x509.subject.common_name': 'List of common names (CN) of subject.', 'threat.indicator.x509.subject.country': 'List of country \\(C) code', - 'threat.indicator.x509.subject.distinguished_name': 'Distinguished name (DN) of the certificate subject entity.', + 'threat.indicator.x509.subject.distinguished_name': + 'Distinguished name (DN) of the certificate subject entity.', 'threat.indicator.x509.subject.locality': 'List of locality names (L)', 'threat.indicator.x509.subject.organization': 'List of organizations (O) of subject.', - 'threat.indicator.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', - 'threat.indicator.x509.subject.state_or_province': 'List of state or province names (ST, S, or P)', + 'threat.indicator.x509.subject.organizational_unit': + 'List of organizational units (OU) of subject.', + 'threat.indicator.x509.subject.state_or_province': + 'List of state or province names (ST, S, or P)', 'threat.indicator.x509.version_number': 'Version of x509 format.', 'threat.software.alias': 'The alias(es) of the software for a set of related intrusion activity that are tracked by a common name in the security community.\nWhile not required, you can use a MITRE ATT&CK\u00ae associated software description.', @@ -2064,29 +2227,39 @@ export const ECSFIELDS = { 'Certificate fingerprint using the SHA1 digest of DER-encoded version of certificate offered by the client. 
For consistency with other hash values, this value should be formatted as an uppercase hash.', 'tls.client.hash.sha256': 'Certificate fingerprint using the SHA256 digest of DER-encoded version of certificate offered by the client. For consistency with other hash values, this value should be formatted as an uppercase hash.', - 'tls.client.issuer': 'Distinguished name of subject of the issuer of the x.509 certificate presented by the client.', - 'tls.client.ja3': 'A hash that identifies clients based on how they perform an SSL/TLS handshake.', - 'tls.client.not_after': 'Date/Time indicating when client certificate is no longer considered valid.', - 'tls.client.not_before': 'Date/Time indicating when client certificate is first considered valid.', + 'tls.client.issuer': + 'Distinguished name of subject of the issuer of the x.509 certificate presented by the client.', + 'tls.client.ja3': + 'A hash that identifies clients based on how they perform an SSL/TLS handshake.', + 'tls.client.not_after': + 'Date/Time indicating when client certificate is no longer considered valid.', + 'tls.client.not_before': + 'Date/Time indicating when client certificate is first considered valid.', 'tls.client.server_name': 'Also called an SNI, this tells the server which hostname to which the client is attempting to connect to. When this value is available, it should get copied to `destination.domain`.', - 'tls.client.subject': 'Distinguished name of subject of the x.509 certificate presented by the client.', + 'tls.client.subject': + 'Distinguished name of subject of the x.509 certificate presented by the client.', 'tls.client.supported_ciphers': 'Array of ciphers offered by the client during the client hello.', 'tls.client.x509.alternative_names': 'List of subject alternative names (SAN). 
Name types vary by certificate authority and certificate type but commonly contain IP addresses, DNS names (and wildcards), and email addresses.', - 'tls.client.x509.issuer.common_name': 'List of common name (CN) of issuing certificate authority.', + 'tls.client.x509.issuer.common_name': + 'List of common name (CN) of issuing certificate authority.', 'tls.client.x509.issuer.country': 'List of country \\(C) codes', - 'tls.client.x509.issuer.distinguished_name': 'Distinguished name (DN) of issuing certificate authority.', + 'tls.client.x509.issuer.distinguished_name': + 'Distinguished name (DN) of issuing certificate authority.', 'tls.client.x509.issuer.locality': 'List of locality names (L)', - 'tls.client.x509.issuer.organization': 'List of organizations (O) of issuing certificate authority.', - 'tls.client.x509.issuer.organizational_unit': 'List of organizational units (OU) of issuing certificate authority.', + 'tls.client.x509.issuer.organization': + 'List of organizations (O) of issuing certificate authority.', + 'tls.client.x509.issuer.organizational_unit': + 'List of organizational units (OU) of issuing certificate authority.', 'tls.client.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', 'tls.client.x509.not_after': 'Time at which the certificate is no longer considered valid.', 'tls.client.x509.not_before': 'Time at which the certificate is first considered valid.', 'tls.client.x509.public_key_algorithm': 'Algorithm used to generate the public key.', 'tls.client.x509.public_key_curve': 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', - 'tls.client.x509.public_key_exponent': 'Exponent used to derive the public key. This is algorithm specific.', + 'tls.client.x509.public_key_exponent': + 'Exponent used to derive the public key. 
This is algorithm specific.', 'tls.client.x509.public_key_size': 'The size of the public key space in bits.', 'tls.client.x509.serial_number': 'Unique serial number issued by the certificate authority. For consistency, if this value is alphanumeric, it should be formatted without colons and uppercase characters.', @@ -2094,7 +2267,8 @@ export const ECSFIELDS = { 'Identifier for certificate signature algorithm. We recommend using names found in Go Lang Crypto library. See https://github.com/golang/go/blob/go1.14/src/crypto/x509/x509.go#L337-L353.', 'tls.client.x509.subject.common_name': 'List of common names (CN) of subject.', 'tls.client.x509.subject.country': 'List of country \\(C) code', - 'tls.client.x509.subject.distinguished_name': 'Distinguished name (DN) of the certificate subject entity.', + 'tls.client.x509.subject.distinguished_name': + 'Distinguished name (DN) of the certificate subject entity.', 'tls.client.x509.subject.locality': 'List of locality names (L)', 'tls.client.x509.subject.organization': 'List of organizations (O) of subject.', 'tls.client.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', @@ -2105,7 +2279,8 @@ export const ECSFIELDS = { 'Boolean flag indicating if the TLS negotiation was successful and transitioned to an encrypted tunnel.', 'tls.next_protocol': 'String indicating the protocol being tunneled. Per the values in the IANA registry (https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids), this string should be lower case.', - 'tls.resumed': 'Boolean flag indicating if this TLS connection was resumed from an existing TLS negotiation.', + 'tls.resumed': + 'Boolean flag indicating if this TLS connection was resumed from an existing TLS negotiation.', 'tls.server.certificate': 'PEM-encoded stand-alone certificate offered by the server. 
This is usually mutually-exclusive of `server.certificate_chain` since this value also exists in that list.', 'tls.server.certificate_chain': @@ -2117,25 +2292,33 @@ export const ECSFIELDS = { 'tls.server.hash.sha256': 'Certificate fingerprint using the SHA256 digest of DER-encoded version of certificate offered by the server. For consistency with other hash values, this value should be formatted as an uppercase hash.', 'tls.server.issuer': 'Subject of the issuer of the x.509 certificate presented by the server.', - 'tls.server.ja3s': 'A hash that identifies servers based on how they perform an SSL/TLS handshake.', - 'tls.server.not_after': 'Timestamp indicating when server certificate is no longer considered valid.', - 'tls.server.not_before': 'Timestamp indicating when server certificate is first considered valid.', + 'tls.server.ja3s': + 'A hash that identifies servers based on how they perform an SSL/TLS handshake.', + 'tls.server.not_after': + 'Timestamp indicating when server certificate is no longer considered valid.', + 'tls.server.not_before': + 'Timestamp indicating when server certificate is first considered valid.', 'tls.server.subject': 'Subject of the x.509 certificate presented by the server.', 'tls.server.x509.alternative_names': 'List of subject alternative names (SAN). 
Name types vary by certificate authority and certificate type but commonly contain IP addresses, DNS names (and wildcards), and email addresses.', - 'tls.server.x509.issuer.common_name': 'List of common name (CN) of issuing certificate authority.', + 'tls.server.x509.issuer.common_name': + 'List of common name (CN) of issuing certificate authority.', 'tls.server.x509.issuer.country': 'List of country \\(C) codes', - 'tls.server.x509.issuer.distinguished_name': 'Distinguished name (DN) of issuing certificate authority.', + 'tls.server.x509.issuer.distinguished_name': + 'Distinguished name (DN) of issuing certificate authority.', 'tls.server.x509.issuer.locality': 'List of locality names (L)', - 'tls.server.x509.issuer.organization': 'List of organizations (O) of issuing certificate authority.', - 'tls.server.x509.issuer.organizational_unit': 'List of organizational units (OU) of issuing certificate authority.', + 'tls.server.x509.issuer.organization': + 'List of organizations (O) of issuing certificate authority.', + 'tls.server.x509.issuer.organizational_unit': + 'List of organizational units (OU) of issuing certificate authority.', 'tls.server.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', 'tls.server.x509.not_after': 'Time at which the certificate is no longer considered valid.', 'tls.server.x509.not_before': 'Time at which the certificate is first considered valid.', 'tls.server.x509.public_key_algorithm': 'Algorithm used to generate the public key.', 'tls.server.x509.public_key_curve': 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', - 'tls.server.x509.public_key_exponent': 'Exponent used to derive the public key. This is algorithm specific.', + 'tls.server.x509.public_key_exponent': + 'Exponent used to derive the public key. 
This is algorithm specific.', 'tls.server.x509.public_key_size': 'The size of the public key space in bits.', 'tls.server.x509.serial_number': 'Unique serial number issued by the certificate authority. For consistency, if this value is alphanumeric, it should be formatted without colons and uppercase characters.', @@ -2143,7 +2326,8 @@ export const ECSFIELDS = { 'Identifier for certificate signature algorithm. We recommend using names found in Go Lang Crypto library. See https://github.com/golang/go/blob/go1.14/src/crypto/x509/x509.go#L337-L353.', 'tls.server.x509.subject.common_name': 'List of common names (CN) of subject.', 'tls.server.x509.subject.country': 'List of country \\(C) code', - 'tls.server.x509.subject.distinguished_name': 'Distinguished name (DN) of the certificate subject entity.', + 'tls.server.x509.subject.distinguished_name': + 'Distinguished name (DN) of the certificate subject entity.', 'tls.server.x509.subject.locality': 'List of locality names (L)', 'tls.server.x509.subject.organization': 'List of organizations (O) of subject.', 'tls.server.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', @@ -2159,7 +2343,8 @@ export const ECSFIELDS = { 'Domain of the url, such as "www.elastic.co".\nIn some cases a URL may refer to an IP and/or port directly, without a domain name. In this case, the IP address would go to the `domain` field.\nIf the URL contains a literal IPv6 address enclosed by `[` and `]` (IETF RFC 2732), the `[` and `]` characters should also be captured in the `domain` field.', 'url.extension': 'The field contains the file extension from the original request url, excluding the leading dot.\nThe file extension is only set if it exists, as not every url has a file extension.\nThe leading period must not be included. 
For example, the value must be "png", not ".png".\nNote that when the file name has multiple extensions (example.tar.gz), only the last one should be captured ("gz", not "tar.gz").', - 'url.fragment': 'Portion of the url after the `#`, such as "top".\nThe `#` is not part of the fragment.', + 'url.fragment': + 'Portion of the url after the `#`, such as "top".\nThe `#` is not part of the fragment.', 'url.full': 'If full URLs are important to your use case, they should be stored in `url.full`, whether this field is reconstructed or present in the event source.', 'url.original': diff --git a/x-pack/plugins/integration_assistant/public/constants/headerTitles.tsx b/x-pack/plugins/integration_assistant/public/constants/headerTitles.tsx index 5c0681be954c3..c347cce9698f8 100644 --- a/x-pack/plugins/integration_assistant/public/constants/headerTitles.tsx +++ b/x-pack/plugins/integration_assistant/public/constants/headerTitles.tsx @@ -1,11 +1,16 @@ -enum HeaderTitles { - '/'= 'Base Page', - '/integration_builder/ecs_mapping'= 'ECS Mapping', - '/integration_builder/categorization'= 'Add Categorization', - '/integration_builder/related'= 'Add Related Fields', - '/integration_builder/results'= 'View Results', - '/integration_builder/build'= 'Build & Deploy', - '/agent_analyzer'= 'Agent Analyzer', - } +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ -export default HeaderTitles; \ No newline at end of file +export enum HeaderTitles { + '/' = 'Base Page', + '/integration_builder/ecs_mapping' = 'ECS Mapping', + '/integration_builder/categorization' = 'Add Categorization', + '/integration_builder/related' = 'Add Related Fields', + '/integration_builder/results' = 'View Results', + '/integration_builder/build' = 'Build & Deploy', + '/agent_analyzer' = 'Agent Analyzer', +} diff --git a/x-pack/plugins/integration_assistant/public/constants/routePaths.tsx b/x-pack/plugins/integration_assistant/public/constants/routePaths.tsx index c5f39e8b56e9b..a10563fb8167b 100644 --- a/x-pack/plugins/integration_assistant/public/constants/routePaths.tsx +++ b/x-pack/plugins/integration_assistant/public/constants/routePaths.tsx @@ -1,4 +1,11 @@ -enum RoutePaths { +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +export enum RoutePaths { BASE_PATH = '/', ECS_MAPPING_PATH = '/integration_builder/ecs_mapping', CATEGORIZATION_PATH = '/integration_builder/categorization', @@ -7,5 +14,3 @@ enum RoutePaths { INTEGRATION_BUILDER_BUILD_PATH = '/integration_builder/build', AGENT_ANALYZER_PATH = '/agent_analyzer', } - -export default RoutePaths; diff --git a/x-pack/plugins/integration_assistant/public/index.ts b/x-pack/plugins/integration_assistant/public/index.ts index 128c4ff1b1546..1ef32ec38169b 100644 --- a/x-pack/plugins/integration_assistant/public/index.ts +++ b/x-pack/plugins/integration_assistant/public/index.ts @@ -7,8 +7,6 @@ import { IntegrationAssistantPlugin } from './plugin'; -// This exports static code and TypeScript types, -// as well as, Kibana Platform `plugin()` initializer. 
export function plugin() { return new IntegrationAssistantPlugin(); } diff --git a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/BuildIntegrationPage.tsx b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/BuildIntegrationPage.tsx index 31dccf6cd8fbc..0be8e1f3e5299 100644 --- a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/BuildIntegrationPage.tsx +++ b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/BuildIntegrationPage.tsx @@ -1,12 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +import React from 'react'; import { EuiPageTemplate } from '@elastic/eui'; -import BuildIntegrationButtons from '@components/BuildIntegration/BuildIntegrationButtons'; +import BuildIntegrationButtons from '../../components/BuildIntegration/BuildIntegrationButtons'; -const BuildIntegration = () => { +export const BuildIntegration = () => { return ( - - - + + + ); }; - -export default BuildIntegration; diff --git a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/CategorizationPage.tsx b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/CategorizationPage.tsx index 9fd744d894b98..4b30014937377 100644 --- a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/CategorizationPage.tsx +++ b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/CategorizationPage.tsx @@ -1,12 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React from 'react'; import { EuiPageTemplate, EuiSpacer } from '@elastic/eui'; -import { useGlobalStore } from '@Stores/useGlobalStore'; +import { useGlobalStore } from '../../stores/useGlobalStore'; -import EmptyPrompt from '@components/EmptyPrompt/EmptyPrompt'; -import CategorizationButtons from '@Components/Categorization/CategorizationButtons'; -import PipelineResults from '@Components/IntegrationResults/PipelineResults'; -import RoutePaths from '@Constants/routePaths'; +import EmptyPrompt from '../../components/EmptyPrompt/EmptyPrompt'; +import CategorizationButtons from '../../components/Categorization/CategorizationButtons'; +import PipelineResults from '../../components/IntegrationResults/PipelineResults'; +import RoutePaths from '../../constants/routePaths'; -const CategorizationPage = () => { +export const CategorizationPage = () => { const ingestPipeline = useGlobalStore((state) => state.ingestPipeline); if (Object.keys(ingestPipeline).length <= 0) { @@ -19,12 +27,10 @@ const CategorizationPage = () => { ); } return ( - - - - - + + + + + ); }; - -export default CategorizationPage; diff --git a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/EcsMapperPage.tsx b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/EcsMapperPage.tsx index 079bd1b28b145..09ec1154e6a2e 100644 --- a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/EcsMapperPage.tsx +++ b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/EcsMapperPage.tsx @@ -1,27 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React from 'react'; import { EuiPageTemplate, EuiSpacer } from '@elastic/eui'; import { useGlobalStore } from '@Stores/useGlobalStore'; -import EcsFormStats from '@Components/Ecs/EcsFormStats'; -import EcsButtons from '@components/Ecs/EcsButtons'; -import EcsForm from '@Components/Ecs/EcsForm'; -import EcsTable from '@Components/Ecs/EcsTable'; - +import EcsFormStats from '../../components/Ecs/EcsFormStats'; +import EcsButtons from '../../components/Ecs/EcsButtons'; +import EcsForm from '../../components/Ecs/EcsForm'; +import EcsTable from '../../components/Ecs/EcsTable'; -const EcsMapperPage = () => { +export const EcsMapperPage = () => { const ecsMappingTableState = useGlobalStore((state) => state.ecsMappingTableState); return ( - - {ecsMappingTableState.length <= 0 && } - {ecsMappingTableState.length >= 1 && ( - <> - - - - - - - )} - + + {ecsMappingTableState.length <= 0 && } + {ecsMappingTableState.length >= 1 && ( + <> + + + + + + + )} + ); }; - -export default EcsMapperPage; diff --git a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/RelatedPage.tsx b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/RelatedPage.tsx index f2b20e2cf3b7f..abae0836b12e5 100644 --- a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/RelatedPage.tsx +++ b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/RelatedPage.tsx @@ -1,12 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React from 'react'; import { EuiPageTemplate, EuiSpacer } from '@elastic/eui'; import { useGlobalStore } from '@Stores/useGlobalStore'; -import EmptyPrompt from '@components/EmptyPrompt/EmptyPrompt'; -import RelatedButtons from '@Components/Related/RelatedButtons'; -import PipelineResults from '@Components/IntegrationResults/PipelineResults'; -import RoutePaths from '@Constants/routePaths'; +import EmptyPrompt from '../../components/EmptyPrompt/EmptyPrompt'; +import RelatedButtons from '../../components/Related/RelatedButtons'; +import PipelineResults from '../../components/IntegrationResults/PipelineResults'; +import RoutePaths from '../../constants/routePaths'; -const RelatedPage = () => { +export const RelatedPage = () => { const ingestPipeline = useGlobalStore((state) => state.ingestPipeline); if (Object.keys(ingestPipeline).length <= 0) { @@ -19,12 +27,10 @@ const RelatedPage = () => { ); } return ( - - - - - + + + + + ); }; - -export default RelatedPage; diff --git a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/ViewResultsPage.tsx b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/ViewResultsPage.tsx index 28a925b8b0198..c20a1d4b1ef84 100644 --- a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/ViewResultsPage.tsx +++ b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/ViewResultsPage.tsx @@ -1,12 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React from 'react'; import { EuiPageTemplate, EuiSpacer } from '@elastic/eui'; import { useGlobalStore } from '@Stores/useGlobalStore'; -import DocResults from '@Components/IntegrationResults/DocsResults'; -import PipelineResults from '@Components/IntegrationResults/PipelineResults'; -import EmptyPrompt from '@components/EmptyPrompt/EmptyPrompt'; -import FinalResultsButtons from '@components/ViewResults/ViewResultsButtons'; -import RoutePaths from '@Constants/routePaths'; +import DocResults from '../../components/IntegrationResults/DocsResults'; +import PipelineResults from '../../components/IntegrationResults/PipelineResults'; +import EmptyPrompt from '../../components/EmptyPrompt/EmptyPrompt'; +import FinalResultsButtons from '../../components/ViewResults/ViewResultsButtons'; +import RoutePaths from '../../constants/routePaths'; -const ViewResultsPage = () => { +export const ViewResultsPage = () => { const ingestPipeline = useGlobalStore((state) => state.ingestPipeline); const docs = useGlobalStore((state) => state.docs); @@ -20,14 +28,12 @@ const ViewResultsPage = () => { ); } return ( - - - - - - - + + + + + + + ); }; - -export default ViewResultsPage; diff --git a/x-pack/plugins/integration_assistant/public/pages/Main/MainPage.tsx b/x-pack/plugins/integration_assistant/public/pages/Main/MainPage.tsx index 5fda4e9a684eb..fb7ab85aa0f19 100644 --- a/x-pack/plugins/integration_assistant/public/pages/Main/MainPage.tsx +++ b/x-pack/plugins/integration_assistant/public/pages/Main/MainPage.tsx @@ -1,11 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import React from 'react'; import { EuiPageSection } from '@elastic/eui'; -const MainPage = () => { +export const MainPage = () => { return (

Mainpage Test

); }; - -export default MainPage; diff --git a/x-pack/plugins/integration_assistant/public/plugin.tsx b/x-pack/plugins/integration_assistant/public/plugin.tsx index 2a8e072399eac..00538859d5c26 100644 --- a/x-pack/plugins/integration_assistant/public/plugin.tsx +++ b/x-pack/plugins/integration_assistant/public/plugin.tsx @@ -6,8 +6,9 @@ */ import { CoreStart, Plugin, CoreSetup, AppMountParameters } from '@kbn/core/public'; +import { i18n } from '@kbn/i18n'; import { getServices } from './services'; - +import { PLUGIN_ID, INTEGRATION_ASSISTANT_APP_ROUTE } from '../common'; import { IntegrationAssistantPluginSetup, IntegrationAssistantPluginStart } from './types'; export class IntegrationAssistantPlugin @@ -15,13 +16,20 @@ export class IntegrationAssistantPlugin { public setup(core: CoreSetup): IntegrationAssistantPluginSetup { core.application.register({ - id: 'integrationAssistant', - title: 'Integration Assistant', + id: PLUGIN_ID, + euiIconType: 'logoElastic', + title: i18n.translate('xpack.fleet.integrationAssistantAppTitle', { + defaultMessage: 'Integration Assistant', + }), + appRoute: INTEGRATION_ASSISTANT_APP_ROUTE, async mount(params: AppMountParameters) { const [coreStart] = await core.getStartServices(); const startServices = getServices(coreStart); const { renderApp } = await import('./app'); - return renderApp(startServices, params.element); + const unmount = renderApp(startServices, params.element); + return () => { + unmount(); + }; }, }); return { diff --git a/x-pack/plugins/integration_assistant/public/utils/samples.tsx b/x-pack/plugins/integration_assistant/public/utils/samples.tsx index 5b65be632b924..f56270da84af7 100644 --- a/x-pack/plugins/integration_assistant/public/utils/samples.tsx +++ b/x-pack/plugins/integration_assistant/public/utils/samples.tsx @@ -1,5 +1,13 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import { htmlIdGenerator } from '@elastic/eui'; -import { ECSFIELDS } from '@Constants/ecsFields'; +import { ECSFIELDS } from '../constants/ecsFields'; +import { EcsMappingTableItem } from '../types'; function isEmptyValue(value): boolean { return value === null || value === ''; @@ -46,7 +54,7 @@ export function traverseAndMatchFields( mergedObject: object, packageName: string, dataStreamName: string, - path: string[] = [], + path: string[] = [] ): EcsMappingTableItem[] { const makeId = htmlIdGenerator(); let matches: EcsMappingTableItem[] = []; @@ -54,7 +62,7 @@ export function traverseAndMatchFields( Object.entries(mapping).forEach(([key, value]) => { if (typeof value === 'object' && value !== null) { matches = matches.concat( - traverseAndMatchFields(value, mergedObject, packageName, dataStreamName, path.concat(key)), + traverseAndMatchFields(value, mergedObject, packageName, dataStreamName, path.concat(key)) ); } else { const matchKey = value; @@ -65,11 +73,11 @@ export function traverseAndMatchFields( matches.push({ sourceField: fullPath, - destinationField: destinationField, + destinationField, isEcs: isECS, description: isECS ? 
ECSFIELDS[matchKey] : '', id: makeId(), - exampleValue: exampleValue, + exampleValue, }); } }); From 84c58d4bba24914b3efe401ea392cf8f006c32da Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Fri, 24 May 2024 15:00:06 +0200 Subject: [PATCH 05/62] fixing import paths --- .../BuildIntegration/BuildIntegrationButtons.tsx | 2 +- .../components/Buttons/ContinueButton.test.tsx | 2 +- .../public/components/Buttons/GoBackButton.tsx | 2 +- .../Categorization/CategorizationButtons.tsx | 2 +- .../public/components/Ecs/EcsButtons.tsx | 2 +- .../public/components/EmptyPrompt/EmptyPrompt.tsx | 2 +- .../IntegrationBuilderSteps.tsx | 2 +- .../public/components/Related/RelatedButtons.tsx | 2 +- .../components/ViewResults/ViewResultsButtons.tsx | 4 ++-- .../IntegrationBuilder/BuildIntegrationPage.tsx | 2 +- .../pages/IntegrationBuilder/CategorizationPage.tsx | 8 ++++---- .../pages/IntegrationBuilder/EcsMapperPage.tsx | 8 ++++---- .../public/pages/IntegrationBuilder/RelatedPage.tsx | 8 ++++---- .../pages/IntegrationBuilder/ViewResultsPage.tsx | 12 ++++++------ 14 files changed, 29 insertions(+), 29 deletions(-) diff --git a/x-pack/plugins/integration_assistant/public/components/BuildIntegration/BuildIntegrationButtons.tsx b/x-pack/plugins/integration_assistant/public/components/BuildIntegration/BuildIntegrationButtons.tsx index 485892f4da71e..81bad651d3cfb 100644 --- a/x-pack/plugins/integration_assistant/public/components/BuildIntegration/BuildIntegrationButtons.tsx +++ b/x-pack/plugins/integration_assistant/public/components/BuildIntegration/BuildIntegrationButtons.tsx @@ -10,7 +10,7 @@ import { EuiFlexGroup } from '@elastic/eui'; import { useGlobalStore } from '@Stores/useGlobalStore'; import { buildIntegration, installIntegration } from '@api/services/integrationBuilderService'; -import RoutePaths from '../../constants/routePaths'; +import { RoutePaths } from '../../constants/routePaths'; import ActionButton from '../Buttons/ActionButton'; import GoBackButton from 
'../Buttons/GoBackButton'; diff --git a/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.test.tsx b/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.test.tsx index 79ea1bcad2d20..4573953361aba 100644 --- a/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.test.tsx +++ b/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.test.tsx @@ -11,7 +11,7 @@ import { act, render, screen } from '@testing-library/react'; import { useGlobalStore } from '@Stores/useGlobalStore'; import { userEvent } from '@testing-library/user-event'; import ContinueButton from './ContinueButton'; -import RoutePaths from '../../constants/routePaths'; +import { RoutePaths } from '../../constants/routePaths'; import { BrowserRouter as Router } from 'react-router-dom'; describe('ContinueButton Tests', () => { diff --git a/x-pack/plugins/integration_assistant/public/components/Buttons/GoBackButton.tsx b/x-pack/plugins/integration_assistant/public/components/Buttons/GoBackButton.tsx index 23760f8ac646a..52ca9503eba25 100644 --- a/x-pack/plugins/integration_assistant/public/components/Buttons/GoBackButton.tsx +++ b/x-pack/plugins/integration_assistant/public/components/Buttons/GoBackButton.tsx @@ -8,7 +8,7 @@ import React from 'react'; import { EuiButton } from '@elastic/eui'; import { useGlobalStore } from '@Stores/useGlobalStore'; -import RoutePaths from '../../constants/routePaths'; +import { RoutePaths } from '../../constants/routePaths'; import { useNavigate } from 'react-router-dom'; interface GoBackButtonProps { diff --git a/x-pack/plugins/integration_assistant/public/components/Categorization/CategorizationButtons.tsx b/x-pack/plugins/integration_assistant/public/components/Categorization/CategorizationButtons.tsx index ff41e488651f5..41276e60ef2cc 100644 --- a/x-pack/plugins/integration_assistant/public/components/Categorization/CategorizationButtons.tsx +++ 
b/x-pack/plugins/integration_assistant/public/components/Categorization/CategorizationButtons.tsx @@ -9,7 +9,7 @@ import React from 'react'; import { useGlobalStore } from '@Stores/useGlobalStore'; import { EuiFlexGroup } from '@elastic/eui'; import { getCategorization } from '@Api/services/categorizationService'; -import RoutePaths from '../../constants/routePaths'; +import { RoutePaths } from '../../constants/routePaths'; import { ContinueButton } from '../Buttons/ContinueButton'; import { ActionButton } from '../Buttons/ActionButton'; diff --git a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsButtons.tsx b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsButtons.tsx index 0dd42e9cb75a2..ef929d08fb75a 100644 --- a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsButtons.tsx +++ b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsButtons.tsx @@ -9,7 +9,7 @@ import React from 'react'; import { EuiFlexGroup } from '@elastic/eui'; import { useGlobalStore } from '@Stores/useGlobalStore'; import { getEcsMapping, formatEcsResponse } from '@Api/services/ecsMappingService'; -import RoutePaths from '../../constants/routePaths'; +import { RoutePaths } from '../../constants/routePaths'; import { ContinueButton } from '../Buttons/ContinueButton'; import { ActionButton } from '../Buttons/ActionButton'; import { ResetButton } from '../Buttons/ResetButton'; diff --git a/x-pack/plugins/integration_assistant/public/components/EmptyPrompt/EmptyPrompt.tsx b/x-pack/plugins/integration_assistant/public/components/EmptyPrompt/EmptyPrompt.tsx index 9f0a767890937..4bc784eb31f76 100644 --- a/x-pack/plugins/integration_assistant/public/components/EmptyPrompt/EmptyPrompt.tsx +++ b/x-pack/plugins/integration_assistant/public/components/EmptyPrompt/EmptyPrompt.tsx @@ -8,7 +8,7 @@ import React from 'react'; import { EuiPageTemplate } from '@elastic/eui'; import { GoBackButton } from '../Buttons/GoBackButton'; -import RoutePaths from 
'../../constants/routePaths'; +import { RoutePaths } from '../../constants/routePaths'; interface EmptyPromptProps { title: string; diff --git a/x-pack/plugins/integration_assistant/public/components/IntegrationBuilderSteps/IntegrationBuilderSteps.tsx b/x-pack/plugins/integration_assistant/public/components/IntegrationBuilderSteps/IntegrationBuilderSteps.tsx index 6ea2b0fd2bfd8..fc3547b8280bd 100644 --- a/x-pack/plugins/integration_assistant/public/components/IntegrationBuilderSteps/IntegrationBuilderSteps.tsx +++ b/x-pack/plugins/integration_assistant/public/components/IntegrationBuilderSteps/IntegrationBuilderSteps.tsx @@ -10,7 +10,7 @@ import { EuiStepsHorizontal, EuiStepsHorizontalProps } from '@elastic/eui'; import { useGlobalStore } from '@Stores/useGlobalStore'; import { useNavigate } from 'react-router-dom'; -import RoutePaths from '../../constants/routePaths'; +import { RoutePaths } from '../../constants/routePaths'; export const IntegrationBuilderSteps = () => { const step1 = useGlobalStore((state) => state.integrationBuilderStep1); diff --git a/x-pack/plugins/integration_assistant/public/components/Related/RelatedButtons.tsx b/x-pack/plugins/integration_assistant/public/components/Related/RelatedButtons.tsx index d925f890c2e38..59b2229e30b5d 100644 --- a/x-pack/plugins/integration_assistant/public/components/Related/RelatedButtons.tsx +++ b/x-pack/plugins/integration_assistant/public/components/Related/RelatedButtons.tsx @@ -9,7 +9,7 @@ import React from 'react'; import { useGlobalStore } from '@Stores/useGlobalStore'; import { EuiFlexGroup } from '@elastic/eui'; import { getCategorization } from '@Api/services/categorizationService'; -import RoutePaths from '../../constants/routePaths'; +import { RoutePaths } from '../../constants/routePaths'; import { ContinueButton } from '../Buttons/ContinueButton'; import { ActionButton } from '../Buttons/ActionButton'; diff --git 
a/x-pack/plugins/integration_assistant/public/components/ViewResults/ViewResultsButtons.tsx b/x-pack/plugins/integration_assistant/public/components/ViewResults/ViewResultsButtons.tsx index 5f57af2be69c2..e5db8e43bda15 100644 --- a/x-pack/plugins/integration_assistant/public/components/ViewResults/ViewResultsButtons.tsx +++ b/x-pack/plugins/integration_assistant/public/components/ViewResults/ViewResultsButtons.tsx @@ -9,9 +9,9 @@ import React from 'react'; import { EuiFlexGroup } from '@elastic/eui'; import { ContinueButton } from '../Buttons/ContinueButton'; import { GoBackButton } from '../Buttons/GoBackButton'; -import RoutePaths from '../../constants/routePaths'; +import { RoutePaths } from '../../constants/routePaths'; -export const ViewResults = () => { +export const ViewResultsButtons = () => { return ( { return ( diff --git a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/CategorizationPage.tsx b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/CategorizationPage.tsx index 4b30014937377..fb03508574081 100644 --- a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/CategorizationPage.tsx +++ b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/CategorizationPage.tsx @@ -9,10 +9,10 @@ import React from 'react'; import { EuiPageTemplate, EuiSpacer } from '@elastic/eui'; import { useGlobalStore } from '../../stores/useGlobalStore'; -import EmptyPrompt from '../../components/EmptyPrompt/EmptyPrompt'; -import CategorizationButtons from '../../components/Categorization/CategorizationButtons'; -import PipelineResults from '../../components/IntegrationResults/PipelineResults'; -import RoutePaths from '../../constants/routePaths'; +import { EmptyPrompt } from '../../components/EmptyPrompt/EmptyPrompt'; +import { CategorizationButtons } from '../../components/Categorization/CategorizationButtons'; +import { PipelineResults } from '../../components/IntegrationResults/PipelineResults'; +import 
{ RoutePaths } from '../../constants/routePaths'; export const CategorizationPage = () => { const ingestPipeline = useGlobalStore((state) => state.ingestPipeline); diff --git a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/EcsMapperPage.tsx b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/EcsMapperPage.tsx index 09ec1154e6a2e..686f4067f5d62 100644 --- a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/EcsMapperPage.tsx +++ b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/EcsMapperPage.tsx @@ -8,10 +8,10 @@ import React from 'react'; import { EuiPageTemplate, EuiSpacer } from '@elastic/eui'; import { useGlobalStore } from '@Stores/useGlobalStore'; -import EcsFormStats from '../../components/Ecs/EcsFormStats'; -import EcsButtons from '../../components/Ecs/EcsButtons'; -import EcsForm from '../../components/Ecs/EcsForm'; -import EcsTable from '../../components/Ecs/EcsTable'; +import { EcsFormStats } from '../../components/Ecs/EcsFormStats'; +import { EcsButtons } from '../../components/Ecs/EcsButtons'; +import { EcsForm } from '../../components/Ecs/EcsForm'; +import { EcsTable } from '../../components/Ecs/EcsTable'; export const EcsMapperPage = () => { const ecsMappingTableState = useGlobalStore((state) => state.ecsMappingTableState); diff --git a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/RelatedPage.tsx b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/RelatedPage.tsx index abae0836b12e5..dc468e2135135 100644 --- a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/RelatedPage.tsx +++ b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/RelatedPage.tsx @@ -9,10 +9,10 @@ import React from 'react'; import { EuiPageTemplate, EuiSpacer } from '@elastic/eui'; import { useGlobalStore } from '@Stores/useGlobalStore'; -import EmptyPrompt from '../../components/EmptyPrompt/EmptyPrompt'; -import 
RelatedButtons from '../../components/Related/RelatedButtons'; -import PipelineResults from '../../components/IntegrationResults/PipelineResults'; -import RoutePaths from '../../constants/routePaths'; +import { EmptyPrompt } from '../../components/EmptyPrompt/EmptyPrompt'; +import { RelatedButtons } from '../../components/Related/RelatedButtons'; +import { PipelineResults } from '../../components/IntegrationResults/PipelineResults'; +import { RoutePaths } from '../../constants/routePaths'; export const RelatedPage = () => { const ingestPipeline = useGlobalStore((state) => state.ingestPipeline); diff --git a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/ViewResultsPage.tsx b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/ViewResultsPage.tsx index c20a1d4b1ef84..55c6f258598fe 100644 --- a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/ViewResultsPage.tsx +++ b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/ViewResultsPage.tsx @@ -8,11 +8,11 @@ import React from 'react'; import { EuiPageTemplate, EuiSpacer } from '@elastic/eui'; import { useGlobalStore } from '@Stores/useGlobalStore'; -import DocResults from '../../components/IntegrationResults/DocsResults'; -import PipelineResults from '../../components/IntegrationResults/PipelineResults'; -import EmptyPrompt from '../../components/EmptyPrompt/EmptyPrompt'; -import FinalResultsButtons from '../../components/ViewResults/ViewResultsButtons'; -import RoutePaths from '../../constants/routePaths'; +import { DocResults } from '../../components/IntegrationResults/DocsResults'; +import { PipelineResults } from '../../components/IntegrationResults/PipelineResults'; +import { EmptyPrompt } from '../../components/EmptyPrompt/EmptyPrompt'; +import { ViewResultsButtons } from '../../components/ViewResults/ViewResultsButtons'; +import { RoutePaths } from '../../constants/routePaths'; export const ViewResultsPage = () => { const ingestPipeline 
= useGlobalStore((state) => state.ingestPipeline); @@ -33,7 +33,7 @@ export const ViewResultsPage = () => { - + ); }; From 4e6a6bbcc93f2f4267574c151b63620173b26f69 Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Fri, 24 May 2024 16:26:15 +0200 Subject: [PATCH 06/62] adding license headers and fixing last type paths --- .../integration_assistant/common/constants.ts | 19 ++++ .../integration_assistant/common/index.ts | 88 +++++-------------- .../integration_assistant/common/types.ts | 71 +++++++++++++++ .../BuildIntegrationButtons.tsx | 4 +- .../Headers/IntegrationBuilderHeader.tsx | 6 +- .../public/constants/headerTitles.tsx | 16 ---- .../public/constants/routePaths.tsx | 16 ---- .../IntegrationBuilder/CategorizationPage.tsx | 4 +- .../graphs/categorization/categorization.ts | 8 +- .../server/graphs/categorization/constants.ts | 6 ++ .../server/graphs/categorization/errors.ts | 8 +- .../server/graphs/categorization/graph.ts | 6 ++ .../server/graphs/categorization/index.ts | 6 ++ .../server/graphs/categorization/invalid.ts | 8 +- .../server/graphs/categorization/prompts.ts | 6 ++ .../server/graphs/categorization/review.ts | 8 +- .../server/graphs/categorization/validate.ts | 6 ++ .../server/graphs/ecs/constants.ts | 7 ++ .../server/graphs/ecs/duplicates.ts | 6 ++ .../server/graphs/ecs/graph.ts | 6 ++ .../server/graphs/ecs/index.ts | 6 ++ .../server/graphs/ecs/invalid.ts | 6 ++ .../server/graphs/ecs/mapping.ts | 6 ++ .../server/graphs/ecs/missing.ts | 10 ++- .../server/graphs/ecs/pipeline.ts | 14 ++- .../server/graphs/ecs/prompts.ts | 6 ++ .../server/graphs/ecs/validate.ts | 6 ++ .../server/graphs/related/constants.ts | 6 ++ .../server/graphs/related/errors.ts | 8 +- .../server/graphs/related/graph.ts | 6 ++ .../server/graphs/related/index.ts | 6 ++ .../server/graphs/related/prompts.ts | 6 ++ .../server/graphs/related/related.ts | 8 +- .../server/graphs/related/review.ts | 8 +- .../server/providers/bedrock.ts | 6 ++ .../server/routes/index.ts | 5 +- 
.../integration_assistant/server/types.ts | 59 ------------- .../server/util/pipeline.ts | 6 ++ .../server/util/samples.ts | 6 ++ .../integration_assistant/server/util/util.ts | 6 ++ 40 files changed, 313 insertions(+), 182 deletions(-) create mode 100644 x-pack/plugins/integration_assistant/common/constants.ts create mode 100644 x-pack/plugins/integration_assistant/common/types.ts delete mode 100644 x-pack/plugins/integration_assistant/public/constants/headerTitles.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/constants/routePaths.tsx diff --git a/x-pack/plugins/integration_assistant/common/constants.ts b/x-pack/plugins/integration_assistant/common/constants.ts new file mode 100644 index 0000000000000..9e734ba291a91 --- /dev/null +++ b/x-pack/plugins/integration_assistant/common/constants.ts @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +// Plugin information +export const PLUGIN_ID = 'integrationAssistant'; + +// Public App Routes +export const INTEGRATION_ASSISTANT_APP_ROUTE = '/app/integration_assistant'; + +// Server API Routes +export const INTEGRATION_ASSISTANT_BASE_PATH = '/api/integration_assistant'; +export const ECS_GRAPH_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/ecs`; +export const CATEGORZATION_GRAPH_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/categorization`; +export const RELATED_GRAPH_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/related`; +export const INTEGRATION_BUILDER_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/build`; diff --git a/x-pack/plugins/integration_assistant/common/index.ts b/x-pack/plugins/integration_assistant/common/index.ts index 6047a20b9951a..0c9f6636928ac 100644 --- a/x-pack/plugins/integration_assistant/common/index.ts +++ b/x-pack/plugins/integration_assistant/common/index.ts @@ -5,71 +5,23 @@ * 2.0. */ -export const INTEGRATION_ASSISTANT_BASE_PATH = '/api/integration_assistant'; - -export const ECS_GRAPH_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/ecs`; - -export const CATEGORZATION_GRAPH_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/categorization`; - -export const RELATED_GRAPH_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/related`; - -export const INTEGRATION_BUILDER_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/build`; - -export interface BuildIntegrationAPIRequest { - packageName: string; - packageTitle: string; - packageVersion: string; - dataStreamName: string; - inputTypes: string[]; - formSamples: string[]; - ingestPipeline: object; - docs: object[]; -} - -export interface EcsMappingAPIRequest { - packageName: string; - dataStreamName: string; - formSamples: string[]; -} - -export interface EcsMappingNewPipelineAPIRequest { - packageName: string; - dataStreamName: string; - formSamples: string[]; - mapping: object; -} - -export interface CategorizationAPIRequest { - packageName: string; - dataStreamName: string; - formSamples: string[]; - ingestPipeline: 
object; -} - -export interface RelatedAPIRequest { - packageName: string; - dataStreamName: string; - formSamples: string[]; - ingestPipeline: object; -} - -export interface CategorizationApiResponse { - results: { - pipeline: object; - docs: object[]; - }; -} - -export interface RelatedApiResponse { - results: { - pipeline: object; - docs: object[]; - }; -} - -export interface EcsMappingApiResponse { - results: { - mapping: object; - current_pipeline: object; - }; -} +export type { + Pipeline, + BuildIntegrationAPIRequest, + EcsMappingAPIRequest, + EcsMappingNewPipelineAPIRequest, + CategorizationAPIRequest, + RelatedAPIRequest, +} from './types'; + +export type { CategorizationApiResponse, RelatedApiResponse, EcsMappingApiResponse } from './types'; + +export { + PLUGIN_ID, + INTEGRATION_ASSISTANT_APP_ROUTE, + ECS_GRAPH_PATH, + CATEGORZATION_GRAPH_PATH, + RELATED_GRAPH_PATH, + INTEGRATION_BUILDER_PATH, + INTEGRATION_ASSISTANT_BASE_PATH, +} from './constants'; diff --git a/x-pack/plugins/integration_assistant/common/types.ts b/x-pack/plugins/integration_assistant/common/types.ts new file mode 100644 index 0000000000000..6b97c7e7df391 --- /dev/null +++ b/x-pack/plugins/integration_assistant/common/types.ts @@ -0,0 +1,71 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +export interface Pipeline { + processors: any[]; +} + +// Server Request Schemas +export interface BuildIntegrationAPIRequest { + packageName: string; + packageTitle: string; + packageVersion: string; + dataStreamName: string; + inputTypes: string[]; + formSamples: string[]; + ingestPipeline: object; + docs: object[]; +} + +export interface EcsMappingAPIRequest { + packageName: string; + dataStreamName: string; + formSamples: string[]; +} + +export interface EcsMappingNewPipelineAPIRequest { + packageName: string; + dataStreamName: string; + formSamples: string[]; + mapping: object; +} + +export interface CategorizationAPIRequest { + packageName: string; + dataStreamName: string; + formSamples: string[]; + ingestPipeline: object; +} + +export interface RelatedAPIRequest { + packageName: string; + dataStreamName: string; + formSamples: string[]; + ingestPipeline: object; +} + +// Server Response Schemas +export interface CategorizationApiResponse { + results: { + pipeline: object; + docs: object[]; + }; +} + +export interface RelatedApiResponse { + results: { + pipeline: object; + docs: object[]; + }; +} + +export interface EcsMappingApiResponse { + results: { + mapping: object; + current_pipeline: object; + }; +} diff --git a/x-pack/plugins/integration_assistant/public/components/BuildIntegration/BuildIntegrationButtons.tsx b/x-pack/plugins/integration_assistant/public/components/BuildIntegration/BuildIntegrationButtons.tsx index 81bad651d3cfb..3f768659a7c29 100644 --- a/x-pack/plugins/integration_assistant/public/components/BuildIntegration/BuildIntegrationButtons.tsx +++ b/x-pack/plugins/integration_assistant/public/components/BuildIntegration/BuildIntegrationButtons.tsx @@ -11,8 +11,8 @@ import { useGlobalStore } from '@Stores/useGlobalStore'; import { buildIntegration, installIntegration } from '@api/services/integrationBuilderService'; import { RoutePaths } from '../../constants/routePaths'; -import ActionButton from '../Buttons/ActionButton'; -import 
GoBackButton from '../Buttons/GoBackButton'; +import { ActionButton } from '../Buttons/ActionButton'; +import { GoBackButton } from '../Buttons/GoBackButton'; export const BuildIntegrationButtons = () => { const integrationBuilderZipFile = useGlobalStore((state) => state.integrationBuilderZipFile); diff --git a/x-pack/plugins/integration_assistant/public/components/Headers/IntegrationBuilderHeader.tsx b/x-pack/plugins/integration_assistant/public/components/Headers/IntegrationBuilderHeader.tsx index d2660e2d4010c..c0555e96263af 100644 --- a/x-pack/plugins/integration_assistant/public/components/Headers/IntegrationBuilderHeader.tsx +++ b/x-pack/plugins/integration_assistant/public/components/Headers/IntegrationBuilderHeader.tsx @@ -8,10 +8,10 @@ import React from 'react'; import { EuiPageTemplate } from '@elastic/eui'; import { useLocation } from 'react-router-dom'; -import IntegrationBuilderSteps from '../IntegrationBuilderSteps/IntegrationBuilderSteps'; -import ProgressPortal from '../Portal/ProgressPortal'; +import { IntegrationBuilderSteps } from '../IntegrationBuilderSteps/IntegrationBuilderSteps'; +import { ProgressPortal } from '../Portal/ProgressPortal'; import { useGlobalStore } from '@Stores/useGlobalStore'; -import HeaderTitles from '../../constants/headerTitles'; +import { HeaderTitles } from '../../constants/headerTitles'; export const IntegrationBuilderHeader = () => { const location = useLocation(); diff --git a/x-pack/plugins/integration_assistant/public/constants/headerTitles.tsx b/x-pack/plugins/integration_assistant/public/constants/headerTitles.tsx deleted file mode 100644 index c347cce9698f8..0000000000000 --- a/x-pack/plugins/integration_assistant/public/constants/headerTitles.tsx +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -export enum HeaderTitles { - '/' = 'Base Page', - '/integration_builder/ecs_mapping' = 'ECS Mapping', - '/integration_builder/categorization' = 'Add Categorization', - '/integration_builder/related' = 'Add Related Fields', - '/integration_builder/results' = 'View Results', - '/integration_builder/build' = 'Build & Deploy', - '/agent_analyzer' = 'Agent Analyzer', -} diff --git a/x-pack/plugins/integration_assistant/public/constants/routePaths.tsx b/x-pack/plugins/integration_assistant/public/constants/routePaths.tsx deleted file mode 100644 index a10563fb8167b..0000000000000 --- a/x-pack/plugins/integration_assistant/public/constants/routePaths.tsx +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -export enum RoutePaths { - BASE_PATH = '/', - ECS_MAPPING_PATH = '/integration_builder/ecs_mapping', - CATEGORIZATION_PATH = '/integration_builder/categorization', - RELATED_PATH = '/integration_builder/related', - INTEGRATION_BUILDER_RESULTS_PATH = '/integration_builder/results', - INTEGRATION_BUILDER_BUILD_PATH = '/integration_builder/build', - AGENT_ANALYZER_PATH = '/agent_analyzer', -} diff --git a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/CategorizationPage.tsx b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/CategorizationPage.tsx index fb03508574081..e8a5c0b3c8adc 100644 --- a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/CategorizationPage.tsx +++ b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/CategorizationPage.tsx @@ -12,7 +12,7 @@ import { useGlobalStore } from '../../stores/useGlobalStore'; import { EmptyPrompt } from '../../components/EmptyPrompt/EmptyPrompt'; import { CategorizationButtons } from '../../components/Categorization/CategorizationButtons'; import { PipelineResults } from '../../components/IntegrationResults/PipelineResults'; -import { RoutePaths } from '../../constants/routePaths'; +import { ECS_GRAPH_PATH } from '../../../common'; export const CategorizationPage = () => { const ingestPipeline = useGlobalStore((state) => state.ingestPipeline); @@ -22,7 +22,7 @@ export const CategorizationPage = () => { ); } diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts index 327759b26bd90..ad6dcac358228 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts @@ -1,8 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ import { JsonOutputParser } from '@langchain/core/output_parsers'; import { getModel } from '../../providers/bedrock'; import { CategorizationState } from '../../types'; import { combineProcessors } from '../../util/pipeline'; -import { Pipeline } from '../../../common/types'; +import { Pipeline } from '../../../common'; import { CATEGORIZATION_MAIN_PROMPT } from './prompts'; export async function handleCategorization(state: CategorizationState) { diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/constants.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/constants.ts index 2e679ab136dcb..1b6f268965ac5 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/constants.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/constants.ts @@ -1,3 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ export const ECS_CATEGORIES = { api: 'Covers events from API calls, including those from OS and network protocols. Allowed event.type combinations: access, admin, allowed, change, creation, deletion, denied, end, info, start, user', authentication: diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts index 225cc43edf884..ec840e644879b 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts @@ -1,9 +1,15 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ import { JsonOutputParser } from '@langchain/core/output_parsers'; import { CATEGORIZATION_ERROR_PROMPT } from './prompts'; import { getModel } from '../../providers/bedrock'; import { CategorizationState } from '../../types'; import { combineProcessors } from '../../util/pipeline'; -import { Pipeline } from '../../../common/types'; +import { Pipeline } from '../../../common'; export async function handleErrors(state: CategorizationState) { const categorizationErrorPrompt = CATEGORIZATION_ERROR_PROMPT; diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts index b2e174f0f9ffa..74fe6ff9db4b1 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts @@ -1,3 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ import { StateGraph, StateGraphArgs, END, START } from '@langchain/langgraph'; import { CategorizationState } from '../../types'; import { modifySamples, formatSamples } from '../../util/samples'; diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/index.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/index.ts index a92a00b93ea4d..03e8cb360de67 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/index.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/index.ts @@ -1 +1,7 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ export { getCategorizationGraph } from './graph'; diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts index 108e36af44d24..69c47474af7c2 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts @@ -1,10 +1,16 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ import { JsonOutputParser } from '@langchain/core/output_parsers'; import { CATEGORIZATION_VALIDATION_PROMPT } from './prompts'; import { getModel } from '../../providers/bedrock'; import { CategorizationState } from '../../types'; import { combineProcessors } from '../../util/pipeline'; import { ECS_EVENT_TYPES_PER_CATEGORY } from './constants'; -import { Pipeline } from '../../../common/types'; +import { Pipeline } from '../../../common'; export async function handleInvalidCategorization(state: CategorizationState) { const categorizationInvalidPrompt = CATEGORIZATION_VALIDATION_PROMPT; diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/prompts.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/prompts.ts index af5066e8a0a7a..4b6649a663c11 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/prompts.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/prompts.ts @@ -1,3 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ import { ChatPromptTemplate } from '@langchain/core/prompts'; export const CATEGORIZATION_MAIN_PROMPT = ChatPromptTemplate.fromMessages([ diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts index cf317043be3c0..8f0860b1362bc 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts @@ -1,10 +1,16 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ import { JsonOutputParser } from '@langchain/core/output_parsers'; import { CATEGORIZATION_REVIEW_PROMPT } from './prompts'; import { getModel } from '../../providers/bedrock'; import { CategorizationState } from '../../types'; import { combineProcessors } from '../../util/pipeline'; import { ECS_EVENT_TYPES_PER_CATEGORY } from './constants'; -import { Pipeline } from '../../../common/types'; +import { Pipeline } from '../../../common'; export async function handleReview(state: CategorizationState) { const categorizationReviewPrompt = CATEGORIZATION_REVIEW_PROMPT; diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/validate.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/validate.ts index 9f79fb20c393f..5bd8eb1012dba 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/validate.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/validate.ts @@ -1,3 +1,9 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ import { CategorizationState } from '../../types'; import { ECS_EVENT_TYPES_PER_CATEGORY, EVENT_CATEGORIES, EVENT_TYPES } from './constants'; diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts index 71c71344c0f37..1e37046f6c594 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts @@ -1,3 +1,10 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + interface EcsFields { [key: string]: any; } diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts index 72108566d6952..dba8fffd7ff11 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts @@ -1,3 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ import { JsonOutputParser } from '@langchain/core/output_parsers'; import { ECS_DUPLICATES_PROMPT } from './prompts'; import { getModel } from '../../providers/bedrock'; diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts index 0025a5cd4b898..2d1d6764acc73 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts @@ -1,3 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ import { StateGraph, StateGraphArgs, END, START } from '@langchain/langgraph'; import { ECS_EXAMPLE_ANSWER, ECS_FIELDS } from './constants'; import { modifySamples, mergeSamples } from '../../util/samples'; diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/index.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/index.ts index 0f930a68699dd..91ea9fed3b3d3 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/index.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/index.ts @@ -1 +1,7 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ export { getEcsGraph } from './graph'; diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts index 249e64faf1c08..3e790106f3a7a 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts @@ -1,3 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ import { JsonOutputParser } from '@langchain/core/output_parsers'; import { ECS_INVALID_PROMPT } from './prompts'; import { getModel } from '../../providers/bedrock'; diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts index 9caf196f70717..4fc5685d09392 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts @@ -1,3 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ import { JsonOutputParser } from '@langchain/core/output_parsers'; import { ECS_MAIN_PROMPT } from './prompts'; import { getModel } from '../../providers/bedrock'; diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts index e0d01fb5acd47..39f5409941a18 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts @@ -1,10 +1,16 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ import { JsonOutputParser } from '@langchain/core/output_parsers'; -import { ECS_MISSING_PROMPT } from './prompts'; +import { ECS_MISSING_KEYS_PROMPT } from './prompts'; import { getModel } from '../../providers/bedrock'; import { EcsMappingState } from '../../types'; export async function handleMissingKeys(state: EcsMappingState) { - const ecsMissingPrompt = ECS_MISSING_PROMPT; + const ecsMissingPrompt = ECS_MISSING_KEYS_PROMPT; const model = getModel(); console.log('testing ecs missing'); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts index af96064daff0a..30389ac343f72 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts @@ -1,6 +1,12 @@ -import * as yaml from 'js-yaml'; +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +import { load } from 'js-yaml'; import { Environment, FileSystemLoader } from 'nunjucks'; -import * as path from 'path'; +import { join } from 'path'; import { ECS_TYPES } from './constants'; import { EcsMappingState } from '../../types'; @@ -152,7 +158,7 @@ export function createPipeline(state: EcsMappingState): IngestPipeline { // Retrieve all source field names from convert processors to populate single remove processor: const fieldsToRemove = processors.filter((p: any) => p.convert).map((p: any) => p.convert.field); - const templatesPath = path.join(__dirname, '../../templates'); + const templatesPath = join(__dirname, '../../templates'); const mappedValues = { processors, ecs_version: state.ecsVersion, @@ -170,7 +176,7 @@ export function createPipeline(state: EcsMappingState): IngestPipeline { }); const template = env.getTemplate('pipeline.yml.njk'); const renderedTemplate = template.render(mappedValues); - const ingestPipeline = yaml.load(renderedTemplate) as IngestPipeline; + const ingestPipeline = load(renderedTemplate) as IngestPipeline; return ingestPipeline; } catch (error) { console.error('Error rendering template:', error); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/prompts.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/prompts.ts index 6cfa56382f64d..b4464670e3662 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/prompts.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/prompts.ts @@ -1,3 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ import { ChatPromptTemplate } from '@langchain/core/prompts'; export const ECS_MAIN_PROMPT = ChatPromptTemplate.fromMessages([ [ diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts index 5640c27e58199..d23d4dcb2cd9d 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts @@ -1,3 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ import { ECS_FULL } from './constants'; import { EcsMappingState } from '../../types'; diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/constants.ts b/x-pack/plugins/integration_assistant/server/graphs/related/constants.ts index 3bf2ac0d418f6..61cc877667659 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/constants.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/constants.ts @@ -1,3 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ export const RELATED_ECS_FIELDS = { 'related.hash': { type: 'keyword', diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts b/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts index 5d5c2d340febe..696eae4dee503 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts @@ -1,9 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ import { JsonOutputParser } from '@langchain/core/output_parsers'; import { RELATED_ERROR_PROMPT } from './prompts'; import { getModel } from '../../providers/bedrock'; import { RelatedState } from '../../types'; import { combineProcessors } from '../../util/pipeline'; -import { Pipeline } from '../../../common/types'; +import { Pipeline } from '../../../common'; export async function handleErrors(state: RelatedState) { const relatedErrorPrompt = RELATED_ERROR_PROMPT; diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts index 3a09ce6f3f046..c0380925264a2 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts @@ -1,3 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ import { StateGraph, StateGraphArgs, END, START } from '@langchain/langgraph'; import { RelatedState } from '../../types'; import { modifySamples, formatSamples } from '../../util/samples'; diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/index.ts b/x-pack/plugins/integration_assistant/server/graphs/related/index.ts index eb205d904e83d..4294a4dd34ccd 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/index.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/index.ts @@ -1 +1,7 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ export { getRelatedGraph } from './graph'; diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/prompts.ts b/x-pack/plugins/integration_assistant/server/graphs/related/prompts.ts index 531e3203827c4..2a14b52907103 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/prompts.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/prompts.ts @@ -1,3 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ import { ChatPromptTemplate } from '@langchain/core/prompts'; export const RELATED_MAIN_PROMPT = ChatPromptTemplate.fromMessages([ diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/related.ts b/x-pack/plugins/integration_assistant/server/graphs/related/related.ts index 115aa6292c981..05281ca6bea9d 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/related.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/related.ts @@ -1,9 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ import { JsonOutputParser } from '@langchain/core/output_parsers'; import { RELATED_MAIN_PROMPT } from './prompts'; import { getModel } from '../../providers/bedrock'; import { RelatedState } from '../../types'; import { combineProcessors } from '../../util/pipeline'; -import { Pipeline } from '../../../common/types'; +import { Pipeline } from '../../../common'; export async function handleRelated(state: RelatedState) { const relatedMainPrompt = RELATED_MAIN_PROMPT; diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/review.ts b/x-pack/plugins/integration_assistant/server/graphs/related/review.ts index 4de13d427a015..5bd3898cf18d2 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/review.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/review.ts @@ -1,9 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ import { JsonOutputParser } from '@langchain/core/output_parsers'; import { RELATED_REVIEW_PROMPT } from './prompts'; import { getModel } from '../../providers/bedrock'; import { RelatedState } from '../../types'; import { combineProcessors } from '../../util/pipeline'; -import { Pipeline } from '../../../common/types'; +import { Pipeline } from '../../../common'; export async function handleReview(state: RelatedState) { const relatedReviewPrompt = RELATED_REVIEW_PROMPT; diff --git a/x-pack/plugins/integration_assistant/server/providers/bedrock.ts b/x-pack/plugins/integration_assistant/server/providers/bedrock.ts index beac9eb3863d1..f2becac6c9d39 100644 --- a/x-pack/plugins/integration_assistant/server/providers/bedrock.ts +++ b/x-pack/plugins/integration_assistant/server/providers/bedrock.ts @@ -1,3 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ import { BedrockChat } from '@langchain/community/chat_models/bedrock/web'; export function getModel(): BedrockChat { diff --git a/x-pack/plugins/integration_assistant/server/routes/index.ts b/x-pack/plugins/integration_assistant/server/routes/index.ts index 34d65dabb88c5..ebc597350969e 100644 --- a/x-pack/plugins/integration_assistant/server/routes/index.ts +++ b/x-pack/plugins/integration_assistant/server/routes/index.ts @@ -1,9 +1,8 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
*/ export { registerRoutes } from './register_routes'; diff --git a/x-pack/plugins/integration_assistant/server/types.ts b/x-pack/plugins/integration_assistant/server/types.ts index c6f9959715383..990b3c121bb7f 100644 --- a/x-pack/plugins/integration_assistant/server/types.ts +++ b/x-pack/plugins/integration_assistant/server/types.ts @@ -10,65 +10,6 @@ export interface IntegrationAssistantPluginSetup {} // eslint-disable-next-line @typescript-eslint/no-empty-interface export interface IntegrationAssistantPluginStart {} -export interface BuildIntegrationAPIRequest { - packageName: string; - packageTitle: string; - packageVersion: string; - dataStreamName: string; - inputTypes: string[]; - formSamples: string[]; - ingestPipeline: object; - docs: object[]; -} - -export interface EcsMappingAPIRequest { - packageName: string; - dataStreamName: string; - formSamples: string[]; -} - -export interface EcsMappingNewPipelineAPIRequest { - packageName: string; - dataStreamName: string; - formSamples: string[]; - mapping: object; -} - -export interface CategorizationAPIRequest { - packageName: string; - dataStreamName: string; - formSamples: string[]; - ingestPipeline: object; -} - -export interface RelatedAPIRequest { - packageName: string; - dataStreamName: string; - formSamples: string[]; - ingestPipeline: object; -} - -export interface CategorizationApiResponse { - results: { - pipeline: object; - docs: object[]; - }; -} - -export interface RelatedApiResponse { - results: { - pipeline: object; - docs: object[]; - }; -} - -export interface EcsMappingApiResponse { - results: { - mapping: object; - current_pipeline: object; - }; -} - export interface CategorizationState { rawSamples: string[]; samples: string[]; diff --git a/x-pack/plugins/integration_assistant/server/util/pipeline.ts b/x-pack/plugins/integration_assistant/server/util/pipeline.ts index c481d947a78db..89f075b839c25 100644 --- a/x-pack/plugins/integration_assistant/server/util/pipeline.ts +++ 
b/x-pack/plugins/integration_assistant/server/util/pipeline.ts @@ -1,3 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ import { deepCopy } from './util'; interface Pipeline { diff --git a/x-pack/plugins/integration_assistant/server/util/samples.ts b/x-pack/plugins/integration_assistant/server/util/samples.ts index c1eb41634203d..a01304c8402fc 100644 --- a/x-pack/plugins/integration_assistant/server/util/samples.ts +++ b/x-pack/plugins/integration_assistant/server/util/samples.ts @@ -1,3 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ import { CategorizationState, EcsMappingState, RelatedState } from '../types'; interface SampleObj { diff --git a/x-pack/plugins/integration_assistant/server/util/util.ts b/x-pack/plugins/integration_assistant/server/util/util.ts index 2376783e514aa..cacc21c0391ba 100644 --- a/x-pack/plugins/integration_assistant/server/util/util.ts +++ b/x-pack/plugins/integration_assistant/server/util/util.ts @@ -1,3 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ export function deepCopy(obj: T): T { return JSON.parse(JSON.stringify(obj)); } From 1da8c88c1272037a89b2353738173bde4a47c3e6 Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Sat, 25 May 2024 10:41:16 +0200 Subject: [PATCH 07/62] adding template files and more cleanup --- package.json | 11 +- .../integration_assistant/common/constants.ts | 2 +- .../integration_assistant/common/index.ts | 11 +- .../integration_assistant/common/types.ts | 37 +- .../integration_assistant/kibana.jsonc | 4 - .../integration_assistant/public/app.tsx | 61 +- .../BuildIntegrationButtons.tsx | 86 --- .../components/Buttons/ActionButton.tsx | 36 - .../Buttons/ContinueButton.test.tsx | 52 -- .../components/Buttons/ContinueButton.tsx | 54 -- .../components/Buttons/GoBackButton.tsx | 32 - .../public/components/Buttons/ResetButton.tsx | 36 - .../Categorization/CategorizationButtons.tsx | 70 -- .../public/components/Ecs/EcsButtons.tsx | 76 -- .../public/components/Ecs/EcsFileUpload.tsx | 73 -- .../public/components/Ecs/EcsForm.tsx | 150 ---- .../public/components/Ecs/EcsFormStats.tsx | 45 -- .../public/components/Ecs/EcsTable.tsx | 199 ----- .../components/EmptyPrompt/EmptyPrompt.tsx | 28 - .../Headers/IntegrationBuilderHeader.tsx | 27 - .../IntegrationBuilderSteps.tsx | 70 -- .../IntegrationResults/DocsResults.tsx | 38 - .../IntegrationResults/PipelineResults.tsx | 38 - .../components/Portal/ProgressPortal.tsx | 17 - .../components/Related/RelatedButtons.tsx | 68 -- .../ViewResults/ViewResultsButtons.tsx | 26 - .../integration_assistant/public/plugin.tsx | 17 +- .../integration_assistant/public/services.ts | 17 +- .../integration_assistant/public/types.ts | 11 +- .../server/graphs/ecs/graph.ts | 2 +- .../server/graphs/ecs/pipeline.ts | 2 +- .../server/routes/build_integration_routes.ts | 16 +- .../server/routes/categorization_routes.ts | 36 +- .../server/routes/ecs_routes.ts | 50 +- .../server/routes/related_routes.ts | 33 +- .../templates/agent/aws-cloudwatch.yml.njk | 76 ++ 
.../server/templates/agent/aws-s3.yml.njk | 130 ++++ .../agent/azure-blob-storage.yml.njk | 35 + .../templates/agent/azure-eventhub.yml.njk | 28 + .../templates/agent/cloudfoundry.yml.njk | 24 + .../server/templates/agent/common.yml.njk | 14 + .../server/templates/agent/filestream.yml.njk | 13 + .../server/templates/agent/gcp-pubsub.yml.njk | 27 + .../server/templates/agent/gcs.yml.njk | 35 + .../templates/agent/http_endpoint.yml.njk | 57 ++ .../server/templates/agent/journald.yml.njk | 44 ++ .../server/templates/agent/kafka.yml.njk | 100 +++ .../server/templates/agent/logfile.yml.njk | 13 + .../server/templates/agent/tcp.yml.njk | 19 + .../server/templates/agent/udp.yml.njk | 10 + .../server/templates/build/build.yml.njk | 3 + .../templates/changelog/changelog.yml.njk | 6 + .../templates/data_stream/fields/agent.yml | 44 ++ .../templates/data_stream/fields/beats.yml | 30 + .../templates/fields/base-fields.yml.njk | 20 + .../server/templates/img/logo.svg | 4 + .../manifest/aws-cloudwatch_manifest.yml.njk | 92 +++ .../manifest/aws-s3.yml_manifest.yml.njk | 177 +++++ .../azure-blob-storage_manifest.yml.njk | 74 ++ .../manifest/azure-eventhub_manifest.yml.njk | 74 ++ .../manifest/cloudfoundry_manifest.yml.njk | 101 +++ .../server/templates/manifest/common.yml.njk | 25 + .../templates/manifest/data_stream.yml.njk | 4 + .../manifest/filestream_manifest.yml.njk | 60 ++ .../manifest/gcp-pubsub_manifest.yml.njk | 64 ++ .../templates/manifest/gcs_manifest.yml.njk | 106 +++ .../manifest/http_endpoint_manifest.yml.njk | 133 ++++ .../manifest/journald_manifest.yml.njk | 77 ++ .../templates/manifest/kafka_manifest.yml.njk | 221 ++++++ .../manifest/logfile_manifest.yml.njk | 66 ++ .../server/templates/manifest/package.yml.njk | 28 + .../server/templates/manifest/ssl.yml.njk | 75 ++ .../templates/manifest/tcp_manifest.yml.njk | 57 ++ .../templates/manifest/udp_manifest.yml.njk | 39 + .../templates/{ => pipeline}/pipeline.yml.njk | 0 .../pipeline_tests/test-common-config.yml | 3 + 
.../server/templates/readme/README.md.njk | 24 + .../system_tests/docker-compose.yml.njk | 3 + .../system_tests/service-filestream.njk | 6 + .../templates/system_tests/service-gcs.njk | 7 + .../system_tests/service-logfile.njk | 6 + .../templates/system_tests/service-tcp.njk | 6 + .../templates/system_tests/service-udp.njk | 6 + .../test-filestream-config.yml.njk | 13 + .../system_tests/test-gcs-config.yml.njk | 10 + .../system_tests/test-logfile-config.yml.njk | 13 + .../system_tests/test-tcp-config.yml.njk | 7 + .../system_tests/test-udp-config.yml.njk | 7 + yarn.lock | 711 +++++++++++++++++- 89 files changed, 3208 insertions(+), 1350 deletions(-) delete mode 100644 x-pack/plugins/integration_assistant/public/components/BuildIntegration/BuildIntegrationButtons.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/components/Buttons/ActionButton.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.test.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/components/Buttons/GoBackButton.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/components/Buttons/ResetButton.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/components/Categorization/CategorizationButtons.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/components/Ecs/EcsButtons.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/components/Ecs/EcsFileUpload.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/components/Ecs/EcsForm.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/components/Ecs/EcsFormStats.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/components/Ecs/EcsTable.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/components/EmptyPrompt/EmptyPrompt.tsx delete mode 100644 
x-pack/plugins/integration_assistant/public/components/Headers/IntegrationBuilderHeader.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/components/IntegrationBuilderSteps/IntegrationBuilderSteps.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/components/IntegrationResults/DocsResults.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/components/IntegrationResults/PipelineResults.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/components/Portal/ProgressPortal.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/components/Related/RelatedButtons.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/components/ViewResults/ViewResultsButtons.tsx create mode 100644 x-pack/plugins/integration_assistant/server/templates/agent/aws-cloudwatch.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/agent/aws-s3.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/agent/azure-blob-storage.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/agent/azure-eventhub.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/agent/cloudfoundry.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/agent/common.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/agent/filestream.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/agent/gcp-pubsub.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/agent/gcs.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/agent/http_endpoint.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/agent/journald.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/agent/kafka.yml.njk create mode 100644 
x-pack/plugins/integration_assistant/server/templates/agent/logfile.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/agent/tcp.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/agent/udp.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/build/build.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/changelog/changelog.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/data_stream/fields/agent.yml create mode 100644 x-pack/plugins/integration_assistant/server/templates/data_stream/fields/beats.yml create mode 100644 x-pack/plugins/integration_assistant/server/templates/fields/base-fields.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/img/logo.svg create mode 100644 x-pack/plugins/integration_assistant/server/templates/manifest/aws-cloudwatch_manifest.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/manifest/aws-s3.yml_manifest.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/manifest/azure-blob-storage_manifest.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/manifest/azure-eventhub_manifest.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/manifest/cloudfoundry_manifest.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/manifest/common.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/manifest/data_stream.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/manifest/filestream_manifest.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/manifest/gcp-pubsub_manifest.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/manifest/gcs_manifest.yml.njk create mode 100644 
x-pack/plugins/integration_assistant/server/templates/manifest/http_endpoint_manifest.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/manifest/journald_manifest.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/manifest/kafka_manifest.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/manifest/logfile_manifest.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/manifest/package.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/manifest/ssl.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/manifest/tcp_manifest.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/manifest/udp_manifest.yml.njk rename x-pack/plugins/integration_assistant/server/templates/{ => pipeline}/pipeline.yml.njk (100%) create mode 100644 x-pack/plugins/integration_assistant/server/templates/pipeline_tests/test-common-config.yml create mode 100644 x-pack/plugins/integration_assistant/server/templates/readme/README.md.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/system_tests/docker-compose.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/system_tests/service-filestream.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/system_tests/service-gcs.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/system_tests/service-logfile.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/system_tests/service-tcp.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/system_tests/service-udp.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/system_tests/test-filestream-config.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/system_tests/test-gcs-config.yml.njk create mode 100644 
x-pack/plugins/integration_assistant/server/templates/system_tests/test-logfile-config.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/system_tests/test-tcp-config.yml.njk create mode 100644 x-pack/plugins/integration_assistant/server/templates/system_tests/test-udp-config.yml.njk diff --git a/package.json b/package.json index 129523e5fc1ad..ebcc2d827c4dd 100644 --- a/package.json +++ b/package.json @@ -94,6 +94,9 @@ }, "dependencies": { "@appland/sql-parser": "^1.5.1", + "@aws-crypto/sha256-js": "^5.2.0", + "@aws-sdk/credential-provider-node": "^3.583.0", + "@aws-sdk/types": "^3.577.0", "@babel/runtime": "^7.24.4", "@cfworker/json-schema": "^1.12.7", "@dnd-kit/core": "^6.1.0", @@ -935,10 +938,12 @@ "@paralleldrive/cuid2": "^2.2.2", "@reduxjs/toolkit": "1.9.7", "@slack/webhook": "^7.0.1", - "@smithy/eventstream-codec": "^2.0.12", + "@smithy/eventstream-codec": "^3.0.0", "@smithy/eventstream-serde-node": "^2.1.1", + "@smithy/protocol-http": "^4.0.0", + "@smithy/signature-v4": "^3.0.0", "@smithy/types": "^2.9.1", - "@smithy/util-utf8": "^2.0.0", + "@smithy/util-utf8": "^3.0.0", "@tanstack/react-query": "^4.29.12", "@tanstack/react-query-devtools": "^4.29.12", "@turf/along": "6.0.1", @@ -1733,4 +1738,4 @@ "zod-to-json-schema": "^3.22.3" }, "packageManager": "yarn@1.22.21" -} \ No newline at end of file +} diff --git a/x-pack/plugins/integration_assistant/common/constants.ts b/x-pack/plugins/integration_assistant/common/constants.ts index 9e734ba291a91..27b10b45cc0f3 100644 --- a/x-pack/plugins/integration_assistant/common/constants.ts +++ b/x-pack/plugins/integration_assistant/common/constants.ts @@ -14,6 +14,6 @@ export const INTEGRATION_ASSISTANT_APP_ROUTE = '/app/integration_assistant'; // Server API Routes export const INTEGRATION_ASSISTANT_BASE_PATH = '/api/integration_assistant'; export const ECS_GRAPH_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/ecs`; -export const CATEGORZATION_GRAPH_PATH = 
`${INTEGRATION_ASSISTANT_BASE_PATH}/categorization`; +export const CATEGORIZATION_GRAPH_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/categorization`; export const RELATED_GRAPH_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/related`; export const INTEGRATION_BUILDER_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/build`; diff --git a/x-pack/plugins/integration_assistant/common/index.ts b/x-pack/plugins/integration_assistant/common/index.ts index 0c9f6636928ac..34cd82738ca76 100644 --- a/x-pack/plugins/integration_assistant/common/index.ts +++ b/x-pack/plugins/integration_assistant/common/index.ts @@ -7,11 +7,10 @@ export type { Pipeline, - BuildIntegrationAPIRequest, - EcsMappingAPIRequest, - EcsMappingNewPipelineAPIRequest, - CategorizationAPIRequest, - RelatedAPIRequest, + BuildIntegrationApiRequest, + EcsMappingApiRequest, + CategorizationApiRequest, + RelatedApiRequest, } from './types'; export type { CategorizationApiResponse, RelatedApiResponse, EcsMappingApiResponse } from './types'; @@ -20,7 +19,7 @@ export { PLUGIN_ID, INTEGRATION_ASSISTANT_APP_ROUTE, ECS_GRAPH_PATH, - CATEGORZATION_GRAPH_PATH, + CATEGORIZATION_GRAPH_PATH, RELATED_GRAPH_PATH, INTEGRATION_BUILDER_PATH, INTEGRATION_ASSISTANT_BASE_PATH, diff --git a/x-pack/plugins/integration_assistant/common/types.ts b/x-pack/plugins/integration_assistant/common/types.ts index 6b97c7e7df391..d35bcecc23fd3 100644 --- a/x-pack/plugins/integration_assistant/common/types.ts +++ b/x-pack/plugins/integration_assistant/common/types.ts @@ -5,43 +5,54 @@ * 2.0. 
*/ +export interface ProcessorObject { + on_failure?: ProcessorKey[]; + ignore_failure?: boolean; + if?: string; + tag?: string; + [key: string]: any; +} + +export interface ProcessorKey { + [processorName: string]: ProcessorObject; +} + export interface Pipeline { - processors: any[]; + name?: string; + description?: string; + version?: number; + processors: ProcessorKey[]; + on_failure?: ProcessorKey[]; } // Server Request Schemas -export interface BuildIntegrationAPIRequest { +export interface BuildIntegrationApiRequest { packageName: string; packageTitle: string; packageVersion: string; dataStreamName: string; + dataStreamTitle: string; inputTypes: string[]; formSamples: string[]; ingestPipeline: object; docs: object[]; } -export interface EcsMappingAPIRequest { - packageName: string; - dataStreamName: string; - formSamples: string[]; -} - -export interface EcsMappingNewPipelineAPIRequest { +export interface EcsMappingApiRequest { packageName: string; dataStreamName: string; formSamples: string[]; - mapping: object; + mapping?: object; } -export interface CategorizationAPIRequest { +export interface CategorizationApiRequest { packageName: string; dataStreamName: string; formSamples: string[]; ingestPipeline: object; } -export interface RelatedAPIRequest { +export interface RelatedApiRequest { packageName: string; dataStreamName: string; formSamples: string[]; @@ -66,6 +77,6 @@ export interface RelatedApiResponse { export interface EcsMappingApiResponse { results: { mapping: object; - current_pipeline: object; + pipeline: object; }; } diff --git a/x-pack/plugins/integration_assistant/kibana.jsonc b/x-pack/plugins/integration_assistant/kibana.jsonc index b972696804d85..55443d71b7f09 100644 --- a/x-pack/plugins/integration_assistant/kibana.jsonc +++ b/x-pack/plugins/integration_assistant/kibana.jsonc @@ -23,10 +23,6 @@ "usageCollection", "console" ], - "requiredBundles": [ - "esUiShared", - "kibanaReact" - ], "extraPublicDirs": [ "common" ] diff --git 
a/x-pack/plugins/integration_assistant/public/app.tsx b/x-pack/plugins/integration_assistant/public/app.tsx index eb9589919d2ab..a98461bedb8aa 100644 --- a/x-pack/plugins/integration_assistant/public/app.tsx +++ b/x-pack/plugins/integration_assistant/public/app.tsx @@ -1,9 +1,8 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. */ import React from 'react'; @@ -16,20 +15,15 @@ import { EuiHorizontalRule, EuiListGroup, } from '@elastic/eui'; -import { RandomNumberRouteExample } from './random_number_example'; -import { RandomNumberBetweenRouteExample } from './random_number_between_example'; import { Services } from './services'; -import { PostMessageRouteExample } from './post_message_example'; -import { GetMessageRouteExample } from './get_message_example'; type Props = Services; function RoutingExplorer({ - fetchRandomNumber, - fetchRandomNumberBetween, - addSuccessToast, - postMessage, - getMessageById, + runEcsGraph, + runCategorizationGraph, + runRelatedGraph, + runIntegrationBuilder, }: Props) { return ( @@ -41,42 +35,17 @@ function RoutingExplorer({ - +

Run ECS graph

- - - - - - - - - + runEcsGraph(), + }, + ]} + />
diff --git a/x-pack/plugins/integration_assistant/public/components/BuildIntegration/BuildIntegrationButtons.tsx b/x-pack/plugins/integration_assistant/public/components/BuildIntegration/BuildIntegrationButtons.tsx deleted file mode 100644 index 3f768659a7c29..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/BuildIntegration/BuildIntegrationButtons.tsx +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import React from 'react'; -import { EuiFlexGroup } from '@elastic/eui'; -import { useGlobalStore } from '@Stores/useGlobalStore'; - -import { buildIntegration, installIntegration } from '@api/services/integrationBuilderService'; -import { RoutePaths } from '../../constants/routePaths'; -import { ActionButton } from '../Buttons/ActionButton'; -import { GoBackButton } from '../Buttons/GoBackButton'; - -export const BuildIntegrationButtons = () => { - const integrationBuilderZipFile = useGlobalStore((state) => state.integrationBuilderZipFile); - const packageName = useGlobalStore((state) => state.packageName); - const packageTitle = useGlobalStore((state) => state.packageTitle); - const packageVersion = useGlobalStore((state) => state.packageVersion); - const dataStreamName = useGlobalStore((state) => state.dataStreamName); - const inputTypes = useGlobalStore((state) => state.inputTypes); - const formSamples = useGlobalStore((state) => state.formSamples); - const ingestPipeline = useGlobalStore((state) => state.ingestPipeline); - const docs = useGlobalStore((state) => state.docs); - - const setIntegrationBuilderZipFile = useGlobalStore( - (state) => state.setIntegrationBuilderZipFile - ); - const setIntegrationBuilderStepsState = useGlobalStore( - (state) => state.setIntegrationBuilderStepsState - ); - - 
const onBuildClick = async () => { - const req = { - packageName, - packageTitle, - packageVersion, - dataStreamName, - inputTypes, - formSamples, - ingestPipeline, - docs, - }; - const response = await buildIntegration(req); - if (response) { - setIntegrationBuilderZipFile(response); - console.log('Integration built successfully', response.name); - setIntegrationBuilderStepsState('integrationBuilderStep5', 'complete'); - } - }; - - const onDownloadClick = () => { - if (integrationBuilderZipFile) { - const url = window.URL.createObjectURL(integrationBuilderZipFile); - const a = document.createElement('a'); - a.href = url; - a.download = integrationBuilderZipFile.name; - document.body.appendChild(a); - a.click(); - a.remove(); - window.URL.revokeObjectURL(url); - } - }; - - const onInstallClick = async () => { - if (integrationBuilderZipFile) { - installIntegration(integrationBuilderZipFile); - } - console.log('installed'); - }; - - return ( - - - - - - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/components/Buttons/ActionButton.tsx b/x-pack/plugins/integration_assistant/public/components/Buttons/ActionButton.tsx deleted file mode 100644 index 3ac6a4c69c15c..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/Buttons/ActionButton.tsx +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import React from 'react'; -import { EuiButton } from '@elastic/eui'; -import { MouseEventHandler } from 'react'; - -interface ActionButtonProps { - text: string; - onActionClick: MouseEventHandler; - isLoading?: boolean; - isDisabled?: boolean; -} - -export const ActionButton = ({ - text, - onActionClick, - isLoading = false, - isDisabled = false, -}: ActionButtonProps) => { - return ( - - {text} - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.test.tsx b/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.test.tsx deleted file mode 100644 index 4573953361aba..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.test.tsx +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import React from 'react'; -import { beforeEach, describe, expect, it } from 'vitest'; -import { act, render, screen } from '@testing-library/react'; -import { useGlobalStore } from '@Stores/useGlobalStore'; -import { userEvent } from '@testing-library/user-event'; -import ContinueButton from './ContinueButton'; -import { RoutePaths } from '../../constants/routePaths'; -import { BrowserRouter as Router } from 'react-router-dom'; - -describe('ContinueButton Tests', () => { - beforeEach(() => { - useGlobalStore.getState().setContinueButtonState('ecsButtonContinue', false); - useGlobalStore.getState().setSelected(RoutePaths.ECS_MAPPING_PATH); - }); - describe('Click', () => { - it('Check State changes', async () => { - useGlobalStore.getState().setSelected(RoutePaths.ECS_MAPPING_PATH); - render( - - - - ); - const user = userEvent.setup(); - await act(async () => { - await user.click(await screen.getByText('Continue')); - }); - expect(useGlobalStore.getState().selected === RoutePaths.INGEST_PIPELINES_PATH).toBe(true); - }); - }); - describe('Rendering', () => { - it('ContinueButton Render', async () => { - const ecsButtonContinue = useGlobalStore.getState().ecsButtonContinue; - render( - - - - ); - expect(screen.getByLabelText('continue-button')).toBeDefined(); - expect(screen.getByLabelText('continue-button')).toBeDisabled(); - }); - }); -}); diff --git a/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.tsx b/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.tsx deleted file mode 100644 index 10bd9217810b1..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/Buttons/ContinueButton.tsx +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import React from 'react'; -import { EuiButton } from '@elastic/eui'; -import { useGlobalStore } from '@Stores/useGlobalStore'; -import { useNavigate } from 'react-router-dom'; - -interface ContinueButtonProps { - continuePath: string; - isDisabled: boolean; - currentStep: string; - completeStep: string; -} - -export const ContinueButton = ({ - continuePath, - isDisabled, - currentStep, - completeStep, -}: ContinueButtonProps) => { - const setSelected = useGlobalStore((state) => state.setSelected); - const setIntegrationBuilderStepsState = useGlobalStore( - (state) => state.setIntegrationBuilderStepsState - ); - - const navigate = useNavigate(); - const selectAndNavigate = (path) => { - setSelected(path); - navigate(path); - }; - - const onContinueClick = () => { - selectAndNavigate(continuePath); - setIntegrationBuilderStepsState(completeStep, 'complete'); - setIntegrationBuilderStepsState(currentStep, 'current'); - }; - - return ( - - Continue - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/components/Buttons/GoBackButton.tsx b/x-pack/plugins/integration_assistant/public/components/Buttons/GoBackButton.tsx deleted file mode 100644 index 52ca9503eba25..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/Buttons/GoBackButton.tsx +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import React from 'react'; -import { EuiButton } from '@elastic/eui'; -import { useGlobalStore } from '@Stores/useGlobalStore'; -import { RoutePaths } from '../../constants/routePaths'; -import { useNavigate } from 'react-router-dom'; - -interface GoBackButtonProps { - path: RoutePaths; -} - -export const GoBackButton = ({ path }: GoBackButtonProps) => { - const setSelected = useGlobalStore((state) => state.setSelected); - const navigate = useNavigate(); - - const onGoBackClick = () => { - setSelected(path); - navigate(-1); - }; - - return ( - - Go Back - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/components/Buttons/ResetButton.tsx b/x-pack/plugins/integration_assistant/public/components/Buttons/ResetButton.tsx deleted file mode 100644 index 74eaedf805d35..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/Buttons/ResetButton.tsx +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import React from 'react'; -import { EuiButton } from '@elastic/eui'; -import { useGlobalStore } from '@Stores/useGlobalStore'; - -export const ResetButton = () => { - const resetEcsMappingFormState = useGlobalStore((state) => state.resetEcsMappingFormState); - const resetChainItemsState = useGlobalStore((state) => state.resetChainItemsState); - const resetEcsMappingTableState = useGlobalStore((state) => state.resetEcsMappingTableState); - const resetIntegrationBuilderStepsState = useGlobalStore( - (state) => state.resetIntegrationBuilderStepsState - ); - const resetContinueButtonState = useGlobalStore((state) => state.resetContinueButtonState); - const resetIsLoadingState = useGlobalStore((state) => state.resetIsLoadingState); - - const onResetClick = () => { - resetEcsMappingFormState(); - resetChainItemsState(); - resetEcsMappingTableState(); - resetIntegrationBuilderStepsState(); - resetContinueButtonState(); - resetIsLoadingState(); - }; - - return ( - - Reset - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/components/Categorization/CategorizationButtons.tsx b/x-pack/plugins/integration_assistant/public/components/Categorization/CategorizationButtons.tsx deleted file mode 100644 index 41276e60ef2cc..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/Categorization/CategorizationButtons.tsx +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import React from 'react'; -import { useGlobalStore } from '@Stores/useGlobalStore'; -import { EuiFlexGroup } from '@elastic/eui'; -import { getCategorization } from '@Api/services/categorizationService'; -import { RoutePaths } from '../../constants/routePaths'; - -import { ContinueButton } from '../Buttons/ContinueButton'; -import { ActionButton } from '../Buttons/ActionButton'; -import { GoBackButton } from '../Buttons/GoBackButton'; - -export const CategorizationButtons = () => { - const packageName = useGlobalStore((state) => state.packageName); - const dataStreamName = useGlobalStore((state) => state.dataStreamName); - const formSamples = useGlobalStore((state) => state.formSamples); - const categorizationIsLoading = useGlobalStore((state) => state.categorizationIsLoading); - const categorizationButtonContinue = useGlobalStore( - (state) => state.categorizationButtonContinue - ); - const ingestPipeline = useGlobalStore((state) => state.ingestPipeline); - const setIsLoadingState = useGlobalStore((state) => state.setIsLoadingState); - const setIntegrationBuilderChainItemsState = useGlobalStore( - (state) => state.setIntegrationBuilderChainItemsState - ); - const setContinueButtonState = useGlobalStore((state) => state.setContinueButtonState); - const setIsPortalLoadingState = useGlobalStore((state) => state.setIsPortalLoadingState); - - const onCreateCategorizationClick = async () => { - setIsLoadingState('categorizationIsLoading', true); - setIsPortalLoadingState(true); - if (ingestPipeline === undefined) { - setIsLoadingState('categorizationIsLoading', false); - setIsPortalLoadingState(false); - return; - } - const req = { packageName, dataStreamName, formSamples, ingestPipeline }; - const response = await getCategorization(req); - if (response.results.pipeline !== undefined) { - setIntegrationBuilderChainItemsState('ingestPipeline', response.results.pipeline); - setIntegrationBuilderChainItemsState('docs', response.results.docs); - 
setContinueButtonState('categorizationButtonContinue', true); - } - setIsLoadingState('categorizationIsLoading', false); - setIsPortalLoadingState(false); - }; - - return ( - - - - - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsButtons.tsx b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsButtons.tsx deleted file mode 100644 index ef929d08fb75a..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsButtons.tsx +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import React from 'react'; -import { EuiFlexGroup } from '@elastic/eui'; -import { useGlobalStore } from '@Stores/useGlobalStore'; -import { getEcsMapping, formatEcsResponse } from '@Api/services/ecsMappingService'; -import { RoutePaths } from '../../constants/routePaths'; -import { ContinueButton } from '../Buttons/ContinueButton'; -import { ActionButton } from '../Buttons/ActionButton'; -import { ResetButton } from '../Buttons/ResetButton'; - -export const EcsButtons = () => { - const packageName = useGlobalStore((state) => state.packageName); - const dataStreamName = useGlobalStore((state) => state.dataStreamName); - const formSamples = useGlobalStore((state) => state.formSamples); - const ecsMappingIsLoading = useGlobalStore((state) => state.ecsMappingIsLoading); - const ecsButtonContinue = useGlobalStore((state) => state.ecsButtonContinue); - const setEcsMappingTableState = useGlobalStore((state) => state.setEcsMappingTableState); - const setEcsMappingTableItemsWithEcs = useGlobalStore( - (state) => state.setEcsMappingTableItemsWithEcs - ); - const setIntegrationBuilderChainItemsState = useGlobalStore( - (state) => state.setIntegrationBuilderChainItemsState - ); - const setIsLoadingState 
= useGlobalStore((state) => state.setIsLoadingState); - const setContinueButtonState = useGlobalStore((state) => state.setContinueButtonState); - const setIsPortalLoadingState = useGlobalStore((state) => state.setIsPortalLoadingState); - - const onCreateEcsMappingClick = async () => { - setIsLoadingState('ecsMappingIsLoading', true); - setIsPortalLoadingState(true); - const req = { packageName, dataStreamName, formSamples }; - const response = await getEcsMapping(req); - if (response.results.mapping !== undefined) { - setIntegrationBuilderChainItemsState('mapping', response.results.mapping); - setIntegrationBuilderChainItemsState('ingestPipeline', response.results.current_pipeline); - - const formatedEcsTableData = formatEcsResponse( - response, - packageName, - dataStreamName, - formSamples - ); - setEcsMappingTableState(formatedEcsTableData); - - const count = formatedEcsTableData.filter((item) => item.isEcs === true).length; - setEcsMappingTableItemsWithEcs(count); - - setContinueButtonState('ecsButtonContinue', true); - } - setIsLoadingState('ecsMappingIsLoading', false); - setIsPortalLoadingState(false); - }; - return ( - - - - - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsFileUpload.tsx b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsFileUpload.tsx deleted file mode 100644 index 8f71d37166f88..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsFileUpload.tsx +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import React from 'react'; -import { EuiFilePicker, useGeneratedHtmlId } from '@elastic/eui'; -import { useGlobalStore } from '@Stores/useGlobalStore'; - -export const EcsFileUpload = () => { - const filePickerId = useGeneratedHtmlId({ prefix: 'filePicker' }); - const addFormSamples = useGlobalStore((state) => state.addFormSamples); - - const onHandleFileChange = (files: FileList | null) => { - if (!files) return; - - Array.from(files).forEach(processFile); - }; - - const processFile = (file: File) => { - if (!file.name.endsWith('.ndjson')) { - console.warn(`Skipped file ${file.name}, unsupported file extension.`); - return; - } - - const reader = new FileReader(); - - reader.onload = handleFileLoad; - reader.onerror = handleFileError; - - reader.readAsText(file); - }; - - const handleFileLoad = (e: ProgressEvent) => { - const text = e.target?.result; - if (typeof text !== 'string') return; - - const validLines = validateAndExtractLines(text); - if (validLines.length === 0) return; - addFormSamples(validLines); - }; - - const validateAndExtractLines = (text: string): string[] => { - const validLines: string[] = []; - text.split('\n').forEach((line, index) => { - try { - if (line.trim()) { - JSON.parse(line); - validLines.push(line); - } - } catch (error) { - console.error(`Error parsing line ${index + 1}: ${line}`, error); - } - }); - return validLines; - }; - - const handleFileError = (e: ProgressEvent) => { - console.error('Failed to read file:', e); - }; - - return ( - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsForm.tsx b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsForm.tsx deleted file mode 100644 index 7f0bb26bbe9dd..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsForm.tsx +++ /dev/null @@ -1,150 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import React from 'react'; -import { - EuiForm, - EuiFieldText, - EuiFormRow, - EuiComboBox, - EuiPanel, - EuiFlexGroup, - EuiSpacer, - EuiSelect, - useGeneratedHtmlId, -} from '@elastic/eui'; -import { EcsButtons } from './EcsButtons'; -import { useGlobalStore } from '@Stores/useGlobalStore'; -import { EcsFileUpload } from './EcsFileUpload'; - -export const EcsForm = () => { - const packageName = useGlobalStore((state) => state.packageName); - const packageTitle = useGlobalStore((state) => state.packageTitle); - const packageVersion = useGlobalStore((state) => state.packageVersion); - const dataStreamName = useGlobalStore((state) => state.dataStreamName); - const dataStreamTitle = useGlobalStore((state) => state.dataStreamTitle); - const logFormat = useGlobalStore((state) => state.logFormat); - const inputTypes = useGlobalStore((state) => state.inputTypes); - const setEcsMappingFormValue = useGlobalStore((state) => state.setEcsMappingFormValue); - const setEcsMappingFormArrayValue = useGlobalStore((state) => state.setEcsMappingFormArrayValue); - const selectLogId = useGeneratedHtmlId({ prefix: 'log_format' }); - const handleFormStateChange = (key: string, value: string) => { - setEcsMappingFormValue(key, value); - }; - const onInputTypeChange = (selected) => { - setEcsMappingFormArrayValue( - 'inputTypes', - selected.map((item) => item.label) - ); - }; - - return ( - - - - - handleFormStateChange('packageName', e.target.value)} - /> - - - handleFormStateChange('packageTitle', e.target.value)} - /> - - - handleFormStateChange('packageVersion', e.target.value)} - /> - - - handleFormStateChange('dataStreamName', e.target.value)} - /> - - - handleFormStateChange('dataStreamTitle', e.target.value)} - /> - - - handleFormStateChange('logFormat', e.target.value)} - aria-label="log-format-select" - /> - - - ({ label: type }))} - options={[ - 
{ label: 'filestream', content: 'Log File' }, - { label: 'tcp', content: 'TCP' }, - { label: 'udp', content: 'UDP' }, - { label: 'cel', content: 'HTTP API' }, - { label: 'gcp-pubsub', content: 'GCP Pubsub' }, - { label: 'gcs', content: 'Google Cloud Storage' }, - { label: 'http_endpoint', content: 'Incoming HTTP Webhooks' }, - { label: 'journald', content: 'JournalD' }, - { label: 'kafka', content: 'Kafka' }, - { label: 'cloudfoundry', content: 'CloudFoundry' }, - { label: 'aws-cloudwatch', content: 'AWS Cloudwatch' }, - { label: 'aws-s3', content: 'AWS S3' }, - { label: 'azure-blob-storage', content: 'Azure Blob Storage' }, - { label: 'azure-eventhub', content: 'Azure Eventhub' }, - ]} - onChange={onInputTypeChange} - /> - - - - - - - - - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsFormStats.tsx b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsFormStats.tsx deleted file mode 100644 index 08d114cc991b4..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsFormStats.tsx +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import React from 'react'; -import { EuiStat, EuiFlexGroup, EuiFlexItem, EuiPanel } from '@elastic/eui'; -import { useGlobalStore } from '@Stores/useGlobalStore'; - -export const EcsFormStats = () => { - const sampleCount = useGlobalStore((state) => state.sampleCount); - const uniqueKeysCount = useGlobalStore((state) => state.uniqueKeysCount); - const ecsMappingTableItemsWithEcs = useGlobalStore((state) => state.ecsMappingTableItemsWithEcs); - - return ( - - - - - - - - - - - - - - - - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsTable.tsx b/x-pack/plugins/integration_assistant/public/components/Ecs/EcsTable.tsx deleted file mode 100644 index c3430ebac810d..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/Ecs/EcsTable.tsx +++ /dev/null @@ -1,199 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import React from 'react'; -import { - EuiBasicTable, - EuiFlexItem, - EuiButtonEmpty, - EuiPopover, - EuiText, - EuiBasicTableColumn, - EuiInlineEditText, - EuiFlexGroup, - EuiPanel, -} from '@elastic/eui'; -import { useState } from 'react'; -import { getUpdatedPipeline } from '@Api/services/ecsMappingService'; -import { useGlobalStore } from '@Stores/useGlobalStore'; -import { EcsMappingTableItem } from '../../types'; - -export const EcsTable = () => { - const packageName = useGlobalStore((state) => state.packageName); - const dataStreamName = useGlobalStore((state) => state.dataStreamName); - const formSamples = useGlobalStore((state) => state.formSamples); - const ecsMappingIsLoading = useGlobalStore((state) => state.ecsMappingIsLoading); - const ecsMappingTablePopoverState = useGlobalStore((state) => state.ecsMappingTablePopoverState); - const ecsMappingTableState = useGlobalStore((state) => state.ecsMappingTableState); - const mapping = useGlobalStore((state) => state.mapping); - const setEcsMappingTablePopoverState = useGlobalStore( - (state) => state.setEcsMappingTablePopoverState - ); - const setIntegrationBuilderChainItemsState = useGlobalStore( - (state) => state.setIntegrationBuilderChainItemsState - ); - const setContinueButtonState = useGlobalStore((state) => state.setContinueButtonState); - const setIsLoadingState = useGlobalStore((state) => state.setIsLoadingState); - const updateEcsMappingTableItem = useGlobalStore((state) => state.updateEcsMappingTableItem); - const updateChainItem = useGlobalStore((state) => state.updateChainItem); - const [pageIndex, setPageIndex] = useState(0); - const [pageSize, setPageSize] = useState(10); - const closePopover = (id) => { - setEcsMappingTablePopoverState(id); - }; - - const onSaveDestinationFieldClick = async (id, newDestinationField, sourceField) => { - setIsLoadingState('ecsMappingIsLoading', true); - setContinueButtonState('ecsButtonContinue', false); - updateEcsMappingTableItem(id, 
newDestinationField); - updateChainItem( - `${packageName}.${dataStreamName}.${sourceField}`, - newDestinationField, - 'mapping' - ); - - const req = { packageName, dataStreamName, formSamples, mapping }; - const response = await getUpdatedPipeline(req); - if (response.results.mapping !== undefined) { - setIntegrationBuilderChainItemsState('mapping', response.results.mapping); - setContinueButtonState('ecsButtonContinue', true); - } - if (response.results.current_pipeline !== undefined) { - setIntegrationBuilderChainItemsState('ingestPipeline', response.results.current_pipeline); - setContinueButtonState('ecsButtonContinue', true); - } - setIsLoadingState('ecsMappingIsLoading', false); - }; - - const onViewDocumentationButtonClick = (id) => { - setEcsMappingTablePopoverState(id); - }; - const onTableChange = ({ page }) => { - if (page) { - const { index: pageIndex, size: pageSize } = page; - setPageIndex(pageIndex); - setPageSize(pageSize); - } - }; - - const getEcsTablePage = (fields: EcsMappingTableItem[], pageIndex: number, pageSize: number) => { - let pageOfItems; - - if (!pageIndex && !pageSize) { - pageOfItems = fields; - } else { - const startIndex = pageIndex * pageSize; - pageOfItems = fields.slice(startIndex, Math.min(startIndex + pageSize, fields.length)); - } - - return { - pageOfItems, - totalItemCount: fields.length, - }; - }; - - const { pageOfItems, totalItemCount } = getEcsTablePage( - ecsMappingTableState, - pageIndex, - pageSize - ); - - const pagination = { - pageIndex, - pageSize, - totalItemCount, - pageSizeOptions: [10, 0], - showPerPageOptions: true, - }; - - const columns: Array> = [ - { - field: 'sourceField', - name: 'Source Field', - truncateText: true, - dataType: 'string', - width: '20%', - }, - { - field: 'destinationField', - name: 'Destination Field', - truncateText: true, - width: '20%', - render: (destinationField, item) => { - const label = `destination-field-${item.id}`; - return ( - - onSaveDestinationFieldClick(item.id, 
newDestinationField, item.sourceField) - } - defaultValue={destinationField} - placeholder="destination.field.name" - /> - ); - }, - }, - { - field: 'isEcs', - name: 'ECS Field', - dataType: 'boolean', - textOnly: true, - width: '5%', - render: (isEcs) => (isEcs ? 'Yes' : 'No'), - }, - { - field: 'exampleValue', - name: 'Example Value', - dataType: 'string', - textOnly: true, - truncateText: true, - width: '15%', - }, - { - field: 'description', - name: 'Documentation', - width: '10%', - render: (description, item) => { - const button = ( - onViewDocumentationButtonClick(item.id)} - > - View Documentation - - ); - return ( - closePopover(item.id)} - > - {description || 'No documentation available'} - - ); - }, - }, - ]; - - return ( - - - - - - - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/components/EmptyPrompt/EmptyPrompt.tsx b/x-pack/plugins/integration_assistant/public/components/EmptyPrompt/EmptyPrompt.tsx deleted file mode 100644 index 4bc784eb31f76..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/EmptyPrompt/EmptyPrompt.tsx +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import React from 'react'; -import { EuiPageTemplate } from '@elastic/eui'; -import { GoBackButton } from '../Buttons/GoBackButton'; -import { RoutePaths } from '../../constants/routePaths'; - -interface EmptyPromptProps { - title: string; - description: string; - goBackPath: RoutePaths; -} - -export const EmptyPrompt = ({ title, description, goBackPath }: EmptyPromptProps) => { - return ( - {title}} - actions={} - > - {description} - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/components/Headers/IntegrationBuilderHeader.tsx b/x-pack/plugins/integration_assistant/public/components/Headers/IntegrationBuilderHeader.tsx deleted file mode 100644 index c0555e96263af..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/Headers/IntegrationBuilderHeader.tsx +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import React from 'react'; -import { EuiPageTemplate } from '@elastic/eui'; -import { useLocation } from 'react-router-dom'; -import { IntegrationBuilderSteps } from '../IntegrationBuilderSteps/IntegrationBuilderSteps'; -import { ProgressPortal } from '../Portal/ProgressPortal'; -import { useGlobalStore } from '@Stores/useGlobalStore'; -import { HeaderTitles } from '../../constants/headerTitles'; - -export const IntegrationBuilderHeader = () => { - const location = useLocation(); - const isPortalLoading = useGlobalStore((state) => state.isPortalLoading); - const pageTitle = HeaderTitles[location.pathname as keyof typeof HeaderTitles] || 'Unknown Page'; - return ( - <> - - {pageTitle && pageTitle !== 'Base Page' && } - {isPortalLoading && } - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/components/IntegrationBuilderSteps/IntegrationBuilderSteps.tsx b/x-pack/plugins/integration_assistant/public/components/IntegrationBuilderSteps/IntegrationBuilderSteps.tsx deleted file mode 100644 index fc3547b8280bd..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/IntegrationBuilderSteps/IntegrationBuilderSteps.tsx +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import React from 'react'; -import { EuiStepsHorizontal, EuiStepsHorizontalProps } from '@elastic/eui'; -import { useGlobalStore } from '@Stores/useGlobalStore'; -import { useNavigate } from 'react-router-dom'; - -import { RoutePaths } from '../../constants/routePaths'; - -export const IntegrationBuilderSteps = () => { - const step1 = useGlobalStore((state) => state.integrationBuilderStep1); - const step2 = useGlobalStore((state) => state.integrationBuilderStep2); - const step3 = useGlobalStore((state) => state.integrationBuilderStep3); - const step4 = useGlobalStore((state) => state.integrationBuilderStep4); - const step5 = useGlobalStore((state) => state.integrationBuilderStep5); - - const setSelected = useGlobalStore((state) => state.setSelected); - - const navigate = useNavigate(); - - const selectAndNavigate = (path) => { - setSelected(path); - navigate(path); - }; - - const horizontalSteps = [ - { - title: 'ECS Mapping', - status: step1, - onClick: () => { - selectAndNavigate(RoutePaths.ECS_MAPPING_PATH); - }, - }, - { - title: 'Add Categorization', - status: step2, - onClick: () => { - selectAndNavigate(RoutePaths.CATEGORIZATION_PATH); - }, - }, - { - title: 'Add Related Fields', - status: step3, - onClick: () => { - selectAndNavigate(RoutePaths.RELATED_PATH); - }, - }, - { - title: 'View Results', - status: step4, - onClick: () => { - selectAndNavigate(RoutePaths.INTEGRATION_BUILDER_RESULTS_PATH); - }, - }, - { - title: 'Build & Deploy', - status: step5, - onClick: () => { - selectAndNavigate(RoutePaths.INTEGRATION_BUILDER_BUILD_PATH); - }, - }, - ] as EuiStepsHorizontalProps['steps']; - - return ; -}; diff --git a/x-pack/plugins/integration_assistant/public/components/IntegrationResults/DocsResults.tsx b/x-pack/plugins/integration_assistant/public/components/IntegrationResults/DocsResults.tsx deleted file mode 100644 index f34083213efe5..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/IntegrationResults/DocsResults.tsx 
+++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import React from 'react'; -import { EuiCodeBlock, EuiAccordion, EuiPanel, useGeneratedHtmlId } from '@elastic/eui'; -import { css } from '@emotion/react'; - -interface DocsResultsProps { - docs: object[]; -} - -export const DocResults = ({ docs }: DocsResultsProps) => { - const simpleAccordionId = useGeneratedHtmlId({ prefix: 'docs_results' }); - - return ( -
- div:nth-child(2) { - block-size: auto !important; - } - `} - > - - - {JSON.stringify(docs, null, 2)} - - - -
- ); -}; diff --git a/x-pack/plugins/integration_assistant/public/components/IntegrationResults/PipelineResults.tsx b/x-pack/plugins/integration_assistant/public/components/IntegrationResults/PipelineResults.tsx deleted file mode 100644 index 073bb102d9491..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/IntegrationResults/PipelineResults.tsx +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import React from 'react'; -import { EuiCodeBlock, EuiAccordion, EuiPanel, useGeneratedHtmlId } from '@elastic/eui'; -import { css } from '@emotion/react'; - -interface PipelineResultsProps { - pipeline: object; -} - -export const PipelineResults = ({ pipeline }: PipelineResultsProps) => { - const simpleAccordionId = useGeneratedHtmlId({ prefix: 'ingest_pipeline_results' }); - - return ( -
- div:nth-child(2) { - block-size: auto !important; - } - `} - > - - - {JSON.stringify(pipeline, null, 2)} - - - -
- ); -}; diff --git a/x-pack/plugins/integration_assistant/public/components/Portal/ProgressPortal.tsx b/x-pack/plugins/integration_assistant/public/components/Portal/ProgressPortal.tsx deleted file mode 100644 index 0b4e7d46ce140..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/Portal/ProgressPortal.tsx +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import React from 'react'; -import { EuiPortal, EuiProgress } from '@elastic/eui'; - -export const ProgressPortal = () => { - return ( - - - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/components/Related/RelatedButtons.tsx b/x-pack/plugins/integration_assistant/public/components/Related/RelatedButtons.tsx deleted file mode 100644 index 59b2229e30b5d..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/Related/RelatedButtons.tsx +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import React from 'react'; -import { useGlobalStore } from '@Stores/useGlobalStore'; -import { EuiFlexGroup } from '@elastic/eui'; -import { getCategorization } from '@Api/services/categorizationService'; -import { RoutePaths } from '../../constants/routePaths'; - -import { ContinueButton } from '../Buttons/ContinueButton'; -import { ActionButton } from '../Buttons/ActionButton'; -import { GoBackButton } from '../Buttons/GoBackButton'; - -export const RelatedButtons = () => { - const packageName = useGlobalStore((state) => state.packageName); - const dataStreamName = useGlobalStore((state) => state.dataStreamName); - const formSamples = useGlobalStore((state) => state.formSamples); - const relatedIsLoading = useGlobalStore((state) => state.relatedIsLoading); - const relatedButtonContinue = useGlobalStore((state) => state.relatedButtonContinue); - const ingestPipeline = useGlobalStore((state) => state.ingestPipeline); - const setIsLoadingState = useGlobalStore((state) => state.setIsLoadingState); - const setIntegrationBuilderChainItemsState = useGlobalStore( - (state) => state.setIntegrationBuilderChainItemsState - ); - const setContinueButtonState = useGlobalStore((state) => state.setContinueButtonState); - const setIsPortalLoadingState = useGlobalStore((state) => state.setIsPortalLoadingState); - - const onCreateCategorizationClick = async () => { - setIsLoadingState('relatedIsLoading', true); - setIsPortalLoadingState(true); - if (ingestPipeline === undefined) { - setIsLoadingState('relatedIsLoading', false); - setIsPortalLoadingState(false); - return; - } - const req = { packageName, dataStreamName, formSamples, ingestPipeline }; - const response = await getCategorization(req); - if (response.results.pipeline !== undefined) { - setIntegrationBuilderChainItemsState('ingestPipeline', response.results.pipeline); - setIntegrationBuilderChainItemsState('docs', response.results.docs); - setContinueButtonState('relatedButtonContinue', true); - } - 
setIsLoadingState('relatedIsLoading', false); - setIsPortalLoadingState(false); - }; - - return ( - - - - - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/components/ViewResults/ViewResultsButtons.tsx b/x-pack/plugins/integration_assistant/public/components/ViewResults/ViewResultsButtons.tsx deleted file mode 100644 index e5db8e43bda15..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/ViewResults/ViewResultsButtons.tsx +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import React from 'react'; -import { EuiFlexGroup } from '@elastic/eui'; -import { ContinueButton } from '../Buttons/ContinueButton'; -import { GoBackButton } from '../Buttons/GoBackButton'; -import { RoutePaths } from '../../constants/routePaths'; - -export const ViewResultsButtons = () => { - return ( - - - - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/plugin.tsx b/x-pack/plugins/integration_assistant/public/plugin.tsx index 00538859d5c26..34b519749cf60 100644 --- a/x-pack/plugins/integration_assistant/public/plugin.tsx +++ b/x-pack/plugins/integration_assistant/public/plugin.tsx @@ -32,23 +32,10 @@ export class IntegrationAssistantPlugin }; }, }); - return { - runEcsGraph() { - return 'test'; - }, - runCategorizationGraph() { - return 'test'; - }, - runRelatedGraph() { - return 'test'; - }, - runIntegrationBuilder() { - return 'test'; - }, - }; + return {}; } - public start(core: CoreStart) { + public start(core: CoreStart): IntegrationAssistantPluginStart { return {}; } diff --git a/x-pack/plugins/integration_assistant/public/services.ts b/x-pack/plugins/integration_assistant/public/services.ts index 9f8e8afec8b06..ca8b120cb86f1 100644 --- 
a/x-pack/plugins/integration_assistant/public/services.ts +++ b/x-pack/plugins/integration_assistant/public/services.ts @@ -7,25 +7,26 @@ import type { CoreStart } from '@kbn/core/public'; import type { IHttpFetchError } from '@kbn/core-http-browser'; +import type { EcsMappingApiRequest, EcsMappingApiResponse } from '../common'; import { ECS_GRAPH_PATH, - CATEGORZATION_GRAPH_PATH, + CATEGORIZATION_GRAPH_PATH, RELATED_GRAPH_PATH, INTEGRATION_BUILDER_PATH, } from '../common'; export interface Services { - runEcsGraph: () => Promise; - runCategorizationGraph: () => Promise; - runRelatedGraph: () => Promise; - runIntegrationBuilder: () => Promise; + runEcsGraph: (req: EcsMappingApiRequest) => Promise; + runCategorizationGraph: () => Promise; + runRelatedGraph: () => Promise; + runIntegrationBuilder: () => Promise; } export function getServices(core: CoreStart): Services { return { - runEcsGraph: async () => { + runEcsGraph: async (req: EcsMappingApiRequest) => { try { - const response = await core.http.fetch<{}>(ECS_GRAPH_PATH); + const response = await core.http.post(ECS_GRAPH_PATH, {}); return response; } catch (e) { return e; @@ -33,7 +34,7 @@ export function getServices(core: CoreStart): Services { }, runCategorizationGraph: async () => { try { - const response = await core.http.fetch<{}>(CATEGORZATION_GRAPH_PATH); + const response = await core.http.fetch<{}>(CATEGORIZATION_GRAPH_PATH); return response; } catch (e) { return e; diff --git a/x-pack/plugins/integration_assistant/public/types.ts b/x-pack/plugins/integration_assistant/public/types.ts index 886b5852beef2..7ddae65359abc 100644 --- a/x-pack/plugins/integration_assistant/public/types.ts +++ b/x-pack/plugins/integration_assistant/public/types.ts @@ -5,13 +5,10 @@ * 2.0. 
*/ -import { NavigationPublicPluginStart } from '@kbn/navigation-plugin/public'; -export interface IntegrationAssistantPluginSetup { - runEcsGraph: () => string; - runRelatedGraph: () => string; - runCategorizationGraph: () => string; - runIntegrationBuilder: () => string; -} +import type { NavigationPublicPluginStart } from '@kbn/navigation-plugin/public'; + +// eslint-disable-next-line @typescript-eslint/no-empty-interface +export interface IntegrationAssistantPluginSetup {} // eslint-disable-next-line @typescript-eslint/no-empty-interface export interface IntegrationAssistantPluginStart {} diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts index 2d1d6764acc73..a431e8d6892e2 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts @@ -106,7 +106,7 @@ function modelOutput(state: EcsMappingState): Partial { lastExecutedChain: 'modelOutput', results: { mapping: state.currentMapping, - current_pipeline: currentPipeline, + pipeline: currentPipeline, }, }; } diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts index 30389ac343f72..1c41b17a73273 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts @@ -158,7 +158,7 @@ export function createPipeline(state: EcsMappingState): IngestPipeline { // Retrieve all source field names from convert processors to populate single remove processor: const fieldsToRemove = processors.filter((p: any) => p.convert).map((p: any) => p.convert.field); - const templatesPath = join(__dirname, '../../templates'); + const templatesPath = join(__dirname, '../../templates/pipeline'); const mappedValues = { processors, ecs_version: state.ecsVersion, diff --git 
a/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts b/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts index 39769e6cc8ff5..45779442c500d 100644 --- a/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts @@ -6,15 +6,27 @@ */ import { IRouter } from '@kbn/core/server'; +import { schema } from '@kbn/config-schema'; import { INTEGRATION_BUILDER_PATH } from '../../common'; export function registerIntegrationBuilderRoutes(router: IRouter) { router.post( { path: `${INTEGRATION_BUILDER_PATH}`, - validate: false, + validate: { + body: schema.object({ + packageName: schema.string(), + packageTitle: schema.string(), + dataStreamName: schema.string(), + dataStreamTitle: schema.string(), + inputTypes: schema.arrayOf(schema.string()), + formSamples: schema.arrayOf(schema.string()), + ingestPipeline: schema.any(), + docs: schema.arrayOf(schema.any()), + }), + }, }, - async (ctx, req, res) => { + async (_, req, res) => { return res.ok(); } ); diff --git a/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts b/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts index 34bf93aee985b..d9e21b568d4c9 100644 --- a/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts @@ -6,16 +6,42 @@ */ import { IRouter } from '@kbn/core/server'; -import { CATEGORZATION_GRAPH_PATH } from '../../common'; +import { schema } from '@kbn/config-schema'; +import { CATEGORIZATION_GRAPH_PATH } from '../../common'; +import { CategorizationApiRequest, CategorizationApiResponse } from '../../common/types'; +import { getCategorizationGraph } from '../graphs/categorization'; export function registerCategorizationRoutes(router: IRouter) { router.post( { - path: `${CATEGORZATION_GRAPH_PATH}`, - 
validate: false, + path: `${CATEGORIZATION_GRAPH_PATH}`, + validate: { + body: schema.object({ + packageName: schema.string(), + dataStreamName: schema.string(), + formSamples: schema.arrayOf(schema.string()), + ingestPipeline: schema.maybe(schema.any()), + }), + }, }, - async (ctx, req, res) => { - return res.ok(); + async (_, req, res) => { + const { packageName, dataStreamName, formSamples, ingestPipeline } = + req.body as CategorizationApiRequest; + const graph = await getCategorizationGraph(); + let results = { results: { docs: {}, pipeline: {} } }; + try { + results = (await graph.invoke({ + packageName, + dataStreamName, + formSamples, + ingestPipeline, + })) as CategorizationApiResponse; + } catch (e) { + // TODO: Better error responses? + return e; + } + + return res.ok({ body: results }); } ); } diff --git a/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts b/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts index d21f0d4fcc835..4de757ff5394b 100644 --- a/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts @@ -6,24 +6,50 @@ */ import { IRouter } from '@kbn/core/server'; +import { schema } from '@kbn/config-schema'; import { ECS_GRAPH_PATH } from '../../common'; - -export interface EcsGraphResponse { - ecs: string; -} - -const body: EcsGraphResponse = { - ecs: 'graph', -}; +import { EcsMappingApiRequest, EcsMappingApiResponse } from '../../common/types'; +import { getEcsGraph } from '../graphs/ecs'; export function registerEcsRoutes(router: IRouter) { - router.get( + router.post( { path: `${ECS_GRAPH_PATH}`, - validate: false, + validate: { + body: schema.object({ + packageName: schema.string(), + dataStreamName: schema.string(), + formSamples: schema.arrayOf(schema.string()), + // TODO: This is a single nested object of any key or shape, any better schema? 
+ mapping: schema.maybe(schema.any()), + }), + }, }, - async (ctx, req, res) => { - return res.ok({ body }); + async (_, req, res) => { + const { packageName, dataStreamName, formSamples, mapping } = + req.body as EcsMappingApiRequest; + const graph = await getEcsGraph(); + let results = { results: { mapping: {}, pipeline: {} } }; + try { + if (req.body?.mapping) { + results = (await graph.invoke({ + packageName, + dataStreamName, + formSamples, + mapping, + })) as EcsMappingApiResponse; + } else + results = (await graph.invoke({ + packageName, + dataStreamName, + formSamples, + })) as EcsMappingApiResponse; + } catch (e) { + // TODO: Better error responses? + return e; + } + + return res.ok({ body: results }); } ); } diff --git a/x-pack/plugins/integration_assistant/server/routes/related_routes.ts b/x-pack/plugins/integration_assistant/server/routes/related_routes.ts index 4eb70cdbfa159..83b0d3a4e875d 100644 --- a/x-pack/plugins/integration_assistant/server/routes/related_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/related_routes.ts @@ -6,16 +6,43 @@ */ import { IRouter } from '@kbn/core/server'; +import { schema } from '@kbn/config-schema'; import { RELATED_GRAPH_PATH } from '../../common'; +import { RelatedApiRequest, RelatedApiResponse } from '../../common/types'; +import { getRelatedGraph } from '../graphs/related'; export function registerRelatedRoutes(router: IRouter) { router.post( { path: `${RELATED_GRAPH_PATH}`, - validate: false, + validate: { + body: schema.object({ + packageName: schema.string(), + dataStreamName: schema.string(), + formSamples: schema.arrayOf(schema.string()), + // TODO: This is a single nested object of any key or shape, any better schema? 
+ ingestPipeline: schema.maybe(schema.any()), + }), + }, }, - async (ctx, req, res) => { - return res.ok(); + async (_, req, res) => { + const { packageName, dataStreamName, formSamples, ingestPipeline } = + req.body as RelatedApiRequest; + const graph = await getRelatedGraph(); + let results = { results: { docs: {}, pipeline: {} } }; + try { + results = (await graph.invoke({ + packageName, + dataStreamName, + formSamples, + ingestPipeline, + })) as RelatedApiResponse; + } catch (e) { + // TODO: Better error responses? + return e; + } + + return res.ok({ body: results }); } ); } diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/aws-cloudwatch.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/aws-cloudwatch.yml.njk new file mode 100644 index 0000000000000..c90e8a267890c --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/agent/aws-cloudwatch.yml.njk @@ -0,0 +1,76 @@ +{{#unless log_group_name}} +{{#unless log_group_name_prefix}} +{{#if log_group_arn }} +log_group_arn: {{ log_group_arn }} +{{/if}} +{{/unless}} +{{/unless}} +{{#unless log_group_arn}} +{{#unless log_group_name}} +{{#if log_group_name_prefix }} +log_group_name_prefix: {{ log_group_name_prefix }} +{{/if}} +{{/unless}} +{{/unless}} +{{#unless log_group_arn}} +{{#unless log_group_name_prefix}} +{{#if log_group_name }} +log_group_name: {{ log_group_name }} +{{/if}} +{{/unless}} +{{/unless}} +{{#unless log_group_arn}} +region_name: {{ region_name }} +{{/unless}} +{{#unless log_stream_prefix}} +{{#if log_streams }} +log_streams: {{ log_streams }} +{{/if}} +{{/unless}} +{{#unless log_streams}} +{{#if log_stream_prefix }} +log_stream_prefix: {{ log_stream_prefix }} +{{/if}} +{{/unless}} +{{#if start_position }} +start_position: {{ start_position }} +{{/if}} +{{#if scan_frequency }} +scan_frequency: {{ scan_frequency }} +{{/if}} +{{#if api_sleep }} +api_sleep: {{ api_sleep }} +{{/if}} +{{#if api_timeout}} +api_timeout: {{api_timeout}} +{{/if}} 
+{{#if latency }} +latency: {{ latency }} +{{/if}} +{{#if number_of_workers }} +number_of_workers: {{ number_of_workers }} +{{/if}} +{{#if credential_profile_name}} +credential_profile_name: {{credential_profile_name}} +{{/if}} +{{#if shared_credential_file}} +shared_credential_file: {{shared_credential_file}} +{{/if}} +{{#if default_region}} +default_region: {{default_region}} +{{/if}} +{{#if access_key_id}} +access_key_id: {{access_key_id}} +{{/if}} +{{#if secret_access_key}} +secret_access_key: {{secret_access_key}} +{{/if}} +{{#if session_token}} +session_token: {{session_token}} +{{/if}} +{{#if role_arn}} +role_arn: {{role_arn}} +{{/if}} +{{#if proxy_url }} +proxy_url: {{proxy_url}} +{{/if}} \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/aws-s3.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/aws-s3.yml.njk new file mode 100644 index 0000000000000..5951396423391 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/agent/aws-s3.yml.njk @@ -0,0 +1,130 @@ +{{! start SQS queue }} +{{#unless bucket_arn}} +{{#unless non_aws_bucket_name}} +{{#if queue_url }} +queue_url: {{ queue_url }} +{{/if}} +{{/unless}} +{{/unless}} +{{! end SQS queue }} + +{{#unless queue_url}}{{! start S3 bucket polling }} + +{{! +When using an S3 bucket, you can specify only one of the following options: +- An AWS bucket ARN +- A non-AWS bucket name +}} + +{{! shared S3 bucket polling options }} +{{#if number_of_workers }} +number_of_workers: {{ number_of_workers }} +{{/if}} +{{#if bucket_list_prefix }} +bucket_list_prefix: {{ bucket_list_prefix }} +{{/if}} +{{#if bucket_list_interval }} +bucket_list_interval: {{ bucket_list_interval }} +{{/if}} + +{{! AWS S3 bucket ARN options }} +{{#unless non_aws_bucket_name}} +{{#if bucket_arn }} +bucket_arn: {{ bucket_arn }} +{{/if}} +{{/unless}}{{! end AWS S3 bucket ARN options }} + +{{! 
non-AWS S3 bucket ARN options }} +{{#unless bucket_arn}} +{{#if non_aws_bucket_name }} +non_aws_bucket_name: {{ non_aws_bucket_name }} +{{/if}} +{{/unless}}{{! end non-AWS S3 bucket ARN options }} + +{{/unless}}{{! end S3 bucket polling }} + +{{#if buffer_size }} +buffer_size: {{ buffer_size }} +{{/if}} +{{#if content_type }} +content_type: {{ content_type }} +{{/if}} +{{#if encoding }} +encoding: {{ encoding }} +{{/if}} +{{#if expand_event_list_from_field }} +expand_event_list_from_field: {{ expand_event_list_from_field }} +{{/if}} +{{#if buffer_size }} +buffer_size: {{ buffer_size }} +{{/if}} +{{#if fips_enabled }} +fips_enabled: {{ fips_enabled }} +{{/if}} +{{#if include_s3_metadata }} +include_s3_metadata: {{ include_s3_metadata }} +{{/if}} +{{#if max_bytes }} +max_bytes: {{ max_bytes }} +{{/if}} +{{#if max_number_of_messages }} +max_number_of_messages: {{ max_number_of_messages }} +{{/if}} +{{#if path_style }} +path_style: {{ path_style }} +{{/if}} +{{#if provider }} +provider: {{ provider }} +{{/if}} +{{#if sqs.max_receive_count }} +sqs.max_receive_count: {{ sqs.max_receive_count }} +{{/if}} +{{#if sqs.wait_time }} +sqs.wait_time: {{ sqs.wait_time }} +{{/if}} + +{{#if file_selectors}} +file_selectors: +{{file_selectors}} +{{/if}} + +{{#if credential_profile_name}} +credential_profile_name: {{credential_profile_name}} +{{/if}} +{{#if shared_credential_file}} +shared_credential_file: {{shared_credential_file}} +{{/if}} +{{#if visibility_timeout}} +visibility_timeout: {{visibility_timeout}} +{{/if}} +{{#if api_timeout}} +api_timeout: {{api_timeout}} +{{/if}} +{{#if endpoint}} +endpoint: {{endpoint}} +{{/if}} +{{#if default_region}} +default_region: {{default_region}} +{{/if}} +{{#if access_key_id}} +access_key_id: {{access_key_id}} +{{/if}} +{{#if secret_access_key}} +secret_access_key: {{secret_access_key}} +{{/if}} +{{#if session_token}} +session_token: {{session_token}} +{{/if}} +{{#if role_arn}} +role_arn: {{role_arn}} +{{/if}} +{{#if fips_enabled}} 
+fips_enabled: {{fips_enabled}} +{{/if}} +{{#if proxy_url }} +proxy_url: {{proxy_url}} +{{/if}} +{{#if parsers}} +parsers: +{{parsers}} +{{/if}} \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/azure-blob-storage.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/azure-blob-storage.yml.njk new file mode 100644 index 0000000000000..6e319399e7b3d --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/agent/azure-blob-storage.yml.njk @@ -0,0 +1,35 @@ +{{#if account_name}} +account_name: {{account_name}} +{{/if}} +{{#if service_account_key}} +auth.shared_credentials.account_key: {{service_account_key}} +{{/if}} +{{#if service_account_uri}} +auth.connection_string.uri: {{service_account_uri}} +{{/if}} +{{#if storage_url}} +storage_url: {{storage_url}} +{{/if}} +{{#if number_of_workers}} +max_workers: {{number_of_workers}} +{{/if}} +{{#if poll}} +poll: {{poll}} +{{/if}} +{{#if poll_interval}} +poll_interval: {{poll_interval}} +{{/if}} +{{#if containers}} +containers: +{{containers}} +{{/if}} +{{#if file_selectors}} +file_selectors: +{{file_selectors}} +{{/if}} +{{#if timestamp_epoch}} +timestamp_epoch: {{timestamp_epoch}} +{{/if}} +{{#if expand_event_list_from_field}} +expand_event_list_from_field: {{expand_event_list_from_field}} +{{/if}} \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/azure-eventhub.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/azure-eventhub.yml.njk new file mode 100644 index 0000000000000..ed13f215ac169 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/agent/azure-eventhub.yml.njk @@ -0,0 +1,28 @@ +{{#if eventhub}} +eventhub: {{eventhub}} +{{/if}} +{{#if consumer_group}} +consumer_group: {{consumer_group}} +{{/if}} +{{#if connection_string}} +connection_string: {{connection_string}} +{{/if}} +{{#if storage_account}} +storage_account: {{storage_account}} +{{/if}} 
+{{#if storage_account_key}} +storage_account_key: {{storage_account_key}} +{{/if}} +{{#if storage_account_container}} +storage_account_container: {{storage_account_container}} +{{/if}} +{{#if resource_manager_endpoint}} +resource_manager_endpoint: {{resource_manager_endpoint}} +{{/if}} +sanitize_options: +{{#if sanitize_newlines}} + - NEW_LINES +{{/if}} +{{#if sanitize_singlequotes}} + - SINGLE_QUOTES +{{/if}} \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/cloudfoundry.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/cloudfoundry.yml.njk new file mode 100644 index 0000000000000..38e88ea691699 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/agent/cloudfoundry.yml.njk @@ -0,0 +1,24 @@ +{{#if api_address}} +api_address: {{api_address}} +{{/if}} +{{#if doppler_address}} +doppler_address: {{doppler_address}} +{{/if}} +{{#if uaa_address}} +uaa_address: {{uaa_address}} +{{/if}} +{{#if rlp_address}} +rlp_address: {{rlp_address}} +{{/if}} +{{#if client_id}} +client_id: {{client_id}} +{{/if}} +{{#if client_secret}} +client_secret: {{client_secret}} +{{/if}} +{{#if version}} +version: {{version}} +{{/if}} +{{#if shard_id}} +shard_id: {{shard_id}} +{{/if}} \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/common.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/common.yml.njk new file mode 100644 index 0000000000000..a11c556b5099b --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/agent/common.yml.njk @@ -0,0 +1,14 @@ +tags: +{{#if preserve_original_event}} + - preserve_original_event +{{/if}} +{{#each tags as |tag|}} + - {{tag}} +{{/each}} +{{#contains "forwarded" tags}} +publisher_pipeline.disable_host: true +{{/contains}} +{{#if processors}} +processors: +{{processors}} +{{/if}} \ No newline at end of file diff --git 
a/x-pack/plugins/integration_assistant/server/templates/agent/filestream.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/filestream.yml.njk new file mode 100644 index 0000000000000..437accfc32650 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/agent/filestream.yml.njk @@ -0,0 +1,13 @@ +paths: +{{#each paths as |path|}} + - {{path}} +{{/each}} +{{#if exclude_files}} +prospector.scanner.exclude_files: +{{#each exclude_files as |pattern f|}} + - {{pattern}} +{{/each}} +{{/if}} +{{#if custom}} +{{custom}} +{{/if}} \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/gcp-pubsub.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/gcp-pubsub.yml.njk new file mode 100644 index 0000000000000..6cee3bb8e1ae4 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/agent/gcp-pubsub.yml.njk @@ -0,0 +1,27 @@ +{{#if project_id}} +project_id: {{project_id}} +{{/if}} +{{#if topic}} +topic: {{topic}} +{{/if}} +{{#if subscription_name}} +subscription.name: {{subscription_name}} +{{/if}} +{{#if subscription_create}} +subscription.create: {{subscription_create}} +{{/if}} +{{#if subscription_num_goroutines}} +subscription.num_goroutines: {{subscription_num_goroutines}} +{{/if}} +{{#if subscription_max_outstanding_messages}} +subscription.max_outstanding_messages: {{subscription_max_outstanding_messages}} +{{/if}} +{{#if credentials_file}} +credentials_file: {{credentials_file}} +{{/if}} +{{#if credentials_json}} +credentials_json: '{{credentials_json}}' +{{/if}} +{{#if alternative_host}} +alternative_host: {{alternative_host}} +{{/if}} \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/gcs.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/gcs.yml.njk new file mode 100644 index 0000000000000..37ee85e48db93 --- /dev/null +++ 
b/x-pack/plugins/integration_assistant/server/templates/agent/gcs.yml.njk @@ -0,0 +1,35 @@ +{{#if project_id}} +project_id: {{project_id}} +{{/if}} +{{#if alternative_host}} +alternative_host: {{alternative_host}} +{{/if}} +{{#if service_account_key}} +auth.credentials_json.account_key: {{service_account_key}} +{{/if}} +{{#if service_account_file}} +auth.credentials_file.path: {{service_account_file}} +{{/if}} +{{#if number_of_workers}} +max_workers: {{number_of_workers}} +{{/if}} +{{#if poll}} +poll: {{poll}} +{{/if}} +{{#if poll_interval}} +poll_interval: {{poll_interval}} +{{/if}} +{{#if bucket_timeout}} +bucket_timeout: {{bucket_timeout}} +{{/if}} +{{#if buckets}} +buckets: +{{buckets}} +{{/if}} +{{#if file_selectors}} +file_selectors: +{{file_selectors}} +{{/if}} +{{#if timestamp_epoch}} +timestamp_epoch: {{timestamp_epoch}} +{{/if}} \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/http_endpoint.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/http_endpoint.yml.njk new file mode 100644 index 0000000000000..1accdcbaa22e6 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/agent/http_endpoint.yml.njk @@ -0,0 +1,57 @@ +{{#if listen_address}} +listen_address: {{listen_address}} +{{/if}} +{{#if listen_port}} +listen_port: {{listen_port}} +{{/if}} +{{#if prefix}} +prefix: {{prefix}} +{{/if}} +{{#if preserve_original_event}} +preserve_original_event: {{preserve_original_event}} +{{/if}} +{{#if basic_auth}} +basic_auth: {{basic_auth}} +{{/if}} +{{#if username}} +username: {{username}} +{{/if}} +{{#if password}} +password: {{password}} +{{/if}} +{{#if secret_header}} +secret.header: {{secret_header}} +{{/if}} +{{#if secret_value}} +secret.value: {{secret_value}} +{{/if}} +{{#if hmac_header}} +hmac.header: {{hmac_header}} +{{/if}} +{{#if hmac_key}} +hmac.key: {{hmac_key}} +{{/if}} +{{#if hmac_type}} +hmac.type: {{hmac_type}} +{{/if}} +{{#if hmac_prefix}} +hmac.prefix: 
{{hmac_prefix}} +{{/if}} +{{#if content_type}} +content_type: {{content_type}} +{{/if}} +{{#if response_code}} +response_code: {{response_code}} +{{/if}} +{{#if response_body}} +response_body: '{{response_body}}' +{{/if}} +{{#if url}} +url: {{url}} +{{/if}} +{{#if include_headers}} +include_headers: +{{#each include_headers as |header|}} + - {{header}} +{{/each}} +{{/if}} \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/journald.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/journald.yml.njk new file mode 100644 index 0000000000000..4bcad79a53ddc --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/agent/journald.yml.njk @@ -0,0 +1,44 @@ +condition: ${host.platform} == 'linux' + +{{#if paths}} +paths: +{{#each paths as |path i|}} + - {{path}} +{{/each}} +{{/if}} +{{#if backoff}} +backoff: {{backoff}} +{{/if}} +{{#if max_backoff}} +max_backoff: {{max_backoff}} +{{/if}} +{{#if seek}} +seek: {{seek}} +{{/if}} +{{#if cursor_seek_fallback}} +cursor_seek_fallback: {{cursor_seek_fallback}} +{{/if}} +{{#if since}} +since: {{since}} +{{/if}} +{{#if units}} +units: {{units}} +{{/if}} +{{#if syslog_identifiers}} +syslog_identifiers: +{{#each syslog_identifiers as |identifier i|}} + - {{identifier}} +{{/each}} +{{/if}} +{{#if transports}} +transports: +{{#each transports as |transport i|}} + - {{transport}} +{{/each}} +{{/if}} +{{#if include_matches}} +include_matches: +{{#each include_matches as |match i|}} + - {{match}} +{{/each}} +{{/if}} \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/kafka.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/kafka.yml.njk new file mode 100644 index 0000000000000..79b74ed4cdcb8 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/agent/kafka.yml.njk @@ -0,0 +1,100 @@ +{{#if hosts}} +hosts: +{{#each hosts as |host i|}} + - {{host}} +{{/each}} +{{/if}} 
+{{#if topics}} +topics: +{{#each topics as |topic i|}} + - {{topic}} +{{/each}} +{{/if}} +{{#if group_id}} +group_id: {{group_id}} +{{/if}} +{{#if client_id}} +client_id: {{client_id}} +{{/if}} +{{#if username}} +username: {{username}} +{{/if}} +{{#if password}} +password: {{password}} +{{/if}} +{{#if version}} +version: {{version}} +{{/if}} +{{#if initial_offset}} +initial_offset: {{initial_offset}} +{{/if}} +{{#if connect_backoff}} +connect_backoff: {{connect_backoff}} +{{/if}} +{{#if consume_backoff}} +consume_backoff: {{consume_backoff}} +{{/if}} +{{#if max_wait_time}} +max_wait_time: {{max_wait_time}} +{{/if}} +{{#if wait_close}} +wait_close: {{wait_close}} +{{/if}} +{{#if isolation_level}} +isolation_level: {{isolation_level}} +{{/if}} +{{#if expand_event_list_from_field}} +expand_event_list_from_field: {{expand_event_list_from_field}} +{{/if}} +{{#if fetch_min}} +fetch.min: {{fetch_min}} +{{/if}} +{{#if fetch_default}} +fetch.default: {{fetch_default}} +{{/if}} +{{#if fetch_max}} +fetch.max: {{fetch_max}} +{{/if}} +{{#if rebalance_strategy}} +rebalance.strategy: {{rebalance_strategy}} +{{/if}} +{{#if rebalance_timeout}} +rebalance.timeout: {{rebalance_timeout}} +{{/if}} +{{#if rebalance_max_retries}} +rebalance.max_retries: {{rebalance_max_retries}} +{{/if}} +{{#if rebalance_retry_backoff}} +rebalance.retry_backoff: {{rebalance_retry_backoff}} +{{/if}} +{{#if parsers}} +parsers: +{{parsers}} +{{/if}} +{{#if kerberos_enabled}} +kerberos.enabled: {{kerberos_enabled}} +{{/if}} +{{#if kerberos_auth_type}} +kerberos.auth_type: {{kerberos_auth_type}} +{{/if}} +{{#if kerberos_config_path}} +kerberos.config_path: {{kerberos_config_path}} +{{/if}} +{{#if kerberos_username}} +kerberos.username: {{kerberos_username}} +{{/if}} +{{#if kerberos_password}} +kerberos.password: {{kerberos_password}} +{{/if}} +{{#if kerberos_keytab}} +kerberos.keytab: {{kerberos_keytab}} +{{/if}} +{{#if kerberos_service_name}} +kerberos.service_name: {{kerberos_service_name}} +{{/if}} +{{#if 
kerberos_realm}} +kerberos.realm: {{kerberos_realm}} +{{/if}} +{{#if kerberos_enable_krb5_fast}} +kerberos.enable_krb5_fast: {{kerberos_enable_krb5_fast}} +{{/if}} \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/logfile.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/logfile.yml.njk new file mode 100644 index 0000000000000..181b2466dff7f --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/agent/logfile.yml.njk @@ -0,0 +1,13 @@ +paths: +{{#each paths as |path i|}} + - {{path}} +{{/each}} +{{#if exclude_files}} +exclude_files: +{{#each exclude_files as |file f|}} + - {{file}} +{{/each}} +{{/if}} +{{#if custom}} +{{custom}} +{{/if}} \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/tcp.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/tcp.yml.njk new file mode 100644 index 0000000000000..037d4fc8a4590 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/agent/tcp.yml.njk @@ -0,0 +1,19 @@ +host: {{listen_address}}:{{listen_port}} +{{#if max_message_size}} +max_message_size: {{max_message_size}} +{{/if}} +{{#if framing}} +framing: {{framing}} +{{/if}} +{{#if line_delimiter}} +line_delimiter: {{line_delimiter}} +{{/if}} +{{#if max_connections}} +max_connections: {{max_connections}} +{{/if}} +{{#if timeout}} +timeout: {{timeout}} +{{/if}} +{{#if keep_null}} +keep_null: {{keep_null}} +{{/if}} \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/udp.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/udp.yml.njk new file mode 100644 index 0000000000000..22f842ae31af6 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/agent/udp.yml.njk @@ -0,0 +1,10 @@ +host: {{listen_address}}:{{listen_port}} +{{#if max_message_size}} +max_message_size: {{max_message_size}} +{{/if}} +{{#if timeout}} +timeout: 
{{timeout}} +{{/if}} +{{#if keep_null}} +keep_null: {{keep_null}} +{{/if}} \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/build/build.yml.njk b/x-pack/plugins/integration_assistant/server/templates/build/build.yml.njk new file mode 100644 index 0000000000000..8eb17a43a735e --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/build/build.yml.njk @@ -0,0 +1,3 @@ +dependencies: + ecs: + reference: "git@{{ ecs_version }}" \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/changelog/changelog.yml.njk b/x-pack/plugins/integration_assistant/server/templates/changelog/changelog.yml.njk new file mode 100644 index 0000000000000..eaf3d00631fa9 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/changelog/changelog.yml.njk @@ -0,0 +1,6 @@ +# newer versions go on top +- version: {{ initial_version }} + changes: + - description: Initial Version + type: enhancement + link: https://github.com/elastic/integrations/pull/xxxx diff --git a/x-pack/plugins/integration_assistant/server/templates/data_stream/fields/agent.yml b/x-pack/plugins/integration_assistant/server/templates/data_stream/fields/agent.yml new file mode 100644 index 0000000000000..d815b78d95fcf --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/data_stream/fields/agent.yml @@ -0,0 +1,44 @@ +- name: cloud + title: Cloud + group: 2 + description: Fields related to the cloud or infrastructure the events are coming from. + footnote: 'Examples: If Metricbeat is running on an EC2 host and fetches data from its host, the cloud info contains the data about this machine. If Metricbeat runs on a remote machine outside the cloud and fetches data from a service running in the cloud, the field contains cloud data from the machine the service is running on.' + type: group + fields: + - name: image.id + type: keyword + description: Image ID for the cloud instance. 
+- name: container + title: Container + group: 2 + description: 'Container fields are used for meta information about the specific container that is the source of information. + These fields help correlate data based containers from any runtime.' + type: group + fields: + - name: labels + level: extended + type: object + object_type: keyword + description: Image labels. +- name: host + title: Host + group: 2 + description: 'A host is defined as a general computing instance. + ECS host.* fields should be populated with details about the host on which the event happened, or from which the measurement was taken. Host types include hardware, virtual machines, Docker containers, and Kubernetes nodes.' + type: group + fields: + - name: containerized + type: boolean + description: > + If the host is a container. + - name: os.build + type: keyword + example: "18D109" + description: > + OS build information. + - name: os.codename + type: keyword + example: "stretch" + description: > + OS codename, if any. + diff --git a/x-pack/plugins/integration_assistant/server/templates/data_stream/fields/beats.yml b/x-pack/plugins/integration_assistant/server/templates/data_stream/fields/beats.yml new file mode 100644 index 0000000000000..9bcba659d84c0 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/data_stream/fields/beats.yml @@ -0,0 +1,30 @@ +- name: input.type + type: keyword + description: Type of Filebeat input. +- name: log.flags + type: keyword + description: Flags for the log file. +- name: log.offset + type: long + description: Offset of the entry in the log file. +- name: log.file + type: group + fields: + - name: device_id + type: keyword + description: ID of the device containing the filesystem where the file resides. + - name: fingerprint + type: keyword + description: The sha256 fingerprint identity of the file when fingerprinting is enabled. + - name: inode + type: keyword + description: Inode number of the log file. 
+ - name: idxhi + type: keyword + description: The high-order part of a unique identifier that is associated with a file. (Windows-only) + - name: idxlo + type: keyword + description: The low-order part of a unique identifier that is associated with a file. (Windows-only) + - name: vol + type: keyword + description: The serial number of the volume that contains a file. (Windows-only) \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/fields/base-fields.yml.njk b/x-pack/plugins/integration_assistant/server/templates/fields/base-fields.yml.njk new file mode 100644 index 0000000000000..336d4c30e0dd5 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/fields/base-fields.yml.njk @@ -0,0 +1,20 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset name. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: event.module + type: constant_keyword + description: Event module + value: {{ module }} +- name: event.dataset + type: constant_keyword + description: Event dataset + value: {{ dataset }} +- name: "@timestamp" + type: date + description: Event timestamp. 
diff --git a/x-pack/plugins/integration_assistant/server/templates/img/logo.svg b/x-pack/plugins/integration_assistant/server/templates/img/logo.svg new file mode 100644 index 0000000000000..173fdec5072e9 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/img/logo.svg @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/aws-cloudwatch_manifest.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/aws-cloudwatch_manifest.yml.njk new file mode 100644 index 0000000000000..c2334c76052a0 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/manifest/aws-cloudwatch_manifest.yml.njk @@ -0,0 +1,92 @@ +- input: aws-cloudwatch + template_path: aws-cloudwatch.yml.hbs + title: {{ data_stream_title }} + description: {{ data_stream_description }} + vars: + - name: log_group_arn + type: text + title: Log Group ARN + multi: false + required: false + show_user: true + description: ARN of the log group to collect logs from. + - name: start_position + type: text + title: Start Position + multi: false + required: false + default: beginning + show_user: true + description: Allows user to specify if this input should read log files from the beginning or from the end. + - name: log_group_name + type: text + title: Log Group Name + multi: false + required: false + show_user: false + description: Name of the log group to collect logs from. `region_name` is required when `log_group_name` is given. + - name: log_group_name_prefix + type: text + title: Log Group Name Prefix + multi: false + required: false + show_user: false + description: The prefix for a group of log group names. `region_name` is required when `log_group_name_prefix` is given. `log_group_name` and `log_group_name_prefix` cannot be given at the same time. 
+ - name: region_name
+ type: text
+ title: Region Name
+ multi: false
+ required: false
+ show_user: false
+ description: Region that the specified log group or log group prefix belongs to.
+ - name: log_streams
+ type: text
+ title: Log Streams
+ multi: true
+ required: false
+ show_user: false
+ description: A list of log stream names that Filebeat collects log events from.
+ - name: log_stream_prefix
+ type: text
+ title: Log Stream Prefix
+ multi: false
+ required: false
+ show_user: false
+ description: A string to filter the results to include only log events from log streams that have names starting with this prefix.
+ - name: scan_frequency
+ type: text
+ title: Scan Frequency
+ multi: false
+ required: false
+ show_user: false
+ default: 1m
+ description: This config parameter sets how often Filebeat checks for new log events from the specified log group.
+ - name: api_timeout
+ type: text
+ title: API Timeout
+ multi: false
+ required: false
+ show_user: false
+ default: 120s
+ description: The maximum duration an AWS API call can take. If it exceeds the timeout, the AWS API call will be interrupted.
+ - name: api_sleep
+ type: text
+ title: API Sleep
+ multi: false
+ required: false
+ show_user: false
+ default: 200ms
+ description: This is used to sleep between AWS FilterLogEvents API calls inside the same collection period. `FilterLogEvents` API has a quota of 5 transactions per second (TPS)/account/Region. This value should only be adjusted when there are multiple Filebeats or multiple Filebeat inputs collecting logs from the same region and AWS account.
+ - name: latency
+ type: text
+ title: Latency
+ multi: false
+ required: false
+ show_user: false
+ description: "The amount of time required for the logs to be available to CloudWatch Logs. Sample values, `1m` or `5m` — see Golang [time.ParseDuration](https://pkg.go.dev/time#ParseDuration) for more details. Latency translates the query's time range to consider the CloudWatch Logs latency. 
Example: `5m` means that the integration will query CloudWatch to search for logs available 5 minutes ago." + - name: number_of_workers + type: integer + title: Number of workers + required: false + show_user: false + description: The number of workers assigned to reading from log groups. Each worker will read log events from one of the log groups matching `log_group_name_prefix`. For example, if `log_group_name_prefix` matches five log groups, then `number_of_workers` should be set to `5`. The default value is `1`. \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/aws-s3.yml_manifest.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/aws-s3.yml_manifest.yml.njk new file mode 100644 index 0000000000000..6265b57e6ed35 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/manifest/aws-s3.yml_manifest.yml.njk @@ -0,0 +1,177 @@ +- input: aws-s3 + template_path: aws-s3.yml.hbs + title: {{ data_stream_title }} + description: {{ data_stream_description }} + vars: + - name: bucket_arn + type: text + title: Bucket ARN + multi: false + required: false + show_user: true + description: ARN of the AWS S3 bucket that will be polled for list operation. (Required when `queue_url` and `non_aws_bucket_name` are not set). + - name: queue_url + type: text + title: Queue URL + multi: false + required: false + show_user: true + description: URL of the AWS SQS queue that messages will be received from. + - name: number_of_workers + type: integer + title: Number of Workers + multi: false + required: false + default: 1 + show_user: true + description: Number of workers that will process the S3 objects listed. (Required when `bucket_arn` is set). + - name: parsers + type: yaml + title: Parsers + description: >- + This option expects a list of parsers that the payload has to go through. 
For more information see [Parsers](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-aws-s3.html#input-aws-s3-parsers) + required: false + show_user: true + multi: false + default: | + #- multiline: + # pattern: "^- + A standard MIME type describing the format of the object data. This can be set to override the MIME type that was given to the object when it was uploaded. For example application/json. + - name: encoding + type: text + title: Encoding + multi: false + required: false + show_user: false + description: The file encoding to use for reading data that contains international characters. This only applies to non-JSON logs. + - name: expand_event_list_from_field + type: text + title: Expand Event List from Field + multi: false + required: false + show_user: false + description: >- + If the fileset using this input expects to receive multiple messages bundled under a specific field then the config option expand_event_list_from_field value can be assigned the name of the field. This setting will be able to split the messages under the group value into separate events. For example, CloudTrail logs are in JSON format and events are found under the JSON object "Records". + - name: file_selectors + type: yaml + title: File Selectors + multi: true + required: false + show_user: false + description: >- + If the SQS queue will have events that correspond to files that this integration shouldn’t process file_selectors can be used to limit the files that are downloaded. This is a list of selectors which are made up of regex and expand_event_list_from_field options. The regex should match the S3 object key in the SQS message, and the optional expand_event_list_from_field is the same as the global setting. If file_selectors is given, then any global expand_event_list_from_field value is ignored in favor of the ones specified in the file_selectors. Regex syntax is the same as the Go language. 
Files that don’t match one of the regexes won’t be processed. content_type, parsers, include_s3_metadata,max_bytes, buffer_size, and encoding may also be set for each file selector. + - name: fips_enabled + type: bool + title: Enable S3 FIPS + default: false + multi: false + required: false + show_user: false + description: Enabling this option changes the service name from `s3` to `s3-fips` for connecting to the correct service endpoint. + - name: include_s3_metadata + type: text + title: Include S3 Metadata + multi: true + required: false + show_user: false + description: >- + This input can include S3 object metadata in the generated events for use in follow-on processing. You must specify the list of keys to include. By default none are included. If the key exists in the S3 response then it will be included in the event as aws.s3.metadata. where the key name as been normalized to all lowercase. + - name: max_bytes + type: text + title: Max Bytes + default: 10MiB + multi: false + required: false + show_user: false + description: The maximum number of bytes that a single log message can have. All bytes after max_bytes are discarded and not sent. This setting is especially useful for multiline log messages, which can get large. This only applies to non-JSON logs. + - name: max_number_of_messages + type: integer + title: Maximum Concurrent SQS Messages + description: The maximum number of SQS messages that can be inflight at any time. + default: 5 + required: false + show_user: false + - name: non_aws_bucket_name + type: text + title: Non AWS Bucket Name + multi: false + required: false + show_user: false + description: Name of the S3 bucket that will be polled for list operation. Required for 3rd party S3 compatible services. (Required when queue_url and bucket_arn are not set). 
+ - name: path_style + type: text + title: Path Style + multi: false + required: false + show_user: false + description: >- + Enabling this option sets the bucket name as a path in the API call instead of a subdomain. When enabled https://.s3...com becomes https://s3...com/. This is only supported with 3rd party S3 providers. AWS does not support path style. + - name: provider + type: text + title: Provider Name + multi: false + required: false + show_user: false + description: Name of the 3rd party S3 bucket provider like backblaze or GCP. + - name: sqs.max_receive_count + type: integer + title: SQS Message Maximum Receive Count + multi: false + required: false + show_user: false + default: 5 + description: The maximum number of times a SQS message should be received (retried) before deleting it. This feature prevents poison-pill messages (messages that can be received but can’t be processed) from consuming resources. + - name: sqs.wait_time + type: text + title: SQS Maximum Wait Time + multi: false + required: false + show_user: false + default: 20s + description: >- + The maximum duration that an SQS `ReceiveMessage` call should wait for a message to arrive in the queue before returning. The maximum value is `20s`. + - name: visibility_timeout + type: text + title: Visibility Timeout + multi: false + required: false + show_user: false + description: The duration that the received messages are hidden from subsequent retrieve requests after being retrieved by a ReceiveMessage request. The maximum is 12 hours. 
\ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/azure-blob-storage_manifest.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/azure-blob-storage_manifest.yml.njk new file mode 100644 index 0000000000000..897a6a043f86f --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/manifest/azure-blob-storage_manifest.yml.njk @@ -0,0 +1,74 @@ +- input: azure-blob-storage + template_path: azure-blob-storage.yml.hbs + title: {{ data_stream_title }} + description: {{ data_stream_description }} + vars: + - name: storage_url + type: text + title: Storage URL + description: >- + Use this attribute to specify a custom storage URL if required. By default it points to azure cloud storage. Only use this if there is a specific need to connect to a different environment where blob storage is available. + URL format : {{protocol}}://{{account_name}}.{{storage_uri}}. + required: false + show_user: false + - name: number_of_workers + type: integer + title: Maximum number of workers + multi: false + required: false + show_user: true + default: 3 + description: Determines how many workers are spawned per container. + - name: poll + type: bool + title: Polling + multi: false + required: false + show_user: true + default: true + description: Determines if the container will be continuously polled for new documents. + - name: poll_interval + type: text + title: Polling interval + multi: false + required: false + show_user: true + default: 15s + description: Determines the time interval between polling operations. + - name: containers + type: yaml + title: Containers + description: "This attribute contains the details about a specific container like, name, number_of_workers, poll, poll_interval etc. 
\nThe attribute 'name' is specific to a container as it describes the container name, while the fields number_of_workers, poll, poll_interval can exist both at the container level and at the global level. \nIf you have already defined the attributes globally, then you can only specify the container name in this yaml config. \nIf you want to override any specific attribute for a container, then, you can define it here. \nAny attribute defined in the yaml will override the global definitions. Please see the relevant [documentation](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-azure-blob-storage.html#attrib-containers) for further information.\n" + required: true + show_user: true + default: | + - name: azure-container1 + max_workers: 3 + poll: true + poll_interval: 15s + #- name: azure-container2 + # max_workers: 3 + # poll: true + # poll_interval: 10s + - name: file_selectors + type: yaml + title: File Selectors + multi: false + required: false + show_user: false + default: | + # - regex: "event/" + description: "If the container will have events that correspond to files that this integration shouldn’t process, file_selectors can be used to limit the files that are downloaded. This is a list of selectors which is made up of regex patterns. \nThe regex should match the container filepath. Regexes use [RE2 syntax](https://pkg.go.dev/regexp/syntax). Files that don’t match one of the regexes will not be processed.\n" + - name: timestamp_epoch + type: integer + title: Timestamp Epoch + multi: false + required: false + show_user: false + - name: expand_event_list_from_field + type: text + title: Expand Event List From Field + multi: false + required: false + show_user: false + description: "If the file-set using this input expects to receive multiple messages bundled under a specific field or an array of objects then the config option for 'expand_event_list_from_field' can be specified. 
\nThis setting will be able to split the messages under the group value into separate events. This can be specified at the global level or at the container level.\nFor more info please refer to the [documentation](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-azure-blob-storage.html#attrib-expand_event_list_from_field).\n" \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/azure-eventhub_manifest.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/azure-eventhub_manifest.yml.njk new file mode 100644 index 0000000000000..feac0ec87759f --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/manifest/azure-eventhub_manifest.yml.njk @@ -0,0 +1,74 @@ +- input: azure-eventhub + template_path: azure-eventhub.yml.hbs + title: {{ data_stream_title }} + description: {{ data_stream_description }} + vars: + - name: eventhub + type: text + title: Event Hub + multi: false + required: true + show_user: true + description: >- + Elastic recommends using one event hub for each integration. Visit [Create an event hub](https://docs.elastic.co/integrations/azure#create-an-event-hub) to learn more. Use event hub names up to 30 characters long to avoid compatibility issues. + - name: connection_string + type: text + title: Connection String + multi: false + required: true + show_user: true + description: >- + The connection string required to communicate with Event Hubs. See [Get an Event Hubs connection string](https://docs.microsoft.com/en-us/azure/event-hubs/event-hubs-get-connection-string) to learn more. + - name: storage_account + type: text + title: Storage Account + multi: false + required: true + show_user: true + description: >- + The name of the storage account where the consumer group's state/offsets will be stored and updated. 
+ - name: storage_account_key + type: text + title: Storage Account Key + multi: false + required: true + show_user: true + description: >- + The storage account key, this key will be used to authorize access to data in your storage account. + - name: consumer_group + type: text + title: Consumer Group + multi: false + required: true + show_user: true + default: $Default + - name: resource_manager_endpoint + type: text + title: Resource Manager Endpoint + multi: false + required: false + show_user: false + - name: storage_account_container + type: text + title: Storage Account Container + multi: false + required: false + show_user: false + description: >- + The storage account container where the integration stores the checkpoint data for the consumer group. It is an advanced option to use with extreme care. You MUST use a dedicated storage account container for each Azure log type (activity, sign-in, audit logs, and others). DO NOT REUSE the same container name for more than one Azure log type. See [Container Names](https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata#container-names) for details on naming rules from Microsoft. The integration generates a default container name if not specified. + - name: sanitize_newlines + type: bool + title: Sanitizes New Lines + description: Removes new lines in logs to ensure proper formatting of JSON data and avoid parsing issues during processing. + multi: false + required: false + show_user: false + default: false + - name: sanitize_singlequotes + required: true + show_user: false + title: Sanitizes Single Quotes + description: Replaces single quotes with double quotes (single quotes inside double quotes are omitted) in logs to ensure proper formatting of JSON data and avoid parsing issues during processing. 
+ type: bool + multi: false + default: false \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/cloudfoundry_manifest.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/cloudfoundry_manifest.yml.njk new file mode 100644 index 0000000000000..b8e85c57c52d1 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/manifest/cloudfoundry_manifest.yml.njk @@ -0,0 +1,101 @@ +- input: cloudfoundry + template_path: cloudfoundry.yml.hbs + title: {{ data_stream_title }} + description: {{ data_stream_description }} + vars: + - name: api_address + type: text + title: API Address + multi: false + required: true + show_user: true + default: "http://api.bosh-lite.com" + description: The URL of the Cloud Foundry API. + - name: client_id + type: text + title: Client ID + multi: false + required: true + show_user: true + description: Client ID to authenticate with Cloud Foundry. + - name: client_secret + type: text + title: Client Secret + multi: false + required: true + show_user: true + description: Client Secret to authenticate with Cloud Foundry. + - name: shard_id + type: text + title: Shard ID + required: false + show_user: false + description: Shard ID for the connection with Cloud Foundry. Use the same ID across multiple filebeat to shard the load of events. Default: "(generated UUID)". + - name: version + type: text + title: Cloud Foundry API Version + required: false + show_user: false + description: Consumer API version to connect with Cloud Foundry to collect events. Use v1 to collect events using Doppler/Traffic Control. Use v2 to collect events from the RLP Gateway. Default: "v1". + - name: doppler_address + type: text + title: Doppler Address + required: false + show_user: false + description: The URL of the Cloud Foundry Doppler Websocket. Optional. Default: "(value from ${api_address}/v2/info)". 
+ - name: uaa_address + type: text + title: UAA Address + required: false + show_user: false + description: The URL of the Cloud Foundry UAA API. Optional. Default: "(value from ${api_address}/v2/info)". + - name: rlp_address + type: text + title: RLP Address + required: false + show_user: false + description: The URL of the Cloud Foundry RLP Gateway. Optional. Default: "(log-stream subdomain under the same domain as api_server)". + - name: custom + type: yaml + title: Additional Log Configuration Options + description: > + Configuration options that can be used to further change input configuration. Check the [Filebeat documentation](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-log.html) for more information. + required: false + show_user: false + default: | + #recursive_glob.enabled: true + #encoding: plain + harvester_buffer_size: 16384 + max_bytes: 10485760 + # json.keys_under_root: false + # json.add_error_key: false + # json.message_key: message + # json.overwrite_keys: false + # json.expand_keys: false + # json.document_id: "" + # json.ignore_decoding_error: false + # multiline.type: pattern + # multiline.pattern: "^{" + # multiline.negate: true + # multiline.match: after + # multiline.max_lines: 500 + # multiline.timeout: 5s + # multiline.flush_pattern: "" + # exclude_lines: ['^DBG'] + # include_lines: ['^ERR', '^WARN'] + # scan_frequency: 10s + # harvester_limit: 0 + # tail_files: false + # backoff: 1s + # max_backoff: 10s + # backoff_factor: 2 + # close_inactive: 5m + # close_renamed: false + # close_removed: true + # close_eof: false + # close_timeout: 0 + # clean_removed: true + # clean_inactive: 0 + # ignore_older: 0 + # max_bytes: 10485760 + # symlinks: false \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/common.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/common.yml.njk new file mode 100644 index 0000000000000..95b0dbc37b3cf --- /dev/null +++ 
b/x-pack/plugins/integration_assistant/server/templates/manifest/common.yml.njk @@ -0,0 +1,25 @@ + - name: preserve_original_event + required: true + show_user: true + title: Preserve original event + description: Preserves a raw copy of the original event, added to the field `event.original` + type: bool + multi: false + default: false + - name: tags + type: text + title: Tags + multi: true + required: true + show_user: false + default: + - forwarded + - {{ package_name }}-{{ data_stream_name }} + - name: processors + type: yaml + title: Processors + multi: false + required: false + show_user: false + description: > + Processors are used to reduce the number of fields in the exported event or to enhance the event with metadata. This executes in the agent before the logs are parsed. See [Processors](https://www.elastic.co/guide/en/beats/filebeat/current/filtering-and-enhancing-data.html) for details. \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/data_stream.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/data_stream.yml.njk new file mode 100644 index 0000000000000..e90bdd91f69e4 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/manifest/data_stream.yml.njk @@ -0,0 +1,4 @@ +title: {{ title }} +type: logs +streams:{% for data_stream in data_streams %} +{{ data_stream | indent(2, true) }}{% endfor %} \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/filestream_manifest.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/filestream_manifest.yml.njk new file mode 100644 index 0000000000000..0884b55123850 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/manifest/filestream_manifest.yml.njk @@ -0,0 +1,60 @@ +- input: filestream + template_path: filestream.yml.hbs + title: {{ data_stream_title }} + description: {{ data_stream_description }} + vars: + - name: paths + 
type: text + title: Paths + multi: true + required: true + show_user: true + default: + - '/test/path' + description: The full path to the related log file. + - name: exclude_files + type: text + title: Exclude Files + description: A list of regular expressions to match the files that you want Filebeat to ignore. + required: false + show_user: true + default: + - '\.gz$' + - name: custom + type: yaml + title: Additional Filestream Configuration Options + required: false + show_user: false + description: >- + Configuration options that can be used to further change input configuration. Check the [Filebeat documentation](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-filestream.html) for more information. + default: |- + #encoding: plain + prospector.scanner.recursive_glob: true + #prospector.scanner.symlinks: true + #prospector.scanner.include_files: [''] + #prospector.scanner.resend_on_touch: false + #prospector.scanner.check_interval: 10s + #prospector.scanner.fingerprint.enabled: false + #prospector.scanner.fingerprint.offset: 0 + #prospector.scanner.fingerprint.length: 1024 + #ignore_older: 0 + #ignore_inactive: '' + #close.on_state_change.inactive: 5m + #close.on_state_change.renamed: false + #close.on_state_change.removed: false + #close.reader.on_eof: false + #close.reader.after_interval: 0 + #clean_inactive: 0 + #clean_removed: true + #backoff.init: 2s + #backoff.max: 10s + #file_identity.native: ~ + #file_identity.path: '' + #file_identity.inode_marker.path: '' + #file_identity.fingerprint: ~ + #rotation.external.strategy.copytruncate.suffix_regex: '\.\d$' + #rotation.external.strategy.copytruncate.dateformat: '-20060102' + #include_lines: ['sometext'] + #exclude_lines: ['^DBG'] + #buffer_size: 16384 + #message_max_bytes: 1048576 \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/gcp-pubsub_manifest.yml.njk 
b/x-pack/plugins/integration_assistant/server/templates/manifest/gcp-pubsub_manifest.yml.njk new file mode 100644 index 0000000000000..920450fd7b911 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/manifest/gcp-pubsub_manifest.yml.njk @@ -0,0 +1,64 @@ +- input: gcp-pubsub + template_path: gcp-pubsub.yml.hbs + title: {{ data_stream_title }} + description: {{ data_stream_description }} + vars: + - name: project_id + type: text + title: Project ID + multi: false + required: true + show_user: true + - name: topic + type: text + title: Topic + multi: false + required: true + show_user: true + - name: subscription_name + type: text + title: Subscription Name + multi: false + required: true + show_user: true + - name: credentials_file + type: text + title: Credentials File + multi: false + required: false + show_user: true + description: Path to a JSON file containing the credentials and key used to subscribe. + - name: credentials_json + type: text + title: Credentials JSON + multi: false + required: false + show_user: true + description: JSON blob containing the credentials and key used to subscribe. + - name: subscription_create + type: bool + title: Subscription Create + description: If true, the integration will create the subscription on start. + multi: false + required: false + show_user: true + - name: subscription_num_goroutines + type: text + title: Subscription Num Goroutines + description: Number of goroutines created to read from the subscription. This does not limit the number of messages that can be processed concurrently or the maximum number of goroutines the input will create. + multi: false + required: false + show_user: false + - name: subscription_max_outstanding_messages + type: text + title: Subscription Max Outstanding Messages + description: The maximum number of unprocessed messages (unacknowledged but not yet expired). If the value is negative, then there will be no limit on the number of unprocessed messages. 
Default is 1000. + multi: false + required: false + show_user: false + - name: alternative_host + type: text + title: Alternative host + multi: false + required: false + show_user: false \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/gcs_manifest.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/gcs_manifest.yml.njk new file mode 100644 index 0000000000000..4beef9fc5f439 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/manifest/gcs_manifest.yml.njk @@ -0,0 +1,106 @@ +- input: gcs + template_path: gcs.yml.hbs + title: {{ data_stream_title }} + description: {{ data_stream_description }} + vars: + - name: project_id + type: text + title: Project ID + description: >- + This attribute is required for various internal operations with respect to authentication, creating storage clients and logging which are used internally for various processing purposes. + multi: false + required: true + show_user: true + default: my-project-id + - name: service_account_key + type: password + title: Service Account Key + description: >- + This attribute contains the json service account credentials string, which can be generated from the google cloud console, ref [Service Account Keys](https://cloud.google.com/iam/docs/creating-managing-service-account-keys). + Required if a Service Account File is not provided. + multi: false + required: false + show_user: true + - name: service_account_file + type: text + title: Service Account File + description: >- + This attribute contains the service account credentials file, which can be generated from the google cloud console, ref [Service Account Keys](https://cloud.google.com/iam/docs/creating-managing-service-account-keys). + Required if a Service Account Key is not provided. 
+ multi: false + required: false + show_user: true + - name: number_of_workers + type: integer + title: Maximum number of workers + multi: false + required: false + show_user: true + default: 3 + description: Determines how many workers are spawned per bucket. + - name: poll + type: bool + title: Polling + multi: false + required: false + show_user: true + default: true + description: Determines if the bucket will be continuously polled for new documents. + - name: poll_interval + type: text + title: Polling interval + multi: false + required: false + show_user: true + default: 15s + description: Determines the time interval between polling operations. + - name: bucket_timeout + type: text + title: Bucket Timeout + multi: false + required: false + show_user: true + default: 120s + description: Defines the maximum time that the sdk will wait for a bucket api response before timing out. Valid time units are ns, us, ms, s, m, h. + - name: buckets + type: yaml + title: Buckets + description: "This attribute contains the details about a specific bucket like, name, number_of_workers, poll, poll_interval and bucket_timeout. The attribute 'name' is specific to a bucket as it describes the bucket name, while the fields number_of_workers, poll, poll_interval and bucket_timeout can exist both at the bucket level and at the global level. If you have already defined the attributes globally, then you can only specify the name in this yaml config. If you want to override any specific attribute for a specific bucket, then, you can define it here. Any attribute defined in the yaml will override the global definitions. Please see the relevant [Documentation](https://www.elastic.co/guide/en/beats/filebeat/8.5/filebeat-input-gcs.html#attrib-buckets) for further information.\n" + required: true + show_user: true + default: >- + # You can define as many buckets as you want here. + + - name: logs + - name: logs_2 + + # The config below is an example of how to override the global config. 
+ + #- name: event_logs_3 + # number_of_workers: 3 + # poll: true + # poll_interval: 10s + # bucket_timeout: 30s + - name: file_selectors + type: yaml + title: File Selectors + multi: false + required: false + show_user: false + default: >- + # - regex: "event/" + description: "If the GCS bucket will have events that correspond to files that this integration shouldn’t process, file_selectors can be used to limit the files that are downloaded. This is a list of selectors which is made up of regex patterns. \nThe regex should match the GCS bucket filepath. Regexes use [RE2 syntax](https://pkg.go.dev/regexp/syntax). Files that don’t match one of the regexes will not be processed.\n" + - name: timestamp_epoch + type: integer + title: Timestamp Epoch + multi: false + required: false + show_user: false + description: Defines the epoch time in seconds, which is used to filter out objects/files that are older than the specified timestamp. + - name: alternative_host + type: text + title: Alternative Host + description: Used to override the default host for the storage client (default is storage.googleapis.com) + required: false + multi: false + show_user: false \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/http_endpoint_manifest.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/http_endpoint_manifest.yml.njk new file mode 100644 index 0000000000000..1c75f6f337806 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/manifest/http_endpoint_manifest.yml.njk @@ -0,0 +1,133 @@ +- input: http_endpoint + template_path: http_endpoint.yml.hbs + title: {{ data_stream_title }} + description: {{ data_stream_description }} + vars: + - name: listen_address + type: text + title: Listen Address + description: | + Bind address for the HTTP listener. Use 0.0.0.0 to listen on all interfaces. 
+ required: true + show_user: true + default: localhost + - name: listen_port + type: text + title: Listen port + description: | + Bind port for the listener. + required: true + show_user: true + default: 8080 + - name: url + type: text + title: URL + description: This option specifies which URL path to accept requests on. Defaults to /. + required: false + show_user: true + - name: data_stream.dataset + type: text + title: Dataset name + description: | + Dataset to write data to. Changing the dataset will send the data to a different index. You can't use `-` in the name of a dataset and only valid characters for [Elasticsearch index names](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-index_.html). + default: http_endpoint.generic + required: true + show_user: true + - name: pipeline + type: text + title: Ingest Pipeline + description: | + The Ingest Node pipeline ID to be used by the integration. + required: false + show_user: true + - name: preserve_original_event + type: bool + title: Preserve Original Event + description: This option copies the raw unmodified body of the incoming request to the event.original field as a string before sending the event to Elasticsearch. + required: false + show_user: true + - name: prefix + type: text + title: Prefix + description: This option specifies which prefix field the incoming request will be mapped to. + required: false + show_user: false + - name: basic_auth + type: bool + title: Basic Auth + description: Enables or disables HTTP basic auth for each incoming request. If enabled then username and password will also need to be configured. + required: false + show_user: false + - name: username + type: text + title: Username + description: If basic_auth is enabled, this is the username used for authentication against the HTTP listener. Requires password to also be set. 
+ required: false + show_user: false + - name: password + type: password + title: Password + description: If basic_auth is enabled, this is the password used for authentication against the HTTP listener. Requires username to also be set. + required: false + show_user: false + - name: secret_header + type: text + title: Secret Header + description: The header to check for a specific value specified by secret.value. Certain webhooks provide the possibility to include a special header and secret to identify the source. + required: false + show_user: false + - name: secret_value + type: password + title: Secret Value + description: The secret stored in the header name specified by secret.header. Certain webhooks provide the possibility to include a special header and secret to identify the source. + required: false + show_user: false + - name: hmac_header + type: text + title: HMAC Header + description: The name of the header that contains the HMAC signature, for example X-Dropbox-Signature, X-Hub-Signature-256, etc. + required: false + show_user: false + - name: hmac_key + type: password + title: HMAC Key + description: The secret key used to calculate the HMAC signature. Typically, the webhook sender provides this value. + required: false + show_user: false + - name: hmac_type + type: text + title: HMAC Type + description: The hash algorithm to use for the HMAC comparison. At this time the only valid values are sha256 or sha1. + required: false + show_user: false + - name: hmac_prefix + type: text + title: HMAC Prefix + description: The prefix for the signature. Certain webhooks prefix the HMAC signature with a value, for example sha256=. + required: false + show_user: false + - name: content_type + type: text + title: Content Type + description: By default the input expects the incoming POST to include a Content-Type of application/json to try to enforce the incoming data to be valid JSON. 
In certain scenarios when the source of the request is not able to do that, it can be overwritten with another value or set to null. + required: false + show_user: false + - name: response_code + type: text + title: Response Code + description: The HTTP response code returned upon success. Should be in the 2XX range. + required: false + show_user: false + - name: response_body + type: text + title: Response Body + description: The response body returned upon success. Should be a single line JSON string. + required: false + show_user: false + - name: include_headers + type: text + title: Include Headers + description: This option specifies a list of HTTP headers that should be copied from the incoming request and included in the document. All configured headers will always be canonicalized to match the headers of the incoming request. For example, ["content-type"] will become ["Content-Type"] when the filebeat is running. + multi: true + required: false + show_user: false \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/journald_manifest.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/journald_manifest.yml.njk new file mode 100644 index 0000000000000..eef7588f18def --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/manifest/journald_manifest.yml.njk @@ -0,0 +1,77 @@ +- input: journald + template_path: journald.yml.hbs + title: {{ data_stream_title }} + description: {{ data_stream_description }} + vars: + - name: include_matches + type: text + title: Include Matches + multi: true + show_user: true + description: >- + A list of filter expressions used to select the logs to read (e.g. `_SYSTEMD_UNIT=vault.service`). Defaults to all logs. See [include_matches](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-journald.html#filebeat-input-journald-include-matches) for details. 
+ - name: backoff + type: text + title: Backoff Duration + multi: false + show_user: false + default: "1s" + description: >- + The number of seconds to wait before trying to read again from journals. + - name: max_backoff + type: text + title: Max Backoff Duration + multi: false + show_user: false + default: "60s" + description: >- + The maximum number of seconds to wait before attempting to read again from journals. + - name: seek + type: text + title: Start Position + multi: false + show_user: false + description: >- + The position to start reading the journal from. See [seek](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-journald.html#filebeat-input-journald-seek) for details. + - name: cursor_seek_fallback + type: text + title: Start Position Fallback + multi: false + show_user: false + description: >- + The position to start reading the journal from if no cursor information is available. See [cursor_seek_fallback](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-journald.html#filebeat-input-journald-cursor_seek_fallback) for details. + - name: since + type: text + title: Start Since + multi: false + show_user: false + description: >- + A time offset from the current time to start reading from. Example: "-24h" + - name: units + type: text + title: Units + multi: true + show_user: false + description: >- + Iterate only the entries of the units specified in this option. The iterated entries include messages from the units, messages about the units by authorized daemons and coredumps. However, it does not match systemd user units. + - name: syslog_identifiers + type: text + title: Syslog Identifiers + multi: true + show_user: false + description: >- + Read only the entries with the selected syslog identifiers. + - name: transports + type: text + title: Included Transport Types + multi: true + show_user: false + description: >- + Collect the messages using the specified transports. 
See [transports](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-journald.html#filebeat-input-journald-transports) for more details. + - name: paths + type: text + title: Journal paths + multi: true + show_user: false + description: >- + List of journals to read from. Defaults to the system journal. diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/kafka_manifest.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/kafka_manifest.yml.njk new file mode 100644 index 0000000000000..7374b34906c0c --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/manifest/kafka_manifest.yml.njk @@ -0,0 +1,221 @@ +- input: kafka + template_path: kafka.yml.hbs + title: {{ data_stream_title }} + description: {{ data_stream_description }} + vars: + - name: hosts + type: text + title: Hosts + description: | + A list of Kafka bootstrapping hosts (brokers) for this cluster. + required: true + show_user: true + multi: true + - name: topics + type: text + title: Topics + description: | + A list of topics to read from. + required: true + show_user: true + multi: true + - name: data_stream.dataset + type: text + title: Dataset name + description: | + Dataset to write data to. Changing the dataset will send the data to a different index. You can't use `-` in the name of a dataset and only valid characters for [Elasticsearch index names](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-index_.html). + default: kafka_log.generic + required: true + show_user: true + - name: pipeline + type: text + title: Ingest Pipeline + description: | + The Ingest Node pipeline ID to be used by the integration. + required: false + show_user: true + - name: group_id + type: text + title: Group ID + description: The Kafka consumer group id. + required: true + show_user: true + - name: client_id + type: text + title: Client ID + description: The Kafka client id (optional). 
+ required: false + show_user: true + - name: version + type: text + title: Version + description: The version of the Kafka protocol to use (defaults to "1.0.0"). + required: false + show_user: true + - name: expand_event_list_from_field + type: text + title: Expand Event List from Field + description: Split a field that contains an array of JSON objects, the value would be the name of this field. + required: false + show_user: true + - name: parsers + type: yaml + title: Parsers + description: | + This option expects a list of parsers that the payload has to go through. For more information see [Parsers](https://www.elastic.co/guide/en/beats/filebeat/8.0/filebeat-input-kafka.html#_parsers_2) + required: false + show_user: true + multi: false + default: | + #- ndjson: + # keys_under_root: true + # message_key: msg + #- multiline: + # type: counter + # lines_count: 3 + - name: username + type: text + title: Username + description: Username used for SASL authentication. + required: false + show_user: true + - name: password + type: password + title: Password + description: Password used for SASL authentication. + required: false + show_user: true + - name: kerberos_enabled + type: bool + title: Kerberos Enabled + description: The enabled setting can be used to enable the Kerberos configuration by setting it to true. The default value is false. + required: false + show_user: false + - name: kerberos_auth_type + type: text + title: Kerberos Auth Type + description: | + There are two options to authenticate with Kerberos KDC: password and keytab. + Password expects the principal name and its password. When choosing keytab, you have to specify a principal name and a path to a keytab. The keytab must contain the keys of the selected principal. Otherwise, authentication will fail. 
+ required: false + show_user: false + - name: kerberos_config_path + type: text + title: Kerberos Config Path + description: You need to set the path to the krb5.conf, so Elastic Agent can find the Kerberos KDC to retrieve a ticket. + required: false + show_user: false + - name: kerberos_username + type: text + title: Kerberos Username + description: Name of the principal used to connect to the output. + required: false + show_user: false + - name: kerberos_password + type: password + title: Kerberos Password + description: If you configured password for Auth Type, you have to provide a password for the selected principal. + required: false + show_user: false + - name: kerberos_keytab + type: text + title: Kerberos Keytab + description: If you configured keytab for Auth Type, you have to provide the path to the keytab of the selected principal. + required: false + show_user: false + - name: kerberos_service_name + type: text + title: Kerberos Service Name + description: This option can only be configured for Kafka. It is the name of the Kafka service, usually "kafka". + required: false + show_user: false + - name: kerberos_realm + type: text + title: Kerberos Realm + description: Name of the realm where the output resides. + required: false + show_user: false + - name: kerberos_enable_krb5_fast + type: bool + title: Kerberos KRB5 Fast + description: Enable Kerberos FAST authentication. This may conflict with some Active Directory installations. The default is false. + required: false + show_user: false + - name: initial_offset + type: text + title: Initial Offset + description: The initial offset to start reading, either "oldest" or "newest". Defaults to "oldest". + required: false + show_user: false + - name: connect_backoff + type: text + title: Connect Backoff + description: How long to wait before trying to reconnect to the kafka cluster after a fatal error. Default is 30s. 
+ required: false + show_user: false + - name: consume_backoff + type: text + title: Consume Backoff + description: How long to wait before retrying a failed read. Default is 2s. + required: false + show_user: false + - name: max_wait_time + type: text + title: Max Wait Time + description: How long to wait for the minimum number of input bytes while reading. Default is 250ms. + required: false + show_user: false + - name: wait_close + type: text + title: Wait Close + description: When shutting down, how long to wait for in-flight messages to be delivered and acknowledged. + required: false + show_user: false + - name: isolation_level + type: text + title: Isolation Level + description: This configures the Kafka group isolation level, supports the values "read_uncommitted" which returns all messages in the message channel and "read_committed" which hides messages that are part of an aborted transaction. The default is "read_uncommitted". + required: false + show_user: false + - name: fetch_min + type: text + title: Fetch Min + description: The minimum number of bytes to wait for. Defaults to 1. + required: false + show_user: false + - name: fetch_default + type: text + title: Fetch Default + description: The default number of bytes to read per request. Defaults to 1MB. + required: false + show_user: false + - name: fetch_max + type: text + title: Fetch Max + description: The maximum number of bytes to read per request. Defaults to 0 (no limit). + required: false + show_user: false + - name: rebalance_strategy + type: text + title: Rebalance Strategy + description: Either "range" or "roundrobin". Defaults to "range". + required: false + show_user: false + - name: rebalance_timeout + type: text + title: Rebalance Timeout + description: How long to wait for an attempted rebalance. Defaults to 60s. + required: false + show_user: false + - name: rebalance_max_retries + type: text + title: Rebalance Max Retries + description: How many times to retry if rebalancing fails. 
Defaults to 4. + required: false + show_user: false + - name: rebalance_retry_backoff + type: text + title: Rebalance Retry Backoff + description: How long to wait after an unsuccessful rebalance attempt. Defaults to 2s. + required: false + show_user: false \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/logfile_manifest.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/logfile_manifest.yml.njk new file mode 100644 index 0000000000000..24de5572cf73d --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/manifest/logfile_manifest.yml.njk @@ -0,0 +1,66 @@ +- input: logfile + template_path: logfile.yml.hbs + title: {{ data_stream_title }} + description: {{ data_stream_description }} + vars: + - name: paths + type: text + title: Paths + multi: true + required: true + show_user: true + default: + - '/test/path' + description: The full path to the related log file. + - name: exclude_files + type: text + title: Exclude Files + description: A list of regular expressions to match the files that you want Filebeat to ignore. + required: false + show_user: true + default: + - '\.gz$' + - name: custom + type: yaml + title: Additional Log Configuration Options + description: > + Configuration options that can be used to further change input configuration. Check the [Filebeat documentation](https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-input-log.html) for more information. 
+ required: false + show_user: false + default: | + #recursive_glob.enabled: true + #encoding: plain + harvester_buffer_size: 16384 + max_bytes: 10485760 + # json.keys_under_root: false + # json.add_error_key: false + # json.message_key: message + # json.overwrite_keys: false + # json.expand_keys: false + # json.document_id: "" + # json.ignore_decoding_error: false + # multiline.type: pattern + # multiline.pattern: "^{" + # multiline.negate: true + # multiline.match: after + # multiline.max_lines: 500 + # multiline.timeout: 5s + # multiline.flush_pattern: "" + # exclude_lines: ['^DBG'] + # include_lines: ['^ERR', '^WARN'] + # scan_frequency: 10s + # harvester_limit: 0 + # tail_files: false + # backoff: 1s + # max_backoff: 10s + # backoff_factor: 2 + # close_inactive: 5m + # close_renamed: false + # close_removed: true + # close_eof: false + # close_timeout: 0 + # clean_removed: true + # clean_inactive: 0 + # ignore_older: 0 + # max_bytes: 10485760 + # symlinks: false \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/package.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/package.yml.njk new file mode 100644 index 0000000000000..e4beecdd132ea --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/manifest/package.yml.njk @@ -0,0 +1,28 @@ +format_version: {{ format_version }} +name: {{ package_name }} +title: {{ package_title }} +version: {{ package_version }} +description: {{ package_description }} +type: integration +categories: + - security + - iam +conditions: + kibana: + version: {{ min_version }} +icons: + - src: /img/logo.svg + title: {{ package_name }} Logo + size: 32x32 + type: image/svg+xml +policy_templates: + - name: {{ package_name }} + title: {{ package_title }} + description: {{ package_description}} + inputs: {% for input in inputs %} + - type: {{ input.type }} + title: {{ input.title }} + description: {{ input.description }} {% endfor %} +owner: + 
github: {{ package_owner }} + type: elastic \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/ssl.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/ssl.yml.njk new file mode 100644 index 0000000000000..0eb62ad2f5924 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/manifest/ssl.yml.njk @@ -0,0 +1,75 @@ + - name: ssl_supported_protocols + type: text + title: SSL Supported Protocols + multi: true + required: false + show_user: false + default: + - 'TLSv1.1' + - 'TLSv1.2' + - 'TLSv1.3' + description: List of allowed SSL/TLS versions. If the SSL/TLS server decides on protocol versions not configured, the connection will be dropped during negotiation. + - name: ssl_cipher_suites + type: text + title: SSL Cipher Suites + multi: true + required: false + show_user: false + description: The list of cipher suites to use. If not specified, the default cipher suites will be used. + - name: ssl_curve_types + type: text + title: SSL Curve Types + multi: true + required: false + show_user: false + description: The list of curve types for ECDHE (Elliptic Curve Diffie-Hellman ephemeral key exchange). + - name: ssl_ca_sha256 + type: text + title: SSL SHA256 Pin + multi: true + required: false + show_user: false + description: Configure a pin that can be used to do extra validation of the verified certificate chain. + - name: ssl_certificate_authorities + type: text + title: SSL Certificate Authorities + multi: true + required: false + show_user: false + description: The list of root certificates used for server verification. If certificate_authorities is empty or not set, the system keystore is used. Example: /etc/pki/root/ca.pem + - name: ssl_certificate + type: text + title: SSL Certificate + multi: false + required: false + show_user: false + description: Path to the SSL certificate file to be used. 
Example: /etc/pki/client/cert.pem + - name: ssl_certificate_key + type: text + title: SSL Certificate Key + multi: false + required: false + show_user: false + description: Path to the SSL certificate key file to be used. Example: /etc/pki/client/cert.key + - name: ssl_certificate_key_passphrase + type: text + title: SSL Certificate Key Passphrase + multi: false + required: false + show_user: false + description: The passphrase used to decrypt an encrypted key stored in the configured key file. + - name: ssl_verification_mode + type: text + title: SSL Verification Mode + multi: false + required: false + show_user: false + default: 'full' + description: Controls the verification of server certificates. Valid values are full, strict, certificate and none. The default is full. + - name: ssl_certificate_authority_trusted_fingerprint + type: text + title: SSL Certificate Authority Trusted Fingerprint + multi: false + required: false + show_user: false + description: A HEX encoded SHA-256 of a CA certificate. If this certificate is present in the chain during the handshake, it will be added to the certificate_authorities list and the handshake will continue normally. \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/tcp_manifest.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/tcp_manifest.yml.njk new file mode 100644 index 0000000000000..eb2b5d27d55c9 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/manifest/tcp_manifest.yml.njk @@ -0,0 +1,57 @@ +- input: tcp + template_path: tcp.yml.hbs + title: {{ data_stream_title }} + description: {{ data_stream_description }} + vars: + - name: listen_address + type: text + title: Listen Address + description: | + Bind address for the listener. Use 0.0.0.0 to listen on all interfaces. + required: true + show_user: true + default: localhost + - name: listen_port + type: text + title: Listen port + description: | + Bind port for the listener. 
+ required: true + show_user: true + default: 8080 + - name: max_message_size + type: text + title: Max Message Size + description: The maximum size of the message received over TCP. The default is 20MiB + required: false + show_user: false + - name: framing + type: text + title: Framing + description: Specify the framing used to split incoming events. Can be one of delimiter or rfc6587. The default is delimiter + required: false + show_user: false + - name: line_delimiter + type: text + title: Line Delimiter + description: Specify the characters used to split the incoming events. The default is \n. + required: false + show_user: false + - name: max_connections + type: text + title: Max Connections + description: The maximum number of connections to accept at any given point in time. + required: false + show_user: false + - name: timeout + type: text + title: Timeout + description: The duration of inactivity before a remote connection is closed. The default is 300s. Valid time units are ns, us, ms, s, m, h. + required: false + show_user: false + - name: keep_null + type: bool + title: Keep Null Values + description: If this option is set to true, fields with null values will be published in the output document. By default, keep_null is set to false. + required: false + show_user: false \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/udp_manifest.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/udp_manifest.yml.njk new file mode 100644 index 0000000000000..9955d2222b4ba --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/manifest/udp_manifest.yml.njk @@ -0,0 +1,39 @@ +- input: udp + template_path: udp.yml.hbs + title: {{ data_stream_title }} + description: {{ data_stream_description }} + vars: + - name: listen_address + type: text + title: Listen Address + description: | + Bind address for the listener. Use 0.0.0.0 to listen on all interfaces. 
+ required: true + show_user: true + default: localhost + - name: listen_port + type: text + title: Listen port + description: | + Bind port for the listener. + required: true + show_user: true + default: 8080 + - name: max_message_size + type: text + title: Max Message Size + description: The maximum size of the message received over UDP. The default is 10KiB + required: false + show_user: false + - name: timeout + type: text + title: Timeout + description: The duration of inactivity before a remote connection is closed. The default is 300s. Valid time units are ns, us, ms, s, m, h. + required: false + show_user: false + - name: keep_null + type: bool + title: Keep Null Values + description: If this option is set to true, fields with null values will be published in the output document. By default, keep_null is set to false. + required: false + show_user: false \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/pipeline.yml.njk b/x-pack/plugins/integration_assistant/server/templates/pipeline/pipeline.yml.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/pipeline.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/pipeline/pipeline.yml.njk diff --git a/x-pack/plugins/integration_assistant/server/templates/pipeline_tests/test-common-config.yml b/x-pack/plugins/integration_assistant/server/templates/pipeline_tests/test-common-config.yml new file mode 100644 index 0000000000000..772cb40587804 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/pipeline_tests/test-common-config.yml @@ -0,0 +1,3 @@ +fields: + tags: + - preserve_original_event \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/readme/README.md.njk b/x-pack/plugins/integration_assistant/server/templates/readme/README.md.njk new file mode 100644 index 0000000000000..74520da051b8b --- /dev/null +++ 
b/x-pack/plugins/integration_assistant/server/templates/readme/README.md.njk @@ -0,0 +1,24 @@ +# {{ package_name }} Integration + +This integration is for ingesting data from [{{ package_name }}](https://example.com/). +{% for data_stream in data_streams %} +- `{{ data_stream.name }}`: {{ data_stream.description }} +{% endfor %} +See [Link to docs](https://example.com/docs) for more information. + +## Compatibility + +Insert compatibility information here. This could for example be which versions of the product it was tested with. + +## Setup + +Insert how to configure the vendor side of the integration here, for example how to configure the API, create a syslog remote destination etc. + +## Logs +{% for data_stream in data_streams %} +### {{ data_stream.name }} + +Insert a description of the data stream here. + +{% raw %}{{fields {% endraw %}"{{ data_stream.name }}"{% raw %}}}{% endraw %} +{% endfor %} diff --git a/x-pack/plugins/integration_assistant/server/templates/system_tests/docker-compose.yml.njk b/x-pack/plugins/integration_assistant/server/templates/system_tests/docker-compose.yml.njk new file mode 100644 index 0000000000000..74ebed9dd0a75 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/system_tests/docker-compose.yml.njk @@ -0,0 +1,3 @@ +version: "{{ docker_compose_version }}" +services: {% for service in services %} + {{ service }}{% endfor %} \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/system_tests/service-filestream.njk b/x-pack/plugins/integration_assistant/server/templates/system_tests/service-filestream.njk new file mode 100644 index 0000000000000..642897a0fbfea --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/system_tests/service-filestream.njk @@ -0,0 +1,6 @@ +{{package_name}}-{{data_stream_name}}-filestream: + image: alpine + volumes: + - ./sample_logs:/sample_logs:ro + - ${SERVICE_LOGS_DIR}:/var/log + command: /bin/sh -c "cp /sample_logs/* 
/var/log/" \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/system_tests/service-gcs.njk b/x-pack/plugins/integration_assistant/server/templates/system_tests/service-gcs.njk new file mode 100644 index 0000000000000..3a1010d0b0bfb --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/system_tests/service-gcs.njk @@ -0,0 +1,7 @@ +{{package_name}}-{{data_stream_name}}-gcs: + image: fsouza/fake-gcs-server:latest + command: -host=0.0.0.0 -public-host=elastic-package-service_{{package_name}}-{{data_stream_name}}-gcs_1 -port=4443 -scheme=http + volumes: + - ./sample_logs:/data + ports: + - 4443/tcp \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/system_tests/service-logfile.njk b/x-pack/plugins/integration_assistant/server/templates/system_tests/service-logfile.njk new file mode 100644 index 0000000000000..1393ef7cc1098 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/system_tests/service-logfile.njk @@ -0,0 +1,6 @@ +{{package_name}}-{{data_stream_name}}-logfile: + image: alpine + volumes: + - ./sample_logs:/sample_logs:ro + - ${SERVICE_LOGS_DIR}:/var/log + command: /bin/sh -c "cp /sample_logs/* /var/log/" \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/system_tests/service-tcp.njk b/x-pack/plugins/integration_assistant/server/templates/system_tests/service-tcp.njk new file mode 100644 index 0000000000000..0267c60d00d4d --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/system_tests/service-tcp.njk @@ -0,0 +1,6 @@ +{{package_name}}-{{data_stream_name}}-tcp: + image: docker.elastic.co/observability/stream:{{stream_version}} + volumes: + - ./sample_logs:/sample_logs:ro + entrypoint: /bin/bash + command: -c "/stream log --start-signal=SIGHUP --delay=5s --addr elastic-agent:9025 -p=tcp /sample_logs/{{package_name}}.log" \ No newline at end of file diff --git 
a/x-pack/plugins/integration_assistant/server/templates/system_tests/service-udp.njk b/x-pack/plugins/integration_assistant/server/templates/system_tests/service-udp.njk new file mode 100644 index 0000000000000..bdb8b5b91b8ff --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/system_tests/service-udp.njk @@ -0,0 +1,6 @@ +{{package_name}}-{{data_stream_name}}-udp: + image: docker.elastic.co/observability/stream:{{stream_version}} + volumes: + - ./sample_logs:/sample_logs:ro + entrypoint: /bin/bash + command: -c "/stream log --start-signal=SIGHUP --delay=5s --addr elastic-agent:9025 -p=udp /sample_logs/{{package_name}}.log" \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/system_tests/test-filestream-config.yml.njk b/x-pack/plugins/integration_assistant/server/templates/system_tests/test-filestream-config.yml.njk new file mode 100644 index 0000000000000..3a861dfe3b7d1 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/system_tests/test-filestream-config.yml.njk @@ -0,0 +1,13 @@ +service: {{package_name}}-{{data_stream_name}}-filestream +input: filestream +data_stream: + vars: + preserve_original_event: true + paths: + - '{% raw %}{{SERVICE_LOGS_DIR}}{% endraw %}/test-{{package_name}}-{{data_stream_name}}.log' +numeric_keyword_fields: + - log.file.device_id + - log.file.inode + - log.file.idxhi + - log.file.idxlo + - log.file.vol \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/system_tests/test-gcs-config.yml.njk b/x-pack/plugins/integration_assistant/server/templates/system_tests/test-gcs-config.yml.njk new file mode 100644 index 0000000000000..3bdf39c42fac7 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/system_tests/test-gcs-config.yml.njk @@ -0,0 +1,10 @@ +service: {{package_name}}-{{data_stream_name}}-gcs +input: gcs +data_stream: + vars: + project_id: testproject + alternative_host: "http://{% raw 
%}{{Hostname}}:{{Port}}{% endraw %}" + buckets: | + - name: testbucket + poll: true + poll_interval: 15s diff --git a/x-pack/plugins/integration_assistant/server/templates/system_tests/test-logfile-config.yml.njk b/x-pack/plugins/integration_assistant/server/templates/system_tests/test-logfile-config.yml.njk new file mode 100644 index 0000000000000..d6d891cd7038b --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/system_tests/test-logfile-config.yml.njk @@ -0,0 +1,13 @@ +service: {{package_name}}-{{data_stream_name}}-logfile +input: logfile +data_stream: + vars: + preserve_original_event: true + paths: + - '{% raw %}{{SERVICE_LOGS_DIR}}{% endraw %}/{{package_name}}-{{data_stream_name}}.log' +numeric_keyword_fields: + - log.file.device_id + - log.file.inode + - log.file.idxhi + - log.file.idxlo + - log.file.vol \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/templates/system_tests/test-tcp-config.yml.njk b/x-pack/plugins/integration_assistant/server/templates/system_tests/test-tcp-config.yml.njk new file mode 100644 index 0000000000000..1c5377a87e213 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/system_tests/test-tcp-config.yml.njk @@ -0,0 +1,7 @@ +service: {{package_name}}-{{data_stream_name}}-tcp +input: tcp +data_stream: + vars: + preserve_original_event: true + listen_address: 0.0.0.0 + listen_port: 9025 diff --git a/x-pack/plugins/integration_assistant/server/templates/system_tests/test-udp-config.yml.njk b/x-pack/plugins/integration_assistant/server/templates/system_tests/test-udp-config.yml.njk new file mode 100644 index 0000000000000..634f151b97198 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/templates/system_tests/test-udp-config.yml.njk @@ -0,0 +1,7 @@ +service: {{package_name}}-{{data_stream_name}}-udp +input: udp +data_stream: + vars: + preserve_original_event: true + listen_address: 0.0.0.0 + listen_port: 9025 diff --git a/yarn.lock b/yarn.lock 
index 19c89b83287a9..3587084a1b081 100644 --- a/yarn.lock +++ b/yarn.lock @@ -86,6 +86,52 @@ "@aws-sdk/types" "^3.222.0" tslib "^1.11.1" +"@aws-crypto/ie11-detection@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/ie11-detection/-/ie11-detection-3.0.0.tgz#640ae66b4ec3395cee6a8e94ebcd9f80c24cd688" + integrity sha512-341lBBkiY1DfDNKai/wXM3aujNBkXR7tq1URPQDL9wi3AUbI80NR74uF1TXHMm7po1AcnFk8iu2S2IeU/+/A+Q== + dependencies: + tslib "^1.11.1" + +"@aws-crypto/sha256-browser@3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-browser/-/sha256-browser-3.0.0.tgz#05f160138ab893f1c6ba5be57cfd108f05827766" + integrity sha512-8VLmW2B+gjFbU5uMeqtQM6Nj0/F1bro80xQXCW6CQBWgosFWXTx77aeOF5CAIAmbOK64SdMBJdNr6J41yP5mvQ== + dependencies: + "@aws-crypto/ie11-detection" "^3.0.0" + "@aws-crypto/sha256-js" "^3.0.0" + "@aws-crypto/supports-web-crypto" "^3.0.0" + "@aws-crypto/util" "^3.0.0" + "@aws-sdk/types" "^3.222.0" + "@aws-sdk/util-locate-window" "^3.0.0" + "@aws-sdk/util-utf8-browser" "^3.0.0" + tslib "^1.11.1" + +"@aws-crypto/sha256-js@3.0.0", "@aws-crypto/sha256-js@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-js/-/sha256-js-3.0.0.tgz#f06b84d550d25521e60d2a0e2a90139341e007c2" + integrity sha512-PnNN7os0+yd1XvXAy23CFOmTbMaDxgxXtTKHybrJ39Y8kGzBATgBFibWJKH6BhytLI/Zyszs87xCOBNyBig6vQ== + dependencies: + "@aws-crypto/util" "^3.0.0" + "@aws-sdk/types" "^3.222.0" + tslib "^1.11.1" + +"@aws-crypto/sha256-js@^5.2.0": + version "5.2.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-js/-/sha256-js-5.2.0.tgz#c4fdb773fdbed9a664fc1a95724e206cf3860042" + integrity sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA== + dependencies: + "@aws-crypto/util" "^5.2.0" + "@aws-sdk/types" "^3.222.0" + tslib "^2.6.2" + +"@aws-crypto/supports-web-crypto@^3.0.0": + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/@aws-crypto/supports-web-crypto/-/supports-web-crypto-3.0.0.tgz#5d1bf825afa8072af2717c3e455f35cda0103ec2" + integrity sha512-06hBdMwUAb2WFTuGG73LSC0wfPu93xWwo5vL2et9eymgmu3Id5vFAHBbajVWiGhPO37qcsdCap/FqXvJGJWPIg== + dependencies: + tslib "^1.11.1" + "@aws-crypto/util@^3.0.0": version "3.0.0" resolved "https://registry.yarnpkg.com/@aws-crypto/util/-/util-3.0.0.tgz#1c7ca90c29293f0883468ad48117937f0fe5bfb0" @@ -95,6 +141,236 @@ "@aws-sdk/util-utf8-browser" "^3.0.0" tslib "^1.11.1" +"@aws-crypto/util@^5.2.0": + version "5.2.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/util/-/util-5.2.0.tgz#71284c9cffe7927ddadac793c14f14886d3876da" + integrity sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ== + dependencies: + "@aws-sdk/types" "^3.222.0" + "@smithy/util-utf8" "^2.0.0" + tslib "^2.6.2" + +"@aws-sdk/client-sso@3.583.0": + version "3.583.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-sso/-/client-sso-3.583.0.tgz#fa18cadd19abe80e0c0378b6cbe6225ed0296595" + integrity sha512-FNJ2MmiBtZZwgkj4+GLVrzqwmD6D8FBptrFZk7PnGkSf7v1Q8txYNI6gY938RRhYJ4lBW4cNbhPvWoDxAl90Hw== + dependencies: + "@aws-crypto/sha256-browser" "3.0.0" + "@aws-crypto/sha256-js" "3.0.0" + "@aws-sdk/core" "3.582.0" + "@aws-sdk/middleware-host-header" "3.577.0" + "@aws-sdk/middleware-logger" "3.577.0" + "@aws-sdk/middleware-recursion-detection" "3.577.0" + "@aws-sdk/middleware-user-agent" "3.583.0" + "@aws-sdk/region-config-resolver" "3.577.0" + "@aws-sdk/types" "3.577.0" + "@aws-sdk/util-endpoints" "3.583.0" + "@aws-sdk/util-user-agent-browser" "3.577.0" + "@aws-sdk/util-user-agent-node" "3.577.0" + "@smithy/config-resolver" "^3.0.0" + "@smithy/core" "^2.0.1" + "@smithy/fetch-http-handler" "^3.0.1" + "@smithy/hash-node" "^3.0.0" + "@smithy/invalid-dependency" "^3.0.0" + "@smithy/middleware-content-length" "^3.0.0" + "@smithy/middleware-endpoint" "^3.0.0" + "@smithy/middleware-retry" "^3.0.1" + 
"@smithy/middleware-serde" "^3.0.0" + "@smithy/middleware-stack" "^3.0.0" + "@smithy/node-config-provider" "^3.0.0" + "@smithy/node-http-handler" "^3.0.0" + "@smithy/protocol-http" "^4.0.0" + "@smithy/smithy-client" "^3.0.1" + "@smithy/types" "^3.0.0" + "@smithy/url-parser" "^3.0.0" + "@smithy/util-base64" "^3.0.0" + "@smithy/util-body-length-browser" "^3.0.0" + "@smithy/util-body-length-node" "^3.0.0" + "@smithy/util-defaults-mode-browser" "^3.0.1" + "@smithy/util-defaults-mode-node" "^3.0.1" + "@smithy/util-endpoints" "^2.0.0" + "@smithy/util-middleware" "^3.0.0" + "@smithy/util-retry" "^3.0.0" + "@smithy/util-utf8" "^3.0.0" + tslib "^2.6.2" + +"@aws-sdk/core@3.582.0": + version "3.582.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/core/-/core-3.582.0.tgz#9ebb295290cba3d68738401fe4e3d51dfb0d1bfc" + integrity sha512-ofmD96IQc9g1dbyqlCyxu5fCG7kIl9p1NoN5+vGBUyLdbmPCV3Pdg99nRHYEJuv2MgGx5AUFGDPMHcqbJpnZIw== + dependencies: + "@smithy/core" "^2.0.1" + "@smithy/protocol-http" "^4.0.0" + "@smithy/signature-v4" "^3.0.0" + "@smithy/smithy-client" "^3.0.1" + "@smithy/types" "^3.0.0" + fast-xml-parser "4.2.5" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-env@3.577.0": + version "3.577.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-env/-/credential-provider-env-3.577.0.tgz#d587ea01a2288840e8483a236516c0f26cb4ba36" + integrity sha512-Jxu255j0gToMGEiqufP8ZtKI8HW90lOLjwJ3LrdlD/NLsAY0tOQf1fWc53u28hWmmNGMxmCrL2p66IOgMDhDUw== + dependencies: + "@aws-sdk/types" "3.577.0" + "@smithy/property-provider" "^3.0.0" + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-http@3.582.0": + version "3.582.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-http/-/credential-provider-http-3.582.0.tgz#6ea9377461c4ce38d487ea0ae5888155f7c495a6" + integrity sha512-kGOUKw5ryPkDIYB69PjK3SicVLTbWB06ouFN2W1EvqUJpkQGPAUGzYcomKtt3mJaCTf/1kfoaHwARAl6KKSP8Q== + dependencies: + "@aws-sdk/types" "3.577.0" + 
"@smithy/fetch-http-handler" "^3.0.1" + "@smithy/node-http-handler" "^3.0.0" + "@smithy/property-provider" "^3.0.0" + "@smithy/protocol-http" "^4.0.0" + "@smithy/smithy-client" "^3.0.1" + "@smithy/types" "^3.0.0" + "@smithy/util-stream" "^3.0.1" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-ini@3.583.0": + version "3.583.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.583.0.tgz#948ebd3ca257d7d9362d3294259e0be9526cd662" + integrity sha512-8I0oWNg/yps6ctjhEeL/qJ9BIa/+xXP7RPDQqFKZ2zBkWbmLLOoMWXRvl8uKUBD6qCe+DGmcu9skfVXeXSesEQ== + dependencies: + "@aws-sdk/credential-provider-env" "3.577.0" + "@aws-sdk/credential-provider-process" "3.577.0" + "@aws-sdk/credential-provider-sso" "3.583.0" + "@aws-sdk/credential-provider-web-identity" "3.577.0" + "@aws-sdk/types" "3.577.0" + "@smithy/credential-provider-imds" "^3.0.0" + "@smithy/property-provider" "^3.0.0" + "@smithy/shared-ini-file-loader" "^3.0.0" + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-node@^3.583.0": + version "3.583.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-node/-/credential-provider-node-3.583.0.tgz#8ce316409d91cddca0c85851ca50726ee666cff5" + integrity sha512-yBNypBXny7zJH85SzxDj8s1mbLXv9c/Vbq0qR3R3POj2idZ6ywB/qlIRC1XwBuv49Wvg8kA1wKXk3K3jrpcVIw== + dependencies: + "@aws-sdk/credential-provider-env" "3.577.0" + "@aws-sdk/credential-provider-http" "3.582.0" + "@aws-sdk/credential-provider-ini" "3.583.0" + "@aws-sdk/credential-provider-process" "3.577.0" + "@aws-sdk/credential-provider-sso" "3.583.0" + "@aws-sdk/credential-provider-web-identity" "3.577.0" + "@aws-sdk/types" "3.577.0" + "@smithy/credential-provider-imds" "^3.0.0" + "@smithy/property-provider" "^3.0.0" + "@smithy/shared-ini-file-loader" "^3.0.0" + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-process@3.577.0": + version "3.577.0" + resolved 
"https://registry.yarnpkg.com/@aws-sdk/credential-provider-process/-/credential-provider-process-3.577.0.tgz#ba35b4f012563762bbd86a71989d366272ee0f07" + integrity sha512-Gin6BWtOiXxIgITrJ3Nwc+Y2P1uVT6huYR4EcbA/DJUPWyO0n9y5UFLewPvVbLkRn15JeEqErBLUrHclkiOKtw== + dependencies: + "@aws-sdk/types" "3.577.0" + "@smithy/property-provider" "^3.0.0" + "@smithy/shared-ini-file-loader" "^3.0.0" + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-sso@3.583.0": + version "3.583.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.583.0.tgz#468bb6ca9bd7b89370d5ec7865a8e29e98772abc" + integrity sha512-G/1EvL9tBezSiU+06tG4K/kOvFfPjnheT4JSXqjPM7+vjKzgp2jxp1J9MMd69zs4jVWon932zMeGgjrCplzMEg== + dependencies: + "@aws-sdk/client-sso" "3.583.0" + "@aws-sdk/token-providers" "3.577.0" + "@aws-sdk/types" "3.577.0" + "@smithy/property-provider" "^3.0.0" + "@smithy/shared-ini-file-loader" "^3.0.0" + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-web-identity@3.577.0": + version "3.577.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.577.0.tgz#294fb71fa832d9f55ea1c56678357efa3cd7ca55" + integrity sha512-ZGHGNRaCtJJmszb9UTnC7izNCtRUttdPlLdMkh41KPS32vfdrBDHs1JrpbZijItRj1xKuOXsiYSXLAaHGcLh8Q== + dependencies: + "@aws-sdk/types" "3.577.0" + "@smithy/property-provider" "^3.0.0" + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-host-header@3.577.0": + version "3.577.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-host-header/-/middleware-host-header-3.577.0.tgz#a3fc626d409ec850296740478c64ef5806d8b878" + integrity sha512-9ca5MJz455CODIVXs0/sWmJm7t3QO4EUa1zf8pE8grLpzf0J94bz/skDWm37Pli13T3WaAQBHCTiH2gUVfCsWg== + dependencies: + "@aws-sdk/types" "3.577.0" + "@smithy/protocol-http" "^4.0.0" + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-logger@3.577.0": + version "3.577.0" 
+ resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-logger/-/middleware-logger-3.577.0.tgz#6da3b13ae284fb3930961f0fc8e20b1f6cf8be30" + integrity sha512-aPFGpGjTZcJYk+24bg7jT4XdIp42mFXSuPt49lw5KygefLyJM/sB0bKKqPYYivW0rcuZ9brQ58eZUNthrzYAvg== + dependencies: + "@aws-sdk/types" "3.577.0" + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-recursion-detection@3.577.0": + version "3.577.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.577.0.tgz#fff76abc6d4521636f9e654ce5bf2c4c79249417" + integrity sha512-pn3ZVEd2iobKJlR3H+bDilHjgRnNrQ6HMmK9ZzZw89Ckn3Dcbv48xOv4RJvu0aU8SDLl/SNCxppKjeLDTPGBNA== + dependencies: + "@aws-sdk/types" "3.577.0" + "@smithy/protocol-http" "^4.0.0" + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-user-agent@3.583.0": + version "3.583.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.583.0.tgz#5554b0de431cb3700368f01eb7425210fd3ee9a9" + integrity sha512-xVNXXXDWvBVI/AeVtSdA9SVumqxiZaESk/JpUn9GMkmtTKfter0Cweap+1iQ9j8bRAO0vNhmIkbcvdB1S4WVUw== + dependencies: + "@aws-sdk/types" "3.577.0" + "@aws-sdk/util-endpoints" "3.583.0" + "@smithy/protocol-http" "^4.0.0" + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@aws-sdk/region-config-resolver@3.577.0": + version "3.577.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/region-config-resolver/-/region-config-resolver-3.577.0.tgz#1fab6dc6c4ec3ad9a0352c1ce1a757464219fb00" + integrity sha512-4ChCFACNwzqx/xjg3zgFcW8Ali6R9C95cFECKWT/7CUM1D0MGvkclSH2cLarmHCmJgU6onKkJroFtWp0kHhgyg== + dependencies: + "@aws-sdk/types" "3.577.0" + "@smithy/node-config-provider" "^3.0.0" + "@smithy/types" "^3.0.0" + "@smithy/util-config-provider" "^3.0.0" + "@smithy/util-middleware" "^3.0.0" + tslib "^2.6.2" + +"@aws-sdk/token-providers@3.577.0": + version "3.577.0" + resolved 
"https://registry.yarnpkg.com/@aws-sdk/token-providers/-/token-providers-3.577.0.tgz#8f9e96ff42994dfd0b5b3692b583644ccda04893" + integrity sha512-0CkIZpcC3DNQJQ1hDjm2bdSy/Xjs7Ny5YvSsacasGOkNfk+FdkiQy6N67bZX3Zbc9KIx+Nz4bu3iDeNSNplnnQ== + dependencies: + "@aws-sdk/types" "3.577.0" + "@smithy/property-provider" "^3.0.0" + "@smithy/shared-ini-file-loader" "^3.0.0" + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@aws-sdk/types@3.577.0", "@aws-sdk/types@^3.577.0": + version "3.577.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/types/-/types-3.577.0.tgz#7700784d368ce386745f8c340d9d68cea4716f90" + integrity sha512-FT2JZES3wBKN/alfmhlo+3ZOq/XJ0C7QOZcDNrpKjB0kqYoKjhVKZ/Hx6ArR0czkKfHzBBEs6y40ebIHx2nSmA== + dependencies: + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + "@aws-sdk/types@^3.222.0": version "3.433.0" resolved "https://registry.yarnpkg.com/@aws-sdk/types/-/types-3.433.0.tgz#0f94eae2a4a3525ca872c9ab04e143c01806d755" @@ -103,6 +379,43 @@ "@smithy/types" "^2.4.0" tslib "^2.5.0" +"@aws-sdk/util-endpoints@3.583.0": + version "3.583.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-endpoints/-/util-endpoints-3.583.0.tgz#1554d3b4124be21a72a519603e9727d973845504" + integrity sha512-ZC9mb2jq6BFXPYsUsD2tmYcnlmd+9PGNwnFNn8jk4abna5Jjk2wDknN81ybktmBR5ttN9W8ugmktuKtvAMIDCQ== + dependencies: + "@aws-sdk/types" "3.577.0" + "@smithy/types" "^3.0.0" + "@smithy/util-endpoints" "^2.0.0" + tslib "^2.6.2" + +"@aws-sdk/util-locate-window@^3.0.0": + version "3.568.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-locate-window/-/util-locate-window-3.568.0.tgz#2acc4b2236af0d7494f7e517401ba6b3c4af11ff" + integrity sha512-3nh4TINkXYr+H41QaPelCceEB2FXP3fxp93YZXB/kqJvX0U9j0N0Uk45gvsjmEPzG8XxkPEeLIfT2I1M7A6Lig== + dependencies: + tslib "^2.6.2" + +"@aws-sdk/util-user-agent-browser@3.577.0": + version "3.577.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.577.0.tgz#d4d2cdb3a2b3d1c8b35f239ee9f7b2c87bee66ea" + integrity 
sha512-zEAzHgR6HWpZOH7xFgeJLc6/CzMcx4nxeQolZxVZoB5pPaJd3CjyRhZN0xXeZB0XIRCWmb4yJBgyiugXLNMkLA== + dependencies: + "@aws-sdk/types" "3.577.0" + "@smithy/types" "^3.0.0" + bowser "^2.11.0" + tslib "^2.6.2" + +"@aws-sdk/util-user-agent-node@3.577.0": + version "3.577.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.577.0.tgz#0215ea10ead622a61b575a7181a4c51ae8e71449" + integrity sha512-XqvtFjbSMtycZTWVwDe8DRWovuoMbA54nhUoZwVU6rW9OSD6NZWGR512BUGHFaWzW0Wg8++Dj10FrKTG2XtqfA== + dependencies: + "@aws-sdk/types" "3.577.0" + "@smithy/node-config-provider" "^3.0.0" + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + "@aws-sdk/util-utf8-browser@^3.0.0": version "3.259.0" resolved "https://registry.yarnpkg.com/@aws-sdk/util-utf8-browser/-/util-utf8-browser-3.259.0.tgz#3275a6f5eb334f96ca76635b961d3c50259fd9ff" @@ -7960,7 +8273,51 @@ "@types/node" ">=18.0.0" axios "^1.6.0" -"@smithy/eventstream-codec@^2.0.12", "@smithy/eventstream-codec@^2.1.1": +"@smithy/abort-controller@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/abort-controller/-/abort-controller-3.0.0.tgz#5815f5d4618e14bf8d031bb98a99adabbb831168" + integrity sha512-p6GlFGBt9K4MYLu72YuJ523NVR4A8oHlC5M2JO6OmQqN8kAc/uh1JqLE+FizTokrSJGg0CSvC+BrsmGzKtsZKA== + dependencies: + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@smithy/config-resolver@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/config-resolver/-/config-resolver-3.0.0.tgz#d37b31e3202c5ce54d9bd2406dcde7c7b5073cbd" + integrity sha512-2GzOfADwYLQugYkKQhIyZyQlM05K+tMKvRnc6eFfZcpJGRfKoMUMYdPlBKmqHwQFXQKBrGV6cxL9oymWgDzvFw== + dependencies: + "@smithy/node-config-provider" "^3.0.0" + "@smithy/types" "^3.0.0" + "@smithy/util-config-provider" "^3.0.0" + "@smithy/util-middleware" "^3.0.0" + tslib "^2.6.2" + +"@smithy/core@^2.0.1": + version "2.0.1" + resolved "https://registry.yarnpkg.com/@smithy/core/-/core-2.0.1.tgz#8a7ac8faa0227912ce260bc3f976a5e254323920" + integrity 
sha512-rcMkjvwxH/bER+oZUPR0yTA0ELD6m3A+d92+CFkdF6HJFCBB1bXo7P5pm21L66XwTN01B6bUhSCQ7cymWRD8zg== + dependencies: + "@smithy/middleware-endpoint" "^3.0.0" + "@smithy/middleware-retry" "^3.0.1" + "@smithy/middleware-serde" "^3.0.0" + "@smithy/protocol-http" "^4.0.0" + "@smithy/smithy-client" "^3.0.1" + "@smithy/types" "^3.0.0" + "@smithy/util-middleware" "^3.0.0" + tslib "^2.6.2" + +"@smithy/credential-provider-imds@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/credential-provider-imds/-/credential-provider-imds-3.0.0.tgz#a290eb0224ef045742e5c806685cf63d44a084f3" + integrity sha512-lfmBiFQcA3FsDAPxNfY0L7CawcWtbyWsBOHo34nF095728JLkBX4Y9q/VPPE2r7fqMVK+drmDigqE2/SSQeVRA== + dependencies: + "@smithy/node-config-provider" "^3.0.0" + "@smithy/property-provider" "^3.0.0" + "@smithy/types" "^3.0.0" + "@smithy/url-parser" "^3.0.0" + tslib "^2.6.2" + +"@smithy/eventstream-codec@^2.1.1": version "2.1.1" resolved "https://registry.yarnpkg.com/@smithy/eventstream-codec/-/eventstream-codec-2.1.1.tgz#4405ab0f9c77d439c575560c4886e59ee17d6d38" integrity sha512-E8KYBxBIuU4c+zrpR22VsVrOPoEDzk35bQR3E+xm4k6Pa6JqzkDOdMyf9Atac5GPNKHJBdVaQ4JtjdWX2rl/nw== @@ -7970,6 +8327,16 @@ "@smithy/util-hex-encoding" "^2.1.1" tslib "^2.5.0" +"@smithy/eventstream-codec@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/eventstream-codec/-/eventstream-codec-3.0.0.tgz#81d30391220f73d41f432f65384b606d67673e46" + integrity sha512-PUtyEA0Oik50SaEFCZ0WPVtF9tz/teze2fDptW6WRXl+RrEenH8UbEjudOz8iakiMl3lE3lCVqYf2Y+znL8QFQ== + dependencies: + "@aws-crypto/crc32" "3.0.0" + "@smithy/types" "^3.0.0" + "@smithy/util-hex-encoding" "^3.0.0" + tslib "^2.6.2" + "@smithy/eventstream-serde-node@^2.1.1": version "2.1.1" resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-node/-/eventstream-serde-node-2.1.1.tgz#2e1afa27f9c7eb524c1c53621049c5e4e3cea6a5" @@ -7988,6 +8355,35 @@ "@smithy/types" "^2.9.1" tslib "^2.5.0" +"@smithy/fetch-http-handler@^3.0.1": + version 
"3.0.1" + resolved "https://registry.yarnpkg.com/@smithy/fetch-http-handler/-/fetch-http-handler-3.0.1.tgz#dacfdf6e70d639fac4a0f57c42ce13f0ed14ff22" + integrity sha512-uaH74i5BDj+rBwoQaXioKpI0SHBJFtOVwzrCpxZxphOW0ki5jhj7dXvDMYM2IJem8TpdFvS2iC08sjOblfFGFg== + dependencies: + "@smithy/protocol-http" "^4.0.0" + "@smithy/querystring-builder" "^3.0.0" + "@smithy/types" "^3.0.0" + "@smithy/util-base64" "^3.0.0" + tslib "^2.6.2" + +"@smithy/hash-node@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/hash-node/-/hash-node-3.0.0.tgz#f44b5fff193e241c1cdcc957b296b60f186f0e59" + integrity sha512-84qXstNemP3XS5jcof0el6+bDfjzuvhJPQTEfro3lgtbCtKgzPm3MgiS6ehXVPjeQ5+JS0HqmTz8f/RYfzHVxw== + dependencies: + "@smithy/types" "^3.0.0" + "@smithy/util-buffer-from" "^3.0.0" + "@smithy/util-utf8" "^3.0.0" + tslib "^2.6.2" + +"@smithy/invalid-dependency@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/invalid-dependency/-/invalid-dependency-3.0.0.tgz#21cb6b5203ee15321bfcc751f21f7a19536d4ae8" + integrity sha512-F6wBBaEFgJzj0s4KUlliIGPmqXemwP6EavgvDqYwCH40O5Xr2iMHvS8todmGVZtuJCorBkXsYLyTu4PuizVq5g== + dependencies: + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + "@smithy/is-array-buffer@^2.0.0": version "2.0.0" resolved "https://registry.yarnpkg.com/@smithy/is-array-buffer/-/is-array-buffer-2.0.0.tgz#8fa9b8040651e7ba0b2f6106e636a91354ff7d34" @@ -7995,6 +8391,160 @@ dependencies: tslib "^2.5.0" +"@smithy/is-array-buffer@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/is-array-buffer/-/is-array-buffer-3.0.0.tgz#9a95c2d46b8768946a9eec7f935feaddcffa5e7a" + integrity sha512-+Fsu6Q6C4RSJiy81Y8eApjEB5gVtM+oFKTffg+jSuwtvomJJrhUJBu2zS8wjXSgH/g1MKEWrzyChTBe6clb5FQ== + dependencies: + tslib "^2.6.2" + +"@smithy/middleware-content-length@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/middleware-content-length/-/middleware-content-length-3.0.0.tgz#084b3d22248967885d496eb0b105d9090e8ababd" + 
integrity sha512-3C4s4d/iGobgCtk2tnWW6+zSTOBg1PRAm2vtWZLdriwTroFbbWNSr3lcyzHdrQHnEXYCC5K52EbpfodaIUY8sg== + dependencies: + "@smithy/protocol-http" "^4.0.0" + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@smithy/middleware-endpoint@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/middleware-endpoint/-/middleware-endpoint-3.0.0.tgz#54c9e1bd8f35b7d004c803eaf3702e61e32b8295" + integrity sha512-aXOAWztw/5qAfp0NcA2OWpv6ZI/E+Dh9mByif7i91D/0iyYNUcKvskmXiowKESFkuZ7PIMd3VOR4fTibZDs2OQ== + dependencies: + "@smithy/middleware-serde" "^3.0.0" + "@smithy/node-config-provider" "^3.0.0" + "@smithy/shared-ini-file-loader" "^3.0.0" + "@smithy/types" "^3.0.0" + "@smithy/url-parser" "^3.0.0" + "@smithy/util-middleware" "^3.0.0" + tslib "^2.6.2" + +"@smithy/middleware-retry@^3.0.1": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@smithy/middleware-retry/-/middleware-retry-3.0.1.tgz#167b75e9b79395f11a799f22030eaaf7d40da410" + integrity sha512-hBhSEuL841FhJBK/19WpaGk5YWSzFk/P2UaVjANGKRv3eYNO8Y1lANWgqnuPWjOyCEWMPr58vELFDWpxvRKANw== + dependencies: + "@smithy/node-config-provider" "^3.0.0" + "@smithy/protocol-http" "^4.0.0" + "@smithy/service-error-classification" "^3.0.0" + "@smithy/smithy-client" "^3.0.1" + "@smithy/types" "^3.0.0" + "@smithy/util-middleware" "^3.0.0" + "@smithy/util-retry" "^3.0.0" + tslib "^2.6.2" + uuid "^9.0.1" + +"@smithy/middleware-serde@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/middleware-serde/-/middleware-serde-3.0.0.tgz#786da6a6bc0e5e51d669dac834c19965245dd302" + integrity sha512-I1vKG1foI+oPgG9r7IMY1S+xBnmAn1ISqployvqkwHoSb8VPsngHDTOgYGYBonuOKndaWRUGJZrKYYLB+Ane6w== + dependencies: + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@smithy/middleware-stack@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/middleware-stack/-/middleware-stack-3.0.0.tgz#00f112bae7af5fc3bd37d4fab95ebce0f17a7774" + integrity 
sha512-+H0jmyfAyHRFXm6wunskuNAqtj7yfmwFB6Fp37enytp2q047/Od9xetEaUbluyImOlGnGpaVGaVfjwawSr+i6Q== + dependencies: + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@smithy/node-config-provider@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/node-config-provider/-/node-config-provider-3.0.0.tgz#4cd5dcf6132c75d6a582fcd6243482dac703865a" + integrity sha512-buqfaSdDh0zo62EPLf8rGDvcpKwGpO5ho4bXS2cdFhlOta7tBkWJt+O5uiaAeICfIOfPclNOndshDNSanX2X9g== + dependencies: + "@smithy/property-provider" "^3.0.0" + "@smithy/shared-ini-file-loader" "^3.0.0" + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@smithy/node-http-handler@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/node-http-handler/-/node-http-handler-3.0.0.tgz#e771ea95d03e259f04b7b37e8aece8a4fffc8cdc" + integrity sha512-3trD4r7NOMygwLbUJo4eodyQuypAWr7uvPnebNJ9a70dQhVn+US8j/lCnvoJS6BXfZeF7PkkkI0DemVJw+n+eQ== + dependencies: + "@smithy/abort-controller" "^3.0.0" + "@smithy/protocol-http" "^4.0.0" + "@smithy/querystring-builder" "^3.0.0" + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@smithy/property-provider@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/property-provider/-/property-provider-3.0.0.tgz#ef7a26557c855cc1471b9aa0e05529183e99b978" + integrity sha512-LmbPgHBswdXCrkWWuUwBm9w72S2iLWyC/5jet9/Y9cGHtzqxi+GVjfCfahkvNV4KXEwgnH8EMpcrD9RUYe0eLQ== + dependencies: + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@smithy/protocol-http@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@smithy/protocol-http/-/protocol-http-4.0.0.tgz#04df3b5674b540323f678e7c4113e8abd8b26432" + integrity sha512-qOQZOEI2XLWRWBO9AgIYuHuqjZ2csyr8/IlgFDHDNuIgLAMRx2Bl8ck5U5D6Vh9DPdoaVpuzwWMa0xcdL4O/AQ== + dependencies: + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@smithy/querystring-builder@^3.0.0": + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/@smithy/querystring-builder/-/querystring-builder-3.0.0.tgz#48a9aa7b700e8409368c21bc0adf7564e001daea" + integrity sha512-bW8Fi0NzyfkE0TmQphDXr1AmBDbK01cA4C1Z7ggwMAU5RDz5AAv/KmoRwzQAS0kxXNf/D2ALTEgwK0U2c4LtRg== + dependencies: + "@smithy/types" "^3.0.0" + "@smithy/util-uri-escape" "^3.0.0" + tslib "^2.6.2" + +"@smithy/querystring-parser@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/querystring-parser/-/querystring-parser-3.0.0.tgz#fa1ed0cee408cd4d622070fa874bc50ac1a379b7" + integrity sha512-UzHwthk0UEccV4dHzPySnBy34AWw3V9lIqUTxmozQ+wPDAO9csCWMfOLe7V9A2agNYy7xE+Pb0S6K/J23JSzfQ== + dependencies: + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@smithy/service-error-classification@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/service-error-classification/-/service-error-classification-3.0.0.tgz#06a45cb91b15b8b0d5f3b1df2b3743d2ca42f5c4" + integrity sha512-3BsBtOUt2Gsnc3X23ew+r2M71WwtpHfEDGhHYHSDg6q1t8FrWh15jT25DLajFV1H+PpxAJ6gqe9yYeRUsmSdFA== + dependencies: + "@smithy/types" "^3.0.0" + +"@smithy/shared-ini-file-loader@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.0.0.tgz#8739b7cd24f55fb4e276a74f00f0c2bb4e3f25d8" + integrity sha512-REVw6XauXk8xE4zo5aGL7Rz4ywA8qNMUn8RtWeTRQsgAlmlvbJ7CEPBcaXU2NDC3AYBgYAXrGyWD8XrN8UGDog== + dependencies: + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@smithy/signature-v4@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/signature-v4/-/signature-v4-3.0.0.tgz#f536d0abebfeeca8e9aab846a4042658ca07d3b7" + integrity sha512-kXFOkNX+BQHe2qnLxpMEaCRGap9J6tUGLzc3A9jdn+nD4JdMwCKTJ+zFwQ20GkY+mAXGatyTw3HcoUlR39HwmA== + dependencies: + "@smithy/is-array-buffer" "^3.0.0" + "@smithy/types" "^3.0.0" + "@smithy/util-hex-encoding" "^3.0.0" + "@smithy/util-middleware" "^3.0.0" + "@smithy/util-uri-escape" "^3.0.0" + "@smithy/util-utf8" "^3.0.0" + tslib "^2.6.2" + 
+"@smithy/smithy-client@^3.0.1": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@smithy/smithy-client/-/smithy-client-3.0.1.tgz#c440473f6fb5dfbe86eaf015565fc56f66533bb4" + integrity sha512-KAiFY4Y4jdHxR+4zerH/VBhaFKM8pbaVmJZ/CWJRwtM/CmwzTfXfvYwf6GoUwiHepdv+lwiOXCuOl6UBDUEINw== + dependencies: + "@smithy/middleware-endpoint" "^3.0.0" + "@smithy/middleware-stack" "^3.0.0" + "@smithy/protocol-http" "^4.0.0" + "@smithy/types" "^3.0.0" + "@smithy/util-stream" "^3.0.1" + tslib "^2.6.2" + "@smithy/types@^2.4.0", "@smithy/types@^2.9.1": version "2.9.1" resolved "https://registry.yarnpkg.com/@smithy/types/-/types-2.9.1.tgz#ed04d4144eed3b8bd26d20fc85aae8d6e357ebb9" @@ -8002,6 +8552,45 @@ dependencies: tslib "^2.5.0" +"@smithy/types@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/types/-/types-3.0.0.tgz#00231052945159c64ffd8b91e8909d8d3006cb7e" + integrity sha512-VvWuQk2RKFuOr98gFhjca7fkBS+xLLURT8bUjk5XQoV0ZLm7WPwWPPY3/AwzTLuUBDeoKDCthfe1AsTUWaSEhw== + dependencies: + tslib "^2.6.2" + +"@smithy/url-parser@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/url-parser/-/url-parser-3.0.0.tgz#5fdc77cd22051c1aac6531be0315bfcba0fa705d" + integrity sha512-2XLazFgUu+YOGHtWihB3FSLAfCUajVfNBXGGYjOaVKjLAuAxx3pSBY3hBgLzIgB17haf59gOG3imKqTy8mcrjw== + dependencies: + "@smithy/querystring-parser" "^3.0.0" + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@smithy/util-base64@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-base64/-/util-base64-3.0.0.tgz#f7a9a82adf34e27a72d0719395713edf0e493017" + integrity sha512-Kxvoh5Qtt0CDsfajiZOCpJxgtPHXOKwmM+Zy4waD43UoEMA+qPxxa98aE/7ZhdnBFZFXMOiBR5xbcaMhLtznQQ== + dependencies: + "@smithy/util-buffer-from" "^3.0.0" + "@smithy/util-utf8" "^3.0.0" + tslib "^2.6.2" + +"@smithy/util-body-length-browser@^3.0.0": + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/@smithy/util-body-length-browser/-/util-body-length-browser-3.0.0.tgz#86ec2f6256310b4845a2f064e2f571c1ca164ded" + integrity sha512-cbjJs2A1mLYmqmyVl80uoLTJhAcfzMOyPgjwAYusWKMdLeNtzmMz9YxNl3/jRLoxSS3wkqkf0jwNdtXWtyEBaQ== + dependencies: + tslib "^2.6.2" + +"@smithy/util-body-length-node@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-body-length-node/-/util-body-length-node-3.0.0.tgz#99a291bae40d8932166907fe981d6a1f54298a6d" + integrity sha512-Tj7pZ4bUloNUP6PzwhN7K386tmSmEET9QtQg0TgdNOnxhZvCssHji+oZTUIuzxECRfG8rdm2PMw2WCFs6eIYkA== + dependencies: + tslib "^2.6.2" + "@smithy/util-buffer-from@^2.0.0": version "2.0.0" resolved "https://registry.yarnpkg.com/@smithy/util-buffer-from/-/util-buffer-from-2.0.0.tgz#7eb75d72288b6b3001bc5f75b48b711513091deb" @@ -8010,6 +8599,54 @@ "@smithy/is-array-buffer" "^2.0.0" tslib "^2.5.0" +"@smithy/util-buffer-from@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-buffer-from/-/util-buffer-from-3.0.0.tgz#559fc1c86138a89b2edaefc1e6677780c24594e3" + integrity sha512-aEOHCgq5RWFbP+UDPvPot26EJHjOC+bRgse5A8V3FSShqd5E5UN4qc7zkwsvJPPAVsf73QwYcHN1/gt/rtLwQA== + dependencies: + "@smithy/is-array-buffer" "^3.0.0" + tslib "^2.6.2" + +"@smithy/util-config-provider@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-config-provider/-/util-config-provider-3.0.0.tgz#62c6b73b22a430e84888a8f8da4b6029dd5b8efe" + integrity sha512-pbjk4s0fwq3Di/ANL+rCvJMKM5bzAQdE5S/6RL5NXgMExFAi6UgQMPOm5yPaIWPpr+EOXKXRonJ3FoxKf4mCJQ== + dependencies: + tslib "^2.6.2" + +"@smithy/util-defaults-mode-browser@^3.0.1": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-3.0.1.tgz#0ba33ec90f6dd311599bed3a3dd604f3adba9acd" + integrity sha512-nW5kEzdJn1Bn5TF+gOPHh2rcPli8JU9vSSXLbfg7uPnfR1TMRQqs9zlYRhIb87NeSxIbpdXOI94tvXSy+fvDYg== + dependencies: + "@smithy/property-provider" "^3.0.0" + 
"@smithy/smithy-client" "^3.0.1" + "@smithy/types" "^3.0.0" + bowser "^2.11.0" + tslib "^2.6.2" + +"@smithy/util-defaults-mode-node@^3.0.1": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-3.0.1.tgz#71242a6978240a6f559445d4cc26f2cce91c90e1" + integrity sha512-TFk+Qb+elLc/MOhtSp+50fstyfZ6avQbgH2d96xUBpeScu+Al9elxv+UFAjaTHe0HQe5n+wem8ZLpXvU8lwV6Q== + dependencies: + "@smithy/config-resolver" "^3.0.0" + "@smithy/credential-provider-imds" "^3.0.0" + "@smithy/node-config-provider" "^3.0.0" + "@smithy/property-provider" "^3.0.0" + "@smithy/smithy-client" "^3.0.1" + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@smithy/util-endpoints@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-endpoints/-/util-endpoints-2.0.0.tgz#5a16a723c1220f536a9b1b3e01787e69e77b6f12" + integrity sha512-+exaXzEY3DNt2qtA2OtRNSDlVrE4p32j1JSsQkzA5AdP0YtJNjkYbYhJxkFmPYcjI1abuwopOZCwUmv682QkiQ== + dependencies: + "@smithy/node-config-provider" "^3.0.0" + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + "@smithy/util-hex-encoding@^2.1.1": version "2.1.1" resolved "https://registry.yarnpkg.com/@smithy/util-hex-encoding/-/util-hex-encoding-2.1.1.tgz#978252b9fb242e0a59bae4ead491210688e0d15f" @@ -8017,6 +8654,51 @@ dependencies: tslib "^2.5.0" +"@smithy/util-hex-encoding@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-hex-encoding/-/util-hex-encoding-3.0.0.tgz#32938b33d5bf2a15796cd3f178a55b4155c535e6" + integrity sha512-eFndh1WEK5YMUYvy3lPlVmYY/fZcQE1D8oSf41Id2vCeIkKJXPcYDCZD+4+xViI6b1XSd7tE+s5AmXzz5ilabQ== + dependencies: + tslib "^2.6.2" + +"@smithy/util-middleware@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-middleware/-/util-middleware-3.0.0.tgz#64d775628b99a495ca83ce982f5c83aa45f1e894" + integrity sha512-q5ITdOnV2pXHSVDnKWrwgSNTDBAMHLptFE07ua/5Ty5WJ11bvr0vk2a7agu7qRhrCFRQlno5u3CneU5EELK+DQ== + dependencies: + "@smithy/types" "^3.0.0" + 
tslib "^2.6.2" + +"@smithy/util-retry@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-retry/-/util-retry-3.0.0.tgz#8a0c47496aab74e1dfde4905d462ad636a8824bb" + integrity sha512-nK99bvJiziGv/UOKJlDvFF45F00WgPLKVIGUfAK+mDhzVN2hb/S33uW2Tlhg5PVBoqY7tDVqL0zmu4OxAHgo9g== + dependencies: + "@smithy/service-error-classification" "^3.0.0" + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@smithy/util-stream@^3.0.1": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@smithy/util-stream/-/util-stream-3.0.1.tgz#3cf527bcd3fec82c231c38d47dd75f3364747edb" + integrity sha512-7F7VNNhAsfMRA8I986YdOY5fE0/T1/ZjFF6OLsqkvQVNP3vZ/szYDfGCyphb7ioA09r32K/0qbSFfNFU68aSzA== + dependencies: + "@smithy/fetch-http-handler" "^3.0.1" + "@smithy/node-http-handler" "^3.0.0" + "@smithy/types" "^3.0.0" + "@smithy/util-base64" "^3.0.0" + "@smithy/util-buffer-from" "^3.0.0" + "@smithy/util-hex-encoding" "^3.0.0" + "@smithy/util-utf8" "^3.0.0" + tslib "^2.6.2" + +"@smithy/util-uri-escape@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-uri-escape/-/util-uri-escape-3.0.0.tgz#e43358a78bf45d50bb736770077f0f09195b6f54" + integrity sha512-LqR7qYLgZTD7nWLBecUi4aqolw8Mhza9ArpNEQ881MJJIU2sE5iHCK6TdyqqzcDLy0OPe10IY4T8ctVdtynubg== + dependencies: + tslib "^2.6.2" + "@smithy/util-utf8@^2.0.0": version "2.0.0" resolved "https://registry.yarnpkg.com/@smithy/util-utf8/-/util-utf8-2.0.0.tgz#b4da87566ea7757435e153799df9da717262ad42" @@ -8025,6 +8707,14 @@ "@smithy/util-buffer-from" "^2.0.0" tslib "^2.5.0" +"@smithy/util-utf8@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-utf8/-/util-utf8-3.0.0.tgz#1a6a823d47cbec1fd6933e5fc87df975286d9d6a" + integrity sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA== + dependencies: + "@smithy/util-buffer-from" "^3.0.0" + tslib "^2.6.2" + "@statoscope/extensions@5.28.1": version "5.28.1" resolved 
"https://registry.yarnpkg.com/@statoscope/extensions/-/extensions-5.28.1.tgz#bc270f9366c4b2c13342f1a0d138520cf607a5bb" @@ -12814,6 +13504,11 @@ bowser@^1.7.3: resolved "https://registry.yarnpkg.com/bowser/-/bowser-1.9.4.tgz#890c58a2813a9d3243704334fa81b96a5c150c9a" integrity sha512-9IdMmj2KjigRq6oWhmwv1W36pDuA4STQZ8q6YO9um+x07xgYNCD3Oou+WP/3L1HNz7iqythGet3/p4wvc8AAwQ== +bowser@^2.11.0: + version "2.11.0" + resolved "https://registry.yarnpkg.com/bowser/-/bowser-2.11.0.tgz#5ca3c35757a7aa5771500c70a73a9f91ef420a8f" + integrity sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA== + boxen@^5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/boxen/-/boxen-5.1.2.tgz#788cb686fc83c1f486dfa8a40c68fc2b831d2b50" @@ -17288,6 +17983,13 @@ fast-stream-to-buffer@^1.0.0: dependencies: end-of-stream "^1.4.1" +fast-xml-parser@4.2.5: + version "4.2.5" + resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-4.2.5.tgz#a6747a09296a6cb34f2ae634019bf1738f3b421f" + integrity sha512-B9/wizE4WngqQftFPmdaMYlXoJlJOYxGQOanC77fq9k8+Z0v5dDSVh+3glErdIROP//s/jgb7ZuxKfB8nVyo0g== + dependencies: + strnum "^1.0.5" + fastest-levenshtein@^1.0.12: version "1.0.12" resolved "https://registry.yarnpkg.com/fastest-levenshtein/-/fastest-levenshtein-1.0.12.tgz#9990f7d3a88cc5a9ffd1f1745745251700d497e2" @@ -28976,6 +29678,11 @@ strip-json-comments@~2.0.1: resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= +strnum@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/strnum/-/strnum-1.0.5.tgz#5c4e829fe15ad4ff0d20c3db5ac97b73c9b072db" + integrity sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA== + style-loader@^1.1.3, style-loader@^1.3.0: version "1.3.0" resolved 
"https://registry.yarnpkg.com/style-loader/-/style-loader-1.3.0.tgz#828b4a3b3b7e7aa5847ce7bae9e874512114249e" @@ -29944,7 +30651,7 @@ tslib@^1.10.0, tslib@^1.11.1, tslib@^1.8.1, tslib@^1.9.0, tslib@^1.9.3: resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== -tslib@^2.0.0, tslib@^2.0.1, tslib@^2.0.3, tslib@^2.1.0, tslib@^2.3.1, tslib@^2.4.0, tslib@^2.5.0, tslib@^2.5.2: +tslib@^2.0.0, tslib@^2.0.1, tslib@^2.0.3, tslib@^2.1.0, tslib@^2.3.1, tslib@^2.4.0, tslib@^2.5.0, tslib@^2.5.2, tslib@^2.6.2: version "2.6.2" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== From 9bc9dbfaa69e35dd3e2d2955f237f02acf293718 Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Sat, 25 May 2024 11:44:34 +0200 Subject: [PATCH 08/62] adding templates, removing pages, adding some test buttons --- .../integration_assistant/common/ecs.ts | 1950 +++++++++++++ .../integration_assistant/common/index.ts | 2 + .../integration_assistant/public/app.tsx | 58 +- .../public/constants/ecsFields.tsx | 2465 ----------------- .../BuildIntegrationPage.tsx | 17 - .../IntegrationBuilder/CategorizationPage.tsx | 36 - .../IntegrationBuilder/EcsMapperPage.tsx | 32 - .../pages/IntegrationBuilder/RelatedPage.tsx | 36 - .../IntegrationBuilder/ViewResultsPage.tsx | 39 - .../public/pages/Main/MainPage.tsx | 17 - .../integration_assistant/public/services.ts | 37 +- .../public/utils/samples.tsx | 86 - .../server/graphs/ecs/constants.ts | 1940 ------------- .../server/graphs/ecs/validate.ts | 2 +- .../server/providers/bedrock.ts | 1 + .../integration_assistant/server/util/es.ts | 46 +- 16 files changed, 2034 insertions(+), 4730 deletions(-) create mode 100644 
x-pack/plugins/integration_assistant/common/ecs.ts delete mode 100644 x-pack/plugins/integration_assistant/public/constants/ecsFields.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/BuildIntegrationPage.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/CategorizationPage.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/EcsMapperPage.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/RelatedPage.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/ViewResultsPage.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/pages/Main/MainPage.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/utils/samples.tsx diff --git a/x-pack/plugins/integration_assistant/common/ecs.ts b/x-pack/plugins/integration_assistant/common/ecs.ts new file mode 100644 index 0000000000000..3c3c249258db7 --- /dev/null +++ b/x-pack/plugins/integration_assistant/common/ecs.ts @@ -0,0 +1,1950 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +interface EcsFields { + [key: string]: any; +} + +export const ECS_FULL: EcsFields = { + '@timestamp': 'Date/time when the event originated.', + 'agent.build.original': 'Extended build information for the agent.', + 'agent.ephemeral_id': 'Ephemeral identifier of this agent.', + 'agent.id': 'Unique identifier of this agent.', + 'agent.name': 'Custom name of the agent.', + 'agent.type': 'Type of the agent.', + 'agent.version': 'Version of the agent.', + 'client.address': 'Client network address.', + 'client.as.number': 'Unique number allocated to the autonomous system.', + 'client.as.organization.name': 'Organization name.', + 'client.bytes': 'Bytes sent from the client to the server.', + 'client.domain': 'The domain name of the client.', + 'client.geo.city_name': 'City name.', + 'client.geo.continent_code': 'Continent code.', + 'client.geo.continent_name': 'Name of the continent.', + 'client.geo.country_iso_code': 'Country ISO code.', + 'client.geo.country_name': 'Country name.', + 'client.geo.location': 'Longitude and latitude.', + 'client.geo.name': 'User-defined description of a location.', + 'client.geo.postal_code': 'Postal code.', + 'client.geo.region_iso_code': 'Region ISO code.', + 'client.geo.region_name': 'Region name.', + 'client.geo.timezone': 'Time zone.', + 'client.ip': 'IP address of the client.', + 'client.mac': 'MAC address of the client.', + 'client.nat.ip': 'Client NAT ip address', + 'client.nat.port': 'Client NAT port', + 'client.packets': 'Packets sent from the client to the server.', + 'client.port': 'Port of the client.', + 'client.registered_domain': 'The highest registered client domain, stripped of the subdomain.', + 'client.subdomain': 'The subdomain of the domain.', + 'client.top_level_domain': 'The effective top level domain (com, org, net, co.uk).', + 'client.user.domain': 'Name of the directory the user is a member of.', + 'client.user.email': 'User email address.', + 'client.user.full_name': 'Users full name, if available.', + 
'client.user.group.domain': 'Name of the directory the group is a member of.', + 'client.user.group.id': 'Unique identifier for the group on the system/platform.', + 'client.user.group.name': 'Name of the group.', + 'client.user.hash': 'Unique user hash to correlate information for a user in anonymized form.', + 'client.user.id': 'Unique identifier of the user.', + 'client.user.name': 'Short name or login of the user.', + 'client.user.roles': 'Array of user roles at the time of the event.', + 'cloud.account.id': 'The cloud account or organization id.', + 'cloud.account.name': 'The cloud account name.', + 'cloud.availability_zone': + 'Availability zone in which this host, resource, or service is located.', + 'cloud.instance.id': 'Instance ID of the host machine.', + 'cloud.instance.name': 'Instance name of the host machine.', + 'cloud.machine.type': 'Machine type of the host machine.', + 'cloud.origin.account.id': 'The cloud account or organization id.', + 'cloud.origin.account.name': 'The cloud account name.', + 'cloud.origin.availability_zone': + 'Availability zone in which this host, resource, or service is located.', + 'cloud.origin.instance.id': 'Instance ID of the host machine.', + 'cloud.origin.instance.name': 'Instance name of the host machine.', + 'cloud.origin.machine.type': 'Machine type of the host machine.', + 'cloud.origin.project.id': 'The cloud project id.', + 'cloud.origin.project.name': 'The cloud project name.', + 'cloud.origin.provider': 'Name of the cloud provider.', + 'cloud.origin.region': 'Region in which this host, resource, or service is located.', + 'cloud.origin.service.name': 'The cloud service name.', + 'cloud.project.id': 'The cloud project id.', + 'cloud.project.name': 'The cloud project name.', + 'cloud.provider': 'Name of the cloud provider.', + 'cloud.region': 'Region in which this host, resource, or service is located.', + 'cloud.service.name': 'The cloud service name.', + 'cloud.target.account.id': 'The cloud account or 
organization id.', + 'cloud.target.account.name': 'The cloud account name.', + 'cloud.target.availability_zone': + 'Availability zone in which this host, resource, or service is located.', + 'cloud.target.instance.id': 'Instance ID of the host machine.', + 'cloud.target.instance.name': 'Instance name of the host machine.', + 'cloud.target.machine.type': 'Machine type of the host machine.', + 'cloud.target.project.id': 'The cloud project id.', + 'cloud.target.project.name': 'The cloud project name.', + 'cloud.target.provider': 'Name of the cloud provider.', + 'cloud.target.region': 'Region in which this host, resource, or service is located.', + 'cloud.target.service.name': 'The cloud service name.', + 'container.cpu.usage': 'Percent CPU used, between 0 and 1.', + 'container.disk.read.bytes': 'The number of bytes read by all disks.', + 'container.disk.write.bytes': 'The number of bytes written on all disks.', + 'container.id': 'Unique container id.', + 'container.image.hash.all': 'An array of digests of the image the container was built on.', + 'container.image.name': 'Name of the image the container was built on.', + 'container.image.tag': 'Container image tags.', + 'container.labels': 'Image labels.', + 'container.memory.usage': 'Percent memory used, between 0 and 1.', + 'container.name': 'Container name.', + 'container.network.egress.bytes': 'The number of bytes sent on all network interfaces.', + 'container.network.ingress.bytes': 'The number of bytes received on all network interfaces.', + 'container.runtime': 'Runtime managing this container.', + 'container.security_context.privileged': + 'Indicates whether the container is running in privileged mode.', + 'data_stream.dataset': + 'The field can contain anything that makes sense to signify the source of the data.', + 'data_stream.namespace': + 'A user defined namespace. 
Namespaces are useful to allow grouping of data.', + 'data_stream.type': 'An overarching type for the data stream.', + 'destination.address': 'Destination network address.', + 'destination.as.number': 'Unique number allocated to the autonomous system.', + 'destination.as.organization.name': 'Organization name.', + 'destination.bytes': 'Bytes sent from the destination to the source.', + 'destination.domain': 'The domain name of the destination.', + 'destination.geo.city_name': 'City name.', + 'destination.geo.continent_code': 'Continent code.', + 'destination.geo.continent_name': 'Name of the continent.', + 'destination.geo.country_iso_code': 'Country ISO code.', + 'destination.geo.country_name': 'Country name.', + 'destination.geo.location': 'Longitude and latitude.', + 'destination.geo.name': 'User-defined description of a location.', + 'destination.geo.postal_code': 'Postal code.', + 'destination.geo.region_iso_code': 'Region ISO code.', + 'destination.geo.region_name': 'Region name.', + 'destination.geo.timezone': 'Time zone.', + 'destination.ip': 'IP address of the destination.', + 'destination.mac': 'MAC address of the destination.', + 'destination.nat.ip': 'Destination NAT ip', + 'destination.nat.port': 'Destination NAT Port', + 'destination.packets': 'Packets sent from the destination to the source.', + 'destination.port': 'Port of the destination.', + 'destination.registered_domain': + 'The highest registered destination domain, stripped of the subdomain.', + 'destination.subdomain': 'The subdomain of the domain.', + 'destination.top_level_domain': 'The effective top level domain (com, org, net, co.uk).', + 'destination.user.domain': 'Name of the directory the user is a member of.', + 'destination.user.email': 'User email address.', + 'destination.user.full_name': 'Users full name, if available.', + 'destination.user.group.domain': 'Name of the directory the group is a member of.', + 'destination.user.group.id': 'Unique identifier for the group on the 
system/platform.', + 'destination.user.group.name': 'Name of the group.', + 'destination.user.hash': + 'Unique user hash to correlate information for a user in anonymized form.', + 'destination.user.id': 'Unique identifier of the user.', + 'destination.user.name': 'Short name or login of the user.', + 'destination.user.roles': 'Array of user roles at the time of the event.', + 'device.id': 'The unique identifier of a device.', + 'device.manufacturer': 'The vendor name of the device manufacturer.', + 'device.model.identifier': 'The machine readable identifier of the device model.', + 'device.model.name': 'The human readable marketing name of the device model.', + 'dll.code_signature.digest_algorithm': 'Hashing algorithm used to sign the process.', + 'dll.code_signature.exists': 'Boolean to capture if a signature is present.', + 'dll.code_signature.signing_id': 'The identifier used to sign the process.', + 'dll.code_signature.status': 'Additional information about the certificate status.', + 'dll.code_signature.subject_name': 'Subject name of the code signer', + 'dll.code_signature.team_id': 'The team identifier used to sign the process.', + 'dll.code_signature.timestamp': 'When the signature was generated and signed.', + 'dll.code_signature.trusted': 'Stores the trust status of the certificate chain.', + 'dll.code_signature.valid': + 'Boolean to capture if the digital signature is verified against the binary content.', + 'dll.hash.md5': 'MD5 hash.', + 'dll.hash.sha1': 'SHA1 hash.', + 'dll.hash.sha256': 'SHA256 hash.', + 'dll.hash.sha384': 'SHA384 hash.', + 'dll.hash.sha512': 'SHA512 hash.', + 'dll.hash.ssdeep': 'SSDEEP hash.', + 'dll.hash.tlsh': 'TLSH hash.', + 'dll.name': 'Name of the library.', + 'dll.path': 'Full file path of the library.', + 'dll.pe.architecture': 'CPU architecture target for the file.', + 'dll.pe.company': 'Internal company name of the file, provided at compile-time.', + 'dll.pe.description': 'Internal description of the file, provided at 
compile-time.', + 'dll.pe.file_version': 'Process name.', + 'dll.pe.go_import_hash': 'A hash of the Go language imports in a PE file.', + 'dll.pe.go_imports': 'List of imported Go language element names and types.', + 'dll.pe.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'dll.pe.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'dll.pe.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', + 'dll.pe.imphash': 'A hash of the imports in a PE file.', + 'dll.pe.import_hash': 'A hash of the imports in a PE file.', + 'dll.pe.imports': 'List of imported element names and types.', + 'dll.pe.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'dll.pe.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'dll.pe.original_file_name': 'Internal name of the file, provided at compile-time.', + 'dll.pe.pehash': 'A hash of the PE header and data from one or more PE sections.', + 'dll.pe.product': 'Internal product name of the file, provided at compile-time.', + 'dll.pe.sections': 'Section information of the PE file.', + 'dll.pe.sections.entropy': 'Shannon entropy calculation from the section.', + 'dll.pe.sections.name': 'PE Section List name.', + 'dll.pe.sections.physical_size': 'PE Section List physical size.', + 'dll.pe.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'dll.pe.sections.virtual_size': + 'PE Section List virtual size. 
This is always the same as `physical_size`.', + 'dns.answers': 'Array of DNS answers.', + 'dns.answers.class': 'The class of DNS data contained in this resource record.', + 'dns.answers.data': 'The data describing the resource.', + 'dns.answers.name': 'The domain name to which this resource record pertains.', + 'dns.answers.ttl': + 'The time interval in seconds that this resource record may be cached before it should be discarded.', + 'dns.answers.type': 'The type of data contained in this resource record.', + 'dns.header_flags': 'Array of DNS header flags.', + 'dns.id': + 'The DNS packet identifier assigned by the program that generated the query. The identifier is copied to the response.', + 'dns.op_code': 'The DNS operation code that specifies the kind of query in the message.', + 'dns.question.class': 'The class of records being queried.', + 'dns.question.name': 'The name being queried.', + 'dns.question.registered_domain': 'The highest registered domain, stripped of the subdomain.', + 'dns.question.subdomain': 'The subdomain of the domain.', + 'dns.question.top_level_domain': 'The effective top level domain (com, org, net, co.uk).', + 'dns.question.type': 'The type of record being queried.', + 'dns.resolved_ip': 'Array containing all IPs seen in answers.data', + 'dns.response_code': 'The DNS response code.', + 'dns.type': 'The type of DNS event captured, query or answer.', + 'ecs.version': 'ECS version this event conforms to.', + 'email.attachments': 'List of objects describing the attachments.', + 'email.attachments.file.extension': 'Attachment file extension.', + 'email.attachments.file.hash.md5': 'MD5 hash.', + 'email.attachments.file.hash.sha1': 'SHA1 hash.', + 'email.attachments.file.hash.sha256': 'SHA256 hash.', + 'email.attachments.file.hash.sha384': 'SHA384 hash.', + 'email.attachments.file.hash.sha512': 'SHA512 hash.', + 'email.attachments.file.hash.ssdeep': 'SSDEEP hash.', + 'email.attachments.file.hash.tlsh': 'TLSH hash.', + 
'email.attachments.file.mime_type': 'MIME type of the attachment file.', + 'email.attachments.file.name': 'Name of the attachment file.', + 'email.attachments.file.size': 'Attachment file size.', + 'email.bcc.address': 'Email address of BCC recipient', + 'email.cc.address': 'Email address of CC recipient', + 'email.content_type': 'MIME type of the email message.', + 'email.delivery_timestamp': 'Date and time when message was delivered.', + 'email.direction': 'Direction of the message.', + 'email.from.address': 'The senders email address.', + 'email.local_id': 'Unique identifier given by the source.', + 'email.message_id': 'Value from the Message-ID header.', + 'email.origination_timestamp': 'Date and time the email was composed.', + 'email.reply_to.address': 'Address replies should be delivered to.', + 'email.sender.address': 'Address of the message sender.', + 'email.subject': 'The subject of the email message.', + 'email.to.address': 'Email address of recipient', + 'email.x_mailer': 'Application that drafted email.', + 'error.code': 'Error code describing the error.', + 'error.id': 'Unique identifier for the error.', + 'error.message': 'Error message.', + 'error.stack_trace': 'The stack trace of this error in plain text.', + 'error.type': 'The type of the error, for example the class name of the exception.', + 'event.action': 'The action captured by the event.', + 'event.agent_id_status': 'Validation status of the events agent.id field.', + 'event.category': 'Event category. 
The second categorization field in the hierarchy.', + 'event.code': 'Identification code for this event.', + 'event.created': 'Time when the event was first read by an agent or by your pipeline.', + 'event.dataset': 'Name of the dataset.', + 'event.duration': 'Duration of the event in nanoseconds.', + 'event.end': + '`event.end` contains the date when the event ended or when the activity was last observed.', + 'event.hash': + 'Hash (perhaps logstash fingerprint) of raw field to be able to demonstrate log integrity.', + 'event.id': 'Unique ID to describe the event.', + 'event.ingested': 'Timestamp when an event arrived in the central data store.', + 'event.kind': 'The kind of the event. The highest categorization field in the hierarchy.', + 'event.module': 'Name of the module this data is coming from.', + 'event.original': 'Raw text message of entire event.', + 'event.outcome': + 'The outcome of the event. The lowest level categorization field in the hierarchy.', + 'event.provider': 'Source of the event.', + 'event.reason': 'Reason why this event happened, according to the source', + 'event.reference': 'Event reference URL', + 'event.risk_score': + 'Risk score or priority of the event (e.g. security solutions). Use your systems original value here.', + 'event.risk_score_norm': 'Normalized risk score or priority of the event (0-100).', + 'event.sequence': 'Sequence number of the event.', + 'event.severity': 'Numeric severity of the event.', + 'event.start': + '`event.start` contains the date when the event started or when the activity was first observed.', + 'event.timezone': 'Event time zone.', + 'event.type': 'Event type. 
The third categorization field in the hierarchy.', + 'event.url': 'Event investigation URL', + 'faas.coldstart': 'Boolean value indicating a cold start of a function.', + 'faas.execution': 'The execution ID of the current function execution.', + 'faas.id': 'The unique identifier of a serverless function.', + 'faas.name': 'The name of a serverless function.', + 'faas.trigger.request_id': 'The ID of the trigger request , message, event, etc.', + 'faas.trigger.type': 'The trigger for the function execution.', + 'faas.version': 'The version of a serverless function.', + 'file.accessed': 'Last time the file was accessed.', + 'file.attributes': 'Array of file attributes.', + 'file.code_signature.digest_algorithm': 'Hashing algorithm used to sign the process.', + 'file.code_signature.exists': 'Boolean to capture if a signature is present.', + 'file.code_signature.signing_id': 'The identifier used to sign the process.', + 'file.code_signature.status': 'Additional information about the certificate status.', + 'file.code_signature.subject_name': 'Subject name of the code signer', + 'file.code_signature.team_id': 'The team identifier used to sign the process.', + 'file.code_signature.timestamp': 'When the signature was generated and signed.', + 'file.code_signature.trusted': 'Stores the trust status of the certificate chain.', + 'file.code_signature.valid': + 'Boolean to capture if the digital signature is verified against the binary content.', + 'file.created': 'File creation time.', + 'file.ctime': 'Last time the file attributes or metadata changed.', + 'file.device': 'Device that is the source of the file.', + 'file.directory': 'Directory where the file is located.', + 'file.drive_letter': 'Drive letter where the file is located.', + 'file.elf.architecture': 'Machine architecture of the ELF file.', + 'file.elf.byte_order': 'Byte sequence of ELF file.', + 'file.elf.cpu_type': 'CPU type of the ELF file.', + 'file.elf.creation_date': 'Build or compile date.', + 
'file.elf.exports': 'List of exported element names and types.', + 'file.elf.go_import_hash': 'A hash of the Go language imports in an ELF file.', + 'file.elf.go_imports': 'List of imported Go language element names and types.', + 'file.elf.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'file.elf.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'file.elf.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', + 'file.elf.header.abi_version': 'Version of the ELF Application Binary Interface (ABI).', + 'file.elf.header.class': 'Header class of the ELF file.', + 'file.elf.header.data': 'Data table of the ELF header.', + 'file.elf.header.entrypoint': 'Header entrypoint of the ELF file.', + 'file.elf.header.object_version': '"0x1" for original ELF files.', + 'file.elf.header.os_abi': 'Application Binary Interface (ABI) of the Linux OS.', + 'file.elf.header.type': 'Header type of the ELF file.', + 'file.elf.header.version': 'Version of the ELF header.', + 'file.elf.import_hash': 'A hash of the imports in an ELF file.', + 'file.elf.imports': 'List of imported element names and types.', + 'file.elf.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'file.elf.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'file.elf.sections': 'Section information of the ELF file.', + 'file.elf.sections.chi2': 'Chi-square probability distribution of the section.', + 'file.elf.sections.entropy': 'Shannon entropy calculation from the section.', + 'file.elf.sections.flags': 'ELF Section List flags.', + 'file.elf.sections.name': 'ELF Section List name.', + 'file.elf.sections.physical_offset': 'ELF Section List offset.', + 'file.elf.sections.physical_size': 'ELF Section List physical size.', + 'file.elf.sections.type': 'ELF Section List 
type.', + 'file.elf.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'file.elf.sections.virtual_address': 'ELF Section List virtual address.', + 'file.elf.sections.virtual_size': 'ELF Section List virtual size.', + 'file.elf.segments': 'ELF object segment list.', + 'file.elf.segments.sections': 'ELF object segment sections.', + 'file.elf.segments.type': 'ELF object segment type.', + 'file.elf.shared_libraries': 'List of shared libraries used by this ELF object.', + 'file.elf.telfhash': 'telfhash hash for ELF file.', + 'file.extension': 'File extension, excluding the leading dot.', + 'file.fork_name': 'A fork is additional data associated with a filesystem object.', + 'file.gid': 'Primary group ID (GID) of the file.', + 'file.group': 'Primary group name of the file.', + 'file.hash.md5': 'MD5 hash.', + 'file.hash.sha1': 'SHA1 hash.', + 'file.hash.sha256': 'SHA256 hash.', + 'file.hash.sha384': 'SHA384 hash.', + 'file.hash.sha512': 'SHA512 hash.', + 'file.hash.ssdeep': 'SSDEEP hash.', + 'file.hash.tlsh': 'TLSH hash.', + 'file.inode': 'Inode representing the file in the filesystem.', + 'file.macho.go_import_hash': 'A hash of the Go language imports in a Mach-O file.', + 'file.macho.go_imports': 'List of imported Go language element names and types.', + 'file.macho.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'file.macho.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'file.macho.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', + 'file.macho.import_hash': 'A hash of the imports in a Mach-O file.', + 'file.macho.imports': 'List of imported element names and types.', + 'file.macho.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'file.macho.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element 
names and types.', + 'file.macho.sections': 'Section information of the Mach-O file.', + 'file.macho.sections.entropy': 'Shannon entropy calculation from the section.', + 'file.macho.sections.name': 'Mach-O Section List name.', + 'file.macho.sections.physical_size': 'Mach-O Section List physical size.', + 'file.macho.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'file.macho.sections.virtual_size': + 'Mach-O Section List virtual size. This is always the same as `physical_size`.', + 'file.macho.symhash': 'A hash of the imports in a Mach-O file.', + 'file.mime_type': 'Media type of file, document, or arrangement of bytes.', + 'file.mode': 'Mode of the file in octal representation.', + 'file.mtime': 'Last time the file content was modified.', + 'file.name': 'Name of the file including the extension, without the directory.', + 'file.owner': 'File owners username.', + 'file.path': 'Full path to the file, including the file name.', + 'file.pe.architecture': 'CPU architecture target for the file.', + 'file.pe.company': 'Internal company name of the file, provided at compile-time.', + 'file.pe.description': 'Internal description of the file, provided at compile-time.', + 'file.pe.file_version': 'Process name.', + 'file.pe.go_import_hash': 'A hash of the Go language imports in a PE file.', + 'file.pe.go_imports': 'List of imported Go language element names and types.', + 'file.pe.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'file.pe.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'file.pe.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', + 'file.pe.imphash': 'A hash of the imports in a PE file.', + 'file.pe.import_hash': 'A hash of the imports in a PE file.', + 'file.pe.imports': 'List of imported element names and types.', + 'file.pe.imports_names_entropy': + 'Shannon entropy calculation from the list of 
imported element names and types.', + 'file.pe.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'file.pe.original_file_name': 'Internal name of the file, provided at compile-time.', + 'file.pe.pehash': 'A hash of the PE header and data from one or more PE sections.', + 'file.pe.product': 'Internal product name of the file, provided at compile-time.', + 'file.pe.sections': 'Section information of the PE file.', + 'file.pe.sections.entropy': 'Shannon entropy calculation from the section.', + 'file.pe.sections.name': 'PE Section List name.', + 'file.pe.sections.physical_size': 'PE Section List physical size.', + 'file.pe.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'file.pe.sections.virtual_size': + 'PE Section List virtual size. This is always the same as `physical_size`.', + 'file.size': 'File size in bytes.', + 'file.target_path': 'Target path for symlinks.', + 'file.type': 'File type (file, dir, or symlink).', + 'file.uid': 'The user ID (UID) or security identifier (SID) of the file owner.', + 'file.x509.alternative_names': 'List of subject alternative names (SAN).', + 'file.x509.issuer.common_name': 'List of common name (CN) of issuing certificate authority.', + 'file.x509.issuer.country': 'List of country (C) codes', + 'file.x509.issuer.distinguished_name': + 'Distinguished name (DN) of issuing certificate authority.', + 'file.x509.issuer.locality': 'List of locality names (L)', + 'file.x509.issuer.organization': 'List of organizations (O) of issuing certificate authority.', + 'file.x509.issuer.organizational_unit': + 'List of organizational units (OU) of issuing certificate authority.', + 'file.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', + 'file.x509.not_after': 'Time at which the certificate is no longer considered valid.', + 'file.x509.not_before': 'Time at which the certificate is first considered 
valid.', + 'file.x509.public_key_algorithm': 'Algorithm used to generate the public key.', + 'file.x509.public_key_curve': + 'The curve used by the elliptic curve public key algorithm. This is algorithm specific', + 'file.x509.public_key_exponent': + 'Exponent used to derive the public key. This is algorithm specific', + 'file.x509.public_key_size': 'The size of the public key space in bits.', + 'file.x509.serial_number': 'Unique serial number issued by the certificate authority.', + 'file.x509.signature_algorithm': 'Identifier for certificate signature algorithm.', + 'file.x509.subject.common_name': 'List of common names (CN) of subject.', + 'file.x509.subject.country': 'List of country (C) code', + 'file.x509.subject.distinguished_name': + 'Distinguished name (DN) of the certificate subject entity', + 'file.x509.subject.locality': 'List of locality names (L)', + 'file.x509.subject.organization': 'List of organizations (O) of subject.', + 'file.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', + 'file.x509.subject.state_or_province': 'List of state or province names (ST, S, or P)', + 'file.x509.version_number': 'Version of x509 format.', + 'group.domain': 'Name of the directory the group is a member of.', + 'group.id': 'Unique identifier for the group on the system/platform.', + 'group.name': 'Name of the group.', + 'host.architecture': 'Operating system architecture.', + 'host.boot.id': 'Linux boot uuid taken from /proc/sys/kernel/random/boot_id', + 'host.cpu.usage': 'Percent CPU used, between 0 and 1.', + 'host.disk.read.bytes': 'The number of bytes read by all disks.', + 'host.disk.write.bytes': 'The number of bytes written on all disks.', + 'host.domain': 'Name of the directory the group is a member of.', + 'host.geo.city_name': 'City name.', + 'host.geo.continent_code': 'Continent code.', + 'host.geo.continent_name': 'Name of the continent.', + 'host.geo.country_iso_code': 'Country ISO code.', + 'host.geo.country_name': 
'Country name.', + 'host.geo.location': 'Longitude and latitude.', + 'host.geo.name': 'User-defined description of a location.', + 'host.geo.postal_code': 'Postal code.', + 'host.geo.region_iso_code': 'Region ISO code.', + 'host.geo.region_name': 'Region name.', + 'host.geo.timezone': 'Time zone.', + 'host.hostname': 'Hostname of the host.', + 'host.id': 'Unique host id.', + 'host.ip': 'Host ip addresses.', + 'host.mac': 'Host MAC addresses.', + 'host.name': 'Name of the host.', + 'host.network.egress.bytes': 'The number of bytes sent on all network interfaces.', + 'host.network.egress.packets': 'The number of packets sent on all network interfaces.', + 'host.network.ingress.bytes': 'The number of bytes received on all network interfaces.', + 'host.network.ingress.packets': 'The number of packets received on all network interfaces.', + 'host.os.family': 'OS family (such as redhat, debian, freebsd, windows).', + 'host.os.full': 'Operating system name, including the version or code name.', + 'host.os.kernel': 'Operating system kernel version as a raw string.', + 'host.os.name': 'Operating system name, without the version.', + 'host.os.platform': 'Operating system platform (such centos, ubuntu, windows).', + 'host.os.type': + 'Which commercial OS family (one of: linux, macos, unix, windows, ios or android).', + 'host.os.version': 'Operating system version as a raw string.', + 'host.pid_ns_ino': 'Pid namespace inode', + 'host.risk.calculated_level': + 'A risk classification level calculated by an internal system as part of entity analytics and entity risk scoring', + 'host.risk.calculated_score': + 'A risk classification score calculated by an internal system as part of entity analytics and entity risk scoring', + 'host.risk.calculated_score_norm': 'A normalized risk score calculated by an internal system', + 'host.risk.static_level': + 'A risk classification level obtained from outside the system, such as from some external Threat Intelligence Platform', + 
'host.risk.static_score': + 'A risk classification score obtained from outside the system, such as from some external Threat Intelligence Platform', + 'host.risk.static_score_norm': 'A normalized risk score calculated by an external system.', + 'host.type': 'Type of host.', + 'host.uptime': 'Seconds the host has been up.', + 'http.request.body.bytes': 'Size in bytes of the request body.', + 'http.request.body.content': 'The full HTTP request body.', + 'http.request.bytes': 'Total size in bytes of the request (body and headers).', + 'http.request.id': 'HTTP request ID.', + 'http.request.method': 'HTTP request method.', + 'http.request.mime_type': 'Mime type of the body of the request.', + 'http.request.referrer': 'Referrer for this HTTP request.', + 'http.response.body.bytes': 'Size in bytes of the response body.', + 'http.response.body.content': 'The full HTTP response body.', + 'http.response.bytes': 'Total size in bytes of the response (body and headers).', + 'http.response.mime_type': 'Mime type of the body of the response.', + 'http.response.status_code': 'HTTP response status code.', + 'http.version': 'HTTP version.', + labels: 'Custom key/value pairs.', + 'log.file.path': 'Full path to the log file this event came from.', + 'log.level': 'Log level of the log event.', + 'log.logger': 'Name of the logger.', + 'log.origin.file.line': 'The line number of the file which originated the log event.', + 'log.origin.file.name': 'The code file which originated the log event.', + 'log.origin.function': 'The function which originated the log event.', + 'log.syslog': 'Syslog metadata', + 'log.syslog.appname': 'The device or application that originated the Syslog message.', + 'log.syslog.facility.code': 'Syslog numeric facility of the event.', + 'log.syslog.facility.name': 'Syslog text-based facility of the event.', + 'log.syslog.hostname': 'The host that originated the Syslog message.', + 'log.syslog.msgid': 'An identifier for the type of Syslog message.', + 
'log.syslog.priority': 'Syslog priority of the event.', + 'log.syslog.procid': 'The process name or ID that originated the Syslog message.', + 'log.syslog.severity.code': 'Syslog numeric severity of the event.', + 'log.syslog.severity.name': 'Syslog text-based severity of the event.', + 'log.syslog.structured_data': 'Structured data expressed in RFC 5424 messages.', + 'log.syslog.version': 'Syslog protocol version.', + message: 'Log message optimized for viewing in a log viewer.', + 'network.application': 'Application level protocol name.', + 'network.bytes': 'Total bytes transferred in both directions.', + 'network.community_id': 'A hash of source and destination IPs and ports.', + 'network.direction': 'Direction of the network traffic.', + 'network.forwarded_ip': 'Host IP address when the source IP address is the proxy.', + 'network.iana_number': 'IANA Protocol Number.', + 'network.inner': 'Inner VLAN tag information', + 'network.inner.vlan.id': 'VLAN ID as reported by the observer.', + 'network.inner.vlan.name': 'Optional VLAN name as reported by the observer.', + 'network.name': 'Name given by operators to sections of their network.', + 'network.packets': 'Total packets transferred in both directions.', + 'network.protocol': 'Application protocol name.', + 'network.transport': 'Protocol Name corresponding to the field `iana_number`.', + 'network.type': 'In the OSI Model this would be the Network Layer. 
ipv4, ipv6, ipsec, pim, etc', + 'network.vlan.id': 'VLAN ID as reported by the observer.', + 'network.vlan.name': 'Optional VLAN name as reported by the observer.', + 'observer.egress': 'Object field for egress information', + 'observer.egress.interface.alias': 'Interface alias', + 'observer.egress.interface.id': 'Interface ID', + 'observer.egress.interface.name': 'Interface name', + 'observer.egress.vlan.id': 'VLAN ID as reported by the observer.', + 'observer.egress.vlan.name': 'Optional VLAN name as reported by the observer.', + 'observer.egress.zone': 'Observer Egress zone', + 'observer.geo.city_name': 'City name.', + 'observer.geo.continent_code': 'Continent code.', + 'observer.geo.continent_name': 'Name of the continent.', + 'observer.geo.country_iso_code': 'Country ISO code.', + 'observer.geo.country_name': 'Country name.', + 'observer.geo.location': 'Longitude and latitude.', + 'observer.geo.name': 'User-defined description of a location.', + 'observer.geo.postal_code': 'Postal code.', + 'observer.geo.region_iso_code': 'Region ISO code.', + 'observer.geo.region_name': 'Region name.', + 'observer.geo.timezone': 'Time zone.', + 'observer.hostname': 'Hostname of the observer.', + 'observer.ingress': 'Object field for ingress information', + 'observer.ingress.interface.alias': 'Interface alias', + 'observer.ingress.interface.id': 'Interface ID', + 'observer.ingress.interface.name': 'Interface name', + 'observer.ingress.vlan.id': 'VLAN ID as reported by the observer.', + 'observer.ingress.vlan.name': 'Optional VLAN name as reported by the observer.', + 'observer.ingress.zone': 'Observer ingress zone', + 'observer.ip': 'IP addresses of the observer.', + 'observer.mac': 'MAC addresses of the observer.', + 'observer.name': 'Custom name of the observer.', + 'observer.os.family': 'OS family (such as redhat, debian, freebsd, windows).', + 'observer.os.full': 'Operating system name, including the version or code name.', + 'observer.os.kernel': 'Operating system kernel 
version as a raw string.', + 'observer.os.name': 'Operating system name, without the version.', + 'observer.os.platform': 'Operating system platform (such centos, ubuntu, windows).', + 'observer.os.type': + 'Which commercial OS family (one of: linux, macos, unix, windows, ios or android).', + 'observer.os.version': 'Operating system version as a raw string.', + 'observer.product': 'The product name of the observer.', + 'observer.serial_number': 'Observer serial number.', + 'observer.type': 'The type of the observer the data is coming from.', + 'observer.vendor': 'Vendor name of the observer.', + 'observer.version': 'Observer version.', + 'orchestrator.api_version': 'API version being used to carry out the action', + 'orchestrator.cluster.id': 'Unique ID of the cluster.', + 'orchestrator.cluster.name': 'Name of the cluster.', + 'orchestrator.cluster.url': 'URL of the API used to manage the cluster.', + 'orchestrator.cluster.version': 'The version of the cluster.', + 'orchestrator.namespace': 'Namespace in which the action is taking place.', + 'orchestrator.organization': + 'Organization affected by the event (for multi-tenant orchestrator setups).', + 'orchestrator.resource.annotation': 'The list of annotations added to the resource.', + 'orchestrator.resource.id': 'Unique ID of the resource being acted upon.', + 'orchestrator.resource.ip': + 'IP address assigned to the resource associated with the event being observed.', + 'orchestrator.resource.label': 'The list of labels added to the resource.', + 'orchestrator.resource.name': 'Name of the resource being acted upon.', + 'orchestrator.resource.parent.type': + 'Type or kind of the parent resource associated with the event being observed.', + 'orchestrator.resource.type': 'Type of resource being acted upon.', + 'orchestrator.type': 'Orchestrator cluster type (e.g. 
kubernetes, nomad or cloudfoundry).', + 'organization.id': 'Unique identifier for the organization.', + 'organization.name': 'Organization name.', + 'package.architecture': 'Package architecture.', + 'package.build_version': 'Build version information', + 'package.checksum': 'Checksum of the installed package for verification.', + 'package.description': 'Description of the package.', + 'package.install_scope': 'Indicating how the package was installed, e.g. user-local, global.', + 'package.installed': 'Time when package was installed.', + 'package.license': 'Package license', + 'package.name': 'Package name', + 'package.path': 'Path where the package is installed.', + 'package.reference': 'Package home page or reference URL', + 'package.size': 'Package size in bytes.', + 'package.type': 'Package type', + 'package.version': 'Package version', + 'process.args': 'Array of process arguments.', + 'process.args_count': 'Length of the process.args array.', + 'process.code_signature.digest_algorithm': 'Hashing algorithm used to sign the process.', + 'process.code_signature.exists': 'Boolean to capture if a signature is present.', + 'process.code_signature.signing_id': 'The identifier used to sign the process.', + 'process.code_signature.status': 'Additional information about the certificate status.', + 'process.code_signature.subject_name': 'Subject name of the code signer', + 'process.code_signature.team_id': 'The team identifier used to sign the process.', + 'process.code_signature.timestamp': 'When the signature was generated and signed.', + 'process.code_signature.trusted': 'Stores the trust status of the certificate chain.', + 'process.code_signature.valid': + 'Boolean to capture if the digital signature is verified against the binary content.', + 'process.command_line': 'Full command line that started the process.', + 'process.elf.architecture': 'Machine architecture of the ELF file.', + 'process.elf.byte_order': 'Byte sequence of ELF file.', + 
'process.elf.cpu_type': 'CPU type of the ELF file.', + 'process.elf.creation_date': 'Build or compile date.', + 'process.elf.exports': 'List of exported element names and types.', + 'process.elf.go_import_hash': 'A hash of the Go language imports in an ELF file.', + 'process.elf.go_imports': 'List of imported Go language element names and types.', + 'process.elf.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', + 'process.elf.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'process.elf.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', + 'process.elf.header.abi_version': 'Version of the ELF Application Binary Interface (ABI).', + 'process.elf.header.class': 'Header class of the ELF file.', + 'process.elf.header.data': 'Data table of the ELF header.', + 'process.elf.header.entrypoint': 'Header entrypoint of the ELF file.', + 'process.elf.header.object_version': '"0x1" for original ELF files.', + 'process.elf.header.os_abi': 'Application Binary Interface (ABI) of the Linux OS.', + 'process.elf.header.type': 'Header type of the ELF file.', + 'process.elf.header.version': 'Version of the ELF header.', + 'process.elf.import_hash': 'A hash of the imports in an ELF file.', + 'process.elf.imports': 'List of imported element names and types.', + 'process.elf.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'process.elf.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'process.elf.sections': 'Section information of the ELF file.', + 'process.elf.sections.chi2': 'Chi-square probability distribution of the section.', + 'process.elf.sections.entropy': 'Shannon entropy calculation from the section.', + 'process.elf.sections.flags': 'ELF Section List flags.', + 'process.elf.sections.name': 'ELF Section List name.', + 
'process.elf.sections.physical_offset': 'ELF Section List offset.', + 'process.elf.sections.physical_size': 'ELF Section List physical size.', + 'process.elf.sections.type': 'ELF Section List type.', + 'process.elf.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'process.elf.sections.virtual_address': 'ELF Section List virtual address.', + 'process.elf.sections.virtual_size': 'ELF Section List virtual size.', + 'process.elf.segments': 'ELF object segment list.', + 'process.elf.segments.sections': 'ELF object segment sections.', + 'process.elf.segments.type': 'ELF object segment type.', + 'process.elf.shared_libraries': 'List of shared libraries used by this ELF object.', + 'process.elf.telfhash': 'telfhash hash for ELF file.', + 'process.end': 'The time the process ended.', + 'process.entity_id': 'Unique identifier for the process.', + 'process.entry_leader.args': 'Array of process arguments.', + 'process.entry_leader.args_count': 'Length of the process.args array.', + 'process.entry_leader.attested_groups.name': 'Name of the group.', + 'process.entry_leader.attested_user.id': 'Unique identifier of the user.', + 'process.entry_leader.attested_user.name': 'Short name or login of the user.', + 'process.entry_leader.command_line': 'Full command line that started the process.', + 'process.entry_leader.entity_id': 'Unique identifier for the process.', + 'process.entry_leader.entry_meta.source.ip': 'IP address of the source.', + 'process.entry_leader.entry_meta.type': 'The entry type for the entry session leader.', + 'process.entry_leader.executable': 'Absolute path to the process executable.', + 'process.entry_leader.group.id': 'Unique identifier for the group on the system/platform.', + 'process.entry_leader.group.name': 'Name of the group.', + 'process.entry_leader.interactive': 'Whether the process is connected to an interactive shell.', + 'process.entry_leader.name': 'Process name.', + 'process.entry_leader.parent.entity_id': 
'Unique identifier for the process.', + 'process.entry_leader.parent.pid': 'Process id.', + 'process.entry_leader.parent.session_leader.entity_id': 'Unique identifier for the process.', + 'process.entry_leader.parent.session_leader.pid': 'Process id.', + 'process.entry_leader.parent.session_leader.start': 'The time the process started.', + 'process.entry_leader.parent.session_leader.vpid': 'Virtual process id.', + 'process.entry_leader.parent.start': 'The time the process started.', + 'process.entry_leader.parent.vpid': 'Virtual process id.', + 'process.entry_leader.pid': 'Process id.', + 'process.entry_leader.real_group.id': 'Unique identifier for the group on the system/platform.', + 'process.entry_leader.real_group.name': 'Name of the group.', + 'process.entry_leader.real_user.id': 'Unique identifier of the user.', + 'process.entry_leader.real_user.name': 'Short name or login of the user.', + 'process.entry_leader.same_as_process': + 'This boolean is used to identify if a leader process is the same as the top level process.', + 'process.entry_leader.saved_group.id': 'Unique identifier for the group on the system/platform.', + 'process.entry_leader.saved_group.name': 'Name of the group.', + 'process.entry_leader.saved_user.id': 'Unique identifier of the user.', + 'process.entry_leader.saved_user.name': 'Short name or login of the user.', + 'process.entry_leader.start': 'The time the process started.', + 'process.entry_leader.supplemental_groups.id': + 'Unique identifier for the group on the system/platform.', + 'process.entry_leader.supplemental_groups.name': 'Name of the group.', + 'process.entry_leader.tty': 'Information about the controlling TTY device.', + 'process.entry_leader.tty.char_device.major': 'The TTY character devices major number.', + 'process.entry_leader.tty.char_device.minor': 'The TTY character devices minor number.', + 'process.entry_leader.user.id': 'Unique identifier of the user.', + 'process.entry_leader.user.name': 'Short name or login of 
the user.', + 'process.entry_leader.vpid': 'Virtual process id.', + 'process.entry_leader.working_directory': 'The working directory of the process.', + 'process.env_vars': 'Array of environment variable bindings.', + 'process.executable': 'Absolute path to the process executable.', + 'process.exit_code': 'The exit code of the process.', + 'process.group_leader.args': 'Array of process arguments.', + 'process.group_leader.args_count': 'Length of the process.args array.', + 'process.group_leader.command_line': 'Full command line that started the process.', + 'process.group_leader.entity_id': 'Unique identifier for the process.', + 'process.group_leader.executable': 'Absolute path to the process executable.', + 'process.group_leader.group.id': 'Unique identifier for the group on the system/platform.', + 'process.group_leader.group.name': 'Name of the group.', + 'process.group_leader.interactive': 'Whether the process is connected to an interactive shell.', + 'process.group_leader.name': 'Process name.', + 'process.group_leader.pid': 'Process id.', + 'process.group_leader.real_group.id': 'Unique identifier for the group on the system/platform.', + 'process.group_leader.real_group.name': 'Name of the group.', + 'process.group_leader.real_user.id': 'Unique identifier of the user.', + 'process.group_leader.real_user.name': 'Short name or login of the user.', + 'process.group_leader.same_as_process': + 'This boolean is used to identify if a leader process is the same as the top level process.', + 'process.group_leader.saved_group.id': 'Unique identifier for the group on the system/platform.', + 'process.group_leader.saved_group.name': 'Name of the group.', + 'process.group_leader.saved_user.id': 'Unique identifier of the user.', + 'process.group_leader.saved_user.name': 'Short name or login of the user.', + 'process.group_leader.start': 'The time the process started.', + 'process.group_leader.supplemental_groups.id': + 'Unique identifier for the group on the 
system/platform.', + 'process.group_leader.supplemental_groups.name': 'Name of the group.', + 'process.group_leader.tty': 'Information about the controlling TTY device.', + 'process.group_leader.tty.char_device.major': 'The TTY character devices major number.', + 'process.group_leader.tty.char_device.minor': 'The TTY character devices minor number.', + 'process.group_leader.user.id': 'Unique identifier of the user.', + 'process.group_leader.user.name': 'Short name or login of the user.', + 'process.group_leader.vpid': 'Virtual process id.', + 'process.group_leader.working_directory': 'The working directory of the process.', + 'process.hash.md5': 'MD5 hash.', + 'process.hash.sha1': 'SHA1 hash.', + 'process.hash.sha256': 'SHA256 hash.', + 'process.hash.sha384': 'SHA384 hash.', + 'process.hash.sha512': 'SHA512 hash.', + 'process.hash.ssdeep': 'SSDEEP hash.', + 'process.hash.tlsh': 'TLSH hash.', + 'process.interactive': 'Whether the process is connected to an interactive shell.', + 'process.io': 'A chunk of input or output (IO) from a single process.', + 'process.io.bytes_skipped': + 'An array of byte offsets and lengths denoting where IO data has been skipped.', + 'process.io.bytes_skipped.length': 'The length of bytes skipped.', + 'process.io.bytes_skipped.offset': + 'The byte offset into this events io.text (or io.bytes in the future) where length bytes were skipped.', + 'process.io.max_bytes_per_process_exceeded': + 'If true, the process producing the output has exceeded the max_kilobytes_per_process configuration setting.', + 'process.io.text': 'A chunk of output or input sanitized to UTF-8.', + 'process.io.total_bytes_captured': 'The total number of bytes captured in this event.', + 'process.io.total_bytes_skipped': + 'The total number of bytes that were not captured due to implementation restrictions such as buffer size limits.', + 'process.io.type': 'The type of object on which the IO action (read or write) was taken.', + 'process.macho.go_import_hash': 'A hash 
of the Go language imports in a Mach-O file.', + 'process.macho.go_imports': 'List of imported Go language element names and types.', + 'process.macho.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', + 'process.macho.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'process.macho.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', + 'process.macho.import_hash': 'A hash of the imports in a Mach-O file.', + 'process.macho.imports': 'List of imported element names and types.', + 'process.macho.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'process.macho.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'process.macho.sections': 'Section information of the Mach-O file.', + 'process.macho.sections.entropy': 'Shannon entropy calculation from the section.', + 'process.macho.sections.name': 'Mach-O Section List name.', + 'process.macho.sections.physical_size': 'Mach-O Section List physical size.', + 'process.macho.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', + 'process.macho.sections.virtual_size': + 'Mach-O Section List virtual size. 
This is always the same as `physical_size`.', + 'process.macho.symhash': 'A hash of the imports in a Mach-O file.', + 'process.name': 'Process name.', + 'process.parent.args': 'Array of process arguments.', + 'process.parent.args_count': 'Length of the process.args array.', + 'process.parent.code_signature.digest_algorithm': 'Hashing algorithm used to sign the process.', + 'process.parent.code_signature.exists': 'Boolean to capture if a signature is present.', + 'process.parent.code_signature.signing_id': 'The identifier used to sign the process.', + 'process.parent.code_signature.status': 'Additional information about the certificate status.', + 'process.parent.code_signature.subject_name': 'Subject name of the code signer', + 'process.parent.code_signature.team_id': 'The team identifier used to sign the process.', + 'process.parent.code_signature.timestamp': 'When the signature was generated and signed.', + 'process.parent.code_signature.trusted': 'Stores the trust status of the certificate chain.', + 'process.parent.code_signature.valid': + 'Boolean to capture if the digital signature is verified against the binary content.', + 'process.parent.command_line': 'Full command line that started the process.', + 'process.parent.elf.architecture': 'Machine architecture of the ELF file.', + 'process.parent.elf.byte_order': 'Byte sequence of ELF file.', + 'process.parent.elf.cpu_type': 'CPU type of the ELF file.', + 'process.parent.elf.creation_date': 'Build or compile date.', + 'process.parent.elf.exports': 'List of exported element names and types.', + 'process.parent.elf.go_import_hash': 'A hash of the Go language imports in an ELF file.', + 'process.parent.elf.go_imports': 'List of imported Go language element names and types.', + 'process.parent.elf.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', + 'process.parent.elf.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 
'process.parent.elf.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', + 'process.parent.elf.header.abi_version': 'Version of the ELF Application Binary Interface (ABI).', + 'process.parent.elf.header.class': 'Header class of the ELF file.', + 'process.parent.elf.header.data': 'Data table of the ELF header.', + 'process.parent.elf.header.entrypoint': 'Header entrypoint of the ELF file.', + 'process.parent.elf.header.object_version': '"0x1" for original ELF files.', + 'process.parent.elf.header.os_abi': 'Application Binary Interface (ABI) of the Linux OS.', + 'process.parent.elf.header.type': 'Header type of the ELF file.', + 'process.parent.elf.header.version': 'Version of the ELF header.', + 'process.parent.elf.import_hash': 'A hash of the imports in an ELF file.', + 'process.parent.elf.imports': 'List of imported element names and types.', + 'process.parent.elf.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'process.parent.elf.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'process.parent.elf.sections': 'Section information of the ELF file.', + 'process.parent.elf.sections.chi2': 'Chi-square probability distribution of the section.', + 'process.parent.elf.sections.entropy': 'Shannon entropy calculation from the section.', + 'process.parent.elf.sections.flags': 'ELF Section List flags.', + 'process.parent.elf.sections.name': 'ELF Section List name.', + 'process.parent.elf.sections.physical_offset': 'ELF Section List offset.', + 'process.parent.elf.sections.physical_size': 'ELF Section List physical size.', + 'process.parent.elf.sections.type': 'ELF Section List type.', + 'process.parent.elf.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', + 'process.parent.elf.sections.virtual_address': 'ELF Section List virtual address.', + 
'process.parent.elf.sections.virtual_size': 'ELF Section List virtual size.', + 'process.parent.elf.segments': 'ELF object segment list.', + 'process.parent.elf.segments.sections': 'ELF object segment sections.', + 'process.parent.elf.segments.type': 'ELF object segment type.', + 'process.parent.elf.shared_libraries': 'List of shared libraries used by this ELF object.', + 'process.parent.elf.telfhash': 'telfhash hash for ELF file.', + 'process.parent.end': 'The time the process ended.', + 'process.parent.entity_id': 'Unique identifier for the process.', + 'process.parent.executable': 'Absolute path to the process executable.', + 'process.parent.exit_code': 'The exit code of the process.', + 'process.parent.group.id': 'Unique identifier for the group on the system/platform.', + 'process.parent.group.name': 'Name of the group.', + 'process.parent.group_leader.entity_id': 'Unique identifier for the process.', + 'process.parent.group_leader.pid': 'Process id.', + 'process.parent.group_leader.start': 'The time the process started.', + 'process.parent.group_leader.vpid': 'Virtual process id.', + 'process.parent.hash.md5': 'MD5 hash.', + 'process.parent.hash.sha1': 'SHA1 hash.', + 'process.parent.hash.sha256': 'SHA256 hash.', + 'process.parent.hash.sha384': 'SHA384 hash.', + 'process.parent.hash.sha512': 'SHA512 hash.', + 'process.parent.hash.ssdeep': 'SSDEEP hash.', + 'process.parent.hash.tlsh': 'TLSH hash.', + 'process.parent.interactive': 'Whether the process is connected to an interactive shell.', + 'process.parent.macho.go_import_hash': 'A hash of the Go language imports in a Mach-O file.', + 'process.parent.macho.go_imports': 'List of imported Go language element names and types.', + 'process.parent.macho.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', + 'process.parent.macho.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'process.parent.macho.go_stripped': 'Whether 
the file is a stripped or obfuscated Go executable.', + 'process.parent.macho.import_hash': 'A hash of the imports in a Mach-O file.', + 'process.parent.macho.imports': 'List of imported element names and types.', + 'process.parent.macho.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'process.parent.macho.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'process.parent.macho.sections': 'Section information of the Mach-O file.', + 'process.parent.macho.sections.entropy': 'Shannon entropy calculation from the section.', + 'process.parent.macho.sections.name': 'Mach-O Section List name.', + 'process.parent.macho.sections.physical_size': 'Mach-O Section List physical size.', + 'process.parent.macho.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', + 'process.parent.macho.sections.virtual_size': + 'Mach-O Section List virtual size. 
This is always the same as `physical_size`.', + 'process.parent.macho.symhash': 'A hash of the imports in a Mach-O file.', + 'process.parent.name': 'Process name.', + 'process.parent.pe.architecture': 'CPU architecture target for the file.', + 'process.parent.pe.company': 'Internal company name of the file, provided at compile-time.', + 'process.parent.pe.description': 'Internal description of the file, provided at compile-time.', + 'process.parent.pe.file_version': 'Process name.', + 'process.parent.pe.go_import_hash': 'A hash of the Go language imports in a PE file.', + 'process.parent.pe.go_imports': 'List of imported Go language element names and types.', + 'process.parent.pe.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', + 'process.parent.pe.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'process.parent.pe.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', + 'process.parent.pe.imphash': 'A hash of the imports in a PE file.', + 'process.parent.pe.import_hash': 'A hash of the imports in a PE file.', + 'process.parent.pe.imports': 'List of imported element names and types.', + 'process.parent.pe.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'process.parent.pe.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'process.parent.pe.original_file_name': 'Internal name of the file, provided at compile-time.', + 'process.parent.pe.pehash': 'A hash of the PE header and data from one or more PE sections.', + 'process.parent.pe.product': 'Internal product name of the file, provided at compile-time.', + 'process.parent.pe.sections': 'Section information of the PE file.', + 'process.parent.pe.sections.entropy': 'Shannon entropy calculation from the section.', + 'process.parent.pe.sections.name': 'PE Section 
List name.', + 'process.parent.pe.sections.physical_size': 'PE Section List physical size.', + 'process.parent.pe.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', + 'process.parent.pe.sections.virtual_size': + 'PE Section List virtual size. This is always the same as `physical_size`.', + 'process.parent.pgid': 'Deprecated identifier of the group of processes the process belongs to.', + 'process.parent.pid': 'Process id.', + 'process.parent.real_group.id': 'Unique identifier for the group on the system/platform.', + 'process.parent.real_group.name': 'Name of the group.', + 'process.parent.real_user.id': 'Unique identifier of the user.', + 'process.parent.real_user.name': 'Short name or login of the user.', + 'process.parent.saved_group.id': 'Unique identifier for the group on the system/platform.', + 'process.parent.saved_group.name': 'Name of the group.', + 'process.parent.saved_user.id': 'Unique identifier of the user.', + 'process.parent.saved_user.name': 'Short name or login of the user.', + 'process.parent.start': 'The time the process started.', + 'process.parent.supplemental_groups.id': + 'Unique identifier for the group on the system/platform.', + 'process.parent.supplemental_groups.name': 'Name of the group.', + 'process.parent.thread.capabilities.effective': + 'Array of capabilities used for permission checks.', + 'process.parent.thread.capabilities.permitted': 'Array of capabilities a thread could assume.', + 'process.parent.thread.id': 'Thread ID.', + 'process.parent.thread.name': 'Thread name.', + 'process.parent.title': 'Process title.', + 'process.parent.tty': 'Information about the controlling TTY device.', + 'process.parent.tty.char_device.major': 'The TTY character devices major number.', + 'process.parent.tty.char_device.minor': 'The TTY character devices minor number.', + 'process.parent.uptime': 'Seconds the process has been up.', + 'process.parent.user.id': 'Unique identifier of the user.', + 
'process.parent.user.name': 'Short name or login of the user.', + 'process.parent.vpid': 'Virtual process id.', + 'process.parent.working_directory': 'The working directory of the process.', + 'process.pe.architecture': 'CPU architecture target for the file.', + 'process.pe.company': 'Internal company name of the file, provided at compile-time.', + 'process.pe.description': 'Internal description of the file, provided at compile-time.', + 'process.pe.file_version': 'Process name.', + 'process.pe.go_import_hash': 'A hash of the Go language imports in a PE file.', + 'process.pe.go_imports': 'List of imported Go language element names and types.', + 'process.pe.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', + 'process.pe.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'process.pe.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', + 'process.pe.imphash': 'A hash of the imports in a PE file.', + 'process.pe.import_hash': 'A hash of the imports in a PE file.', + 'process.pe.imports': 'List of imported element names and types.', + 'process.pe.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'process.pe.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'process.pe.original_file_name': 'Internal name of the file, provided at compile-time.', + 'process.pe.pehash': 'A hash of the PE header and data from one or more PE sections.', + 'process.pe.product': 'Internal product name of the file, provided at compile-time.', + 'process.pe.sections': 'Section information of the PE file.', + 'process.pe.sections.entropy': 'Shannon entropy calculation from the section.', + 'process.pe.sections.name': 'PE Section List name.', + 'process.pe.sections.physical_size': 'PE Section List physical size.', + 
'process.pe.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', + 'process.pe.sections.virtual_size': + 'PE Section List virtual size. This is always the same as `physical_size`.', + 'process.pgid': 'Deprecated identifier of the group of processes the process belongs to.', + 'process.pid': 'Process id.', + 'process.previous.args': 'Array of process arguments.', + 'process.previous.args_count': 'Length of the process.args array.', + 'process.previous.executable': 'Absolute path to the process executable.', + 'process.real_group.id': 'Unique identifier for the group on the system/platform.', + 'process.real_group.name': 'Name of the group.', + 'process.real_user.id': 'Unique identifier of the user.', + 'process.real_user.name': 'Short name or login of the user.', + 'process.saved_group.id': 'Unique identifier for the group on the system/platform.', + 'process.saved_group.name': 'Name of the group.', + 'process.saved_user.id': 'Unique identifier of the user.', + 'process.saved_user.name': 'Short name or login of the user.', + 'process.session_leader.args': 'Array of process arguments.', + 'process.session_leader.args_count': 'Length of the process.args array.', + 'process.session_leader.command_line': 'Full command line that started the process.', + 'process.session_leader.entity_id': 'Unique identifier for the process.', + 'process.session_leader.executable': 'Absolute path to the process executable.', + 'process.session_leader.group.id': 'Unique identifier for the group on the system/platform.', + 'process.session_leader.group.name': 'Name of the group.', + 'process.session_leader.interactive': 'Whether the process is connected to an interactive shell.', + 'process.session_leader.name': 'Process name.', + 'process.session_leader.parent.entity_id': 'Unique identifier for the process.', + 'process.session_leader.parent.pid': 'Process id.', + 'process.session_leader.parent.session_leader.entity_id': 'Unique identifier for the process.', 
+ 'process.session_leader.parent.session_leader.pid': 'Process id.', + 'process.session_leader.parent.session_leader.start': 'The time the process started.', + 'process.session_leader.parent.session_leader.vpid': 'Virtual process id.', + 'process.session_leader.parent.start': 'The time the process started.', + 'process.session_leader.parent.vpid': 'Virtual process id.', + 'process.session_leader.pid': 'Process id.', + 'process.session_leader.real_group.id': 'Unique identifier for the group on the system/platform.', + 'process.session_leader.real_group.name': 'Name of the group.', + 'process.session_leader.real_user.id': 'Unique identifier of the user.', + 'process.session_leader.real_user.name': 'Short name or login of the user.', + 'process.session_leader.same_as_process': + 'This boolean is used to identify if a leader process is the same as the top level process.', + 'process.session_leader.saved_group.id': + 'Unique identifier for the group on the system/platform.', + 'process.session_leader.saved_group.name': 'Name of the group.', + 'process.session_leader.saved_user.id': 'Unique identifier of the user.', + 'process.session_leader.saved_user.name': 'Short name or login of the user.', + 'process.session_leader.start': 'The time the process started.', + 'process.session_leader.supplemental_groups.id': + 'Unique identifier for the group on the system/platform.', + 'process.session_leader.supplemental_groups.name': 'Name of the group.', + 'process.session_leader.tty': 'Information about the controlling TTY device.', + 'process.session_leader.tty.char_device.major': 'The TTY character devices major number.', + 'process.session_leader.tty.char_device.minor': 'The TTY character devices minor number.', + 'process.session_leader.user.id': 'Unique identifier of the user.', + 'process.session_leader.user.name': 'Short name or login of the user.', + 'process.session_leader.vpid': 'Virtual process id.', + 'process.session_leader.working_directory': 'The working directory 
of the process.', + 'process.start': 'The time the process started.', + 'process.supplemental_groups.id': 'Unique identifier for the group on the system/platform.', + 'process.supplemental_groups.name': 'Name of the group.', + 'process.thread.capabilities.effective': 'Array of capabilities used for permission checks.', + 'process.thread.capabilities.permitted': 'Array of capabilities a thread could assume.', + 'process.thread.id': 'Thread ID.', + 'process.thread.name': 'Thread name.', + 'process.title': 'Process title.', + 'process.tty': 'Information about the controlling TTY device.', + 'process.tty.char_device.major': 'The TTY character devices major number.', + 'process.tty.char_device.minor': 'The TTY character devices minor number.', + 'process.tty.columns': 'The number of character columns per line. e.g terminal width', + 'process.tty.rows': 'The number of character rows in the terminal. e.g terminal height', + 'process.uptime': 'Seconds the process has been up.', + 'process.user.id': 'Unique identifier of the user.', + 'process.user.name': 'Short name or login of the user.', + 'process.vpid': 'Virtual process id.', + 'process.working_directory': 'The working directory of the process.', + 'registry.data.bytes': 'Original bytes written with base64 encoding.', + 'registry.data.strings': 'List of strings representing what was written to the registry.', + 'registry.data.type': 'Standard registry type for encoding contents', + 'registry.hive': 'Abbreviated name for the hive.', + 'registry.key': 'Hive-relative path of keys.', + 'registry.path': 'Full path, including hive, key and value', + 'registry.value': 'Name of the value written.', + 'related.hash': 'All the hashes seen on your event.', + 'related.hosts': 'All the host identifiers seen on your event.', + 'related.ip': 'All of the IPs seen on your event.', + 'related.user': 'All the user names or other user identifiers seen on the event.', + 'rule.author': 'Rule author', + 'rule.category': 'Rule category', + 
'rule.description': 'Rule description', + 'rule.id': 'Rule ID', + 'rule.license': 'Rule license', + 'rule.name': 'Rule name', + 'rule.reference': 'Rule reference URL', + 'rule.ruleset': 'Rule ruleset', + 'rule.uuid': 'Rule UUID', + 'rule.version': 'Rule version', + 'server.address': 'Server network address.', + 'server.as.number': 'Unique number allocated to the autonomous system.', + 'server.as.organization.name': 'Organization name.', + 'server.bytes': 'Bytes sent from the server to the client.', + 'server.domain': 'The domain name of the server.', + 'server.geo.city_name': 'City name.', + 'server.geo.continent_code': 'Continent code.', + 'server.geo.continent_name': 'Name of the continent.', + 'server.geo.country_iso_code': 'Country ISO code.', + 'server.geo.country_name': 'Country name.', + 'server.geo.location': 'Longitude and latitude.', + 'server.geo.name': 'User-defined description of a location.', + 'server.geo.postal_code': 'Postal code.', + 'server.geo.region_iso_code': 'Region ISO code.', + 'server.geo.region_name': 'Region name.', + 'server.geo.timezone': 'Time zone.', + 'server.ip': 'IP address of the server.', + 'server.mac': 'MAC address of the server.', + 'server.nat.ip': 'Server NAT ip', + 'server.nat.port': 'Server NAT port', + 'server.packets': 'Packets sent from the server to the client.', + 'server.port': 'Port of the server.', + 'server.registered_domain': 'The highest registered server domain, stripped of the subdomain.', + 'server.subdomain': 'The subdomain of the domain.', + 'server.top_level_domain': 'The effective top level domain (com, org, net, co.uk).', + 'server.user.domain': 'Name of the directory the user is a member of.', + 'server.user.email': 'User email address.', + 'server.user.full_name': 'Users full name, if available.', + 'server.user.group.domain': 'Name of the directory the group is a member of.', + 'server.user.group.id': 'Unique identifier for the group on the system/platform.', + 'server.user.group.name': 'Name of the 
group.', + 'server.user.hash': 'Unique user hash to correlate information for a user in anonymized form.', + 'server.user.id': 'Unique identifier of the user.', + 'server.user.name': 'Short name or login of the user.', + 'server.user.roles': 'Array of user roles at the time of the event.', + 'service.address': 'Address of this service.', + 'service.environment': 'Environment of the service.', + 'service.ephemeral_id': 'Ephemeral identifier of this service.', + 'service.id': 'Unique identifier of the running service.', + 'service.name': 'Name of the service.', + 'service.node.name': 'Name of the service node.', + 'service.node.role': 'Deprecated role (singular) of the service node.', + 'service.node.roles': 'Roles of the service node.', + 'service.origin.address': 'Address of this service.', + 'service.origin.environment': 'Environment of the service.', + 'service.origin.ephemeral_id': 'Ephemeral identifier of this service.', + 'service.origin.id': 'Unique identifier of the running service.', + 'service.origin.name': 'Name of the service.', + 'service.origin.node.name': 'Name of the service node.', + 'service.origin.node.role': 'Deprecated role (singular) of the service node.', + 'service.origin.node.roles': 'Roles of the service node.', + 'service.origin.state': 'Current state of the service.', + 'service.origin.type': 'The type of the service.', + 'service.origin.version': 'Version of the service.', + 'service.state': 'Current state of the service.', + 'service.target.address': 'Address of this service.', + 'service.target.environment': 'Environment of the service.', + 'service.target.ephemeral_id': 'Ephemeral identifier of this service.', + 'service.target.id': 'Unique identifier of the running service.', + 'service.target.name': 'Name of the service.', + 'service.target.node.name': 'Name of the service node.', + 'service.target.node.role': 'Deprecated role (singular) of the service node.', + 'service.target.node.roles': 'Roles of the service node.', + 
'service.target.state': 'Current state of the service.', + 'service.target.type': 'The type of the service.', + 'service.target.version': 'Version of the service.', + 'service.type': 'The type of the service.', + 'service.version': 'Version of the service.', + 'source.address': 'Source network address.', + 'source.as.number': 'Unique number allocated to the autonomous system.', + 'source.as.organization.name': 'Organization name.', + 'source.bytes': 'Bytes sent from the source to the destination.', + 'source.domain': 'The domain name of the source.', + 'source.geo.city_name': 'City name.', + 'source.geo.continent_code': 'Continent code.', + 'source.geo.continent_name': 'Name of the continent.', + 'source.geo.country_iso_code': 'Country ISO code.', + 'source.geo.country_name': 'Country name.', + 'source.geo.location': 'Longitude and latitude.', + 'source.geo.name': 'User-defined description of a location.', + 'source.geo.postal_code': 'Postal code.', + 'source.geo.region_iso_code': 'Region ISO code.', + 'source.geo.region_name': 'Region name.', + 'source.geo.timezone': 'Time zone.', + 'source.ip': 'IP address of the source.', + 'source.mac': 'MAC address of the source.', + 'source.nat.ip': 'Source NAT ip', + 'source.nat.port': 'Source NAT port', + 'source.packets': 'Packets sent from the source to the destination.', + 'source.port': 'Port of the source.', + 'source.registered_domain': 'The highest registered source domain, stripped of the subdomain.', + 'source.subdomain': 'The subdomain of the domain.', + 'source.top_level_domain': 'The effective top level domain (com, org, net, co.uk).', + 'source.user.domain': 'Name of the directory the user is a member of.', + 'source.user.email': 'User email address.', + 'source.user.full_name': 'Users full name, if available.', + 'source.user.group.domain': 'Name of the directory the group is a member of.', + 'source.user.group.id': 'Unique identifier for the group on the system/platform.', + 'source.user.group.name': 'Name of 
the group.', + 'source.user.hash': 'Unique user hash to correlate information for a user in anonymized form.', + 'source.user.id': 'Unique identifier of the user.', + 'source.user.name': 'Short name or login of the user.', + 'source.user.roles': 'Array of user roles at the time of the event.', + 'span.id': 'Unique identifier of the span within the scope of its trace.', + tags: 'List of keywords used to tag each event.', + 'threat.enrichments': 'List of objects containing indicators enriching the event.', + 'threat.enrichments.indicator': 'Object containing indicators enriching the event.', + 'threat.enrichments.indicator.as.number': 'Unique number allocated to the autonomous system.', + 'threat.enrichments.indicator.as.organization.name': 'Organization name.', + 'threat.enrichments.indicator.confidence': 'Indicator confidence rating', + 'threat.enrichments.indicator.description': 'Indicator description', + 'threat.enrichments.indicator.email.address': 'Indicator email address', + 'threat.enrichments.indicator.file.accessed': 'Last time the file was accessed.', + 'threat.enrichments.indicator.file.attributes': 'Array of file attributes.', + 'threat.enrichments.indicator.file.code_signature.digest_algorithm': + 'Hashing algorithm used to sign the process.', + 'threat.enrichments.indicator.file.code_signature.exists': + 'Boolean to capture if a signature is present.', + 'threat.enrichments.indicator.file.code_signature.signing_id': + 'The identifier used to sign the process.', + 'threat.enrichments.indicator.file.code_signature.status': + 'Additional information about the certificate status.', + 'threat.enrichments.indicator.file.code_signature.subject_name': + 'Subject name of the code signer', + 'threat.enrichments.indicator.file.code_signature.team_id': + 'The team identifier used to sign the process.', + 'threat.enrichments.indicator.file.code_signature.timestamp': + 'When the signature was generated and signed.', + 
'threat.enrichments.indicator.file.code_signature.trusted': + 'Stores the trust status of the certificate chain.', + 'threat.enrichments.indicator.file.code_signature.valid': + 'Boolean to capture if the digital signature is verified against the binary content.', + 'threat.enrichments.indicator.file.created': 'File creation time.', + 'threat.enrichments.indicator.file.ctime': 'Last time the file attributes or metadata changed.', + 'threat.enrichments.indicator.file.device': 'Device that is the source of the file.', + 'threat.enrichments.indicator.file.directory': 'Directory where the file is located.', + 'threat.enrichments.indicator.file.drive_letter': 'Drive letter where the file is located.', + 'threat.enrichments.indicator.file.elf.architecture': 'Machine architecture of the ELF file.', + 'threat.enrichments.indicator.file.elf.byte_order': 'Byte sequence of ELF file.', + 'threat.enrichments.indicator.file.elf.cpu_type': 'CPU type of the ELF file.', + 'threat.enrichments.indicator.file.elf.creation_date': 'Build or compile date.', + 'threat.enrichments.indicator.file.elf.exports': 'List of exported element names and types.', + 'threat.enrichments.indicator.file.elf.go_import_hash': + 'A hash of the Go language imports in an ELF file.', + 'threat.enrichments.indicator.file.elf.go_imports': + 'List of imported Go language element names and types.', + 'threat.enrichments.indicator.file.elf.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', + 'threat.enrichments.indicator.file.elf.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'threat.enrichments.indicator.file.elf.go_stripped': + 'Whether the file is a stripped or obfuscated Go executable.', + 'threat.enrichments.indicator.file.elf.header.abi_version': + 'Version of the ELF Application Binary Interface (ABI).', + 'threat.enrichments.indicator.file.elf.header.class': 'Header class of the ELF file.', + 
'threat.enrichments.indicator.file.elf.header.data': 'Data table of the ELF header.', + 'threat.enrichments.indicator.file.elf.header.entrypoint': 'Header entrypoint of the ELF file.', + 'threat.enrichments.indicator.file.elf.header.object_version': '"0x1" for original ELF files.', + 'threat.enrichments.indicator.file.elf.header.os_abi': + 'Application Binary Interface (ABI) of the Linux OS.', + 'threat.enrichments.indicator.file.elf.header.type': 'Header type of the ELF file.', + 'threat.enrichments.indicator.file.elf.header.version': 'Version of the ELF header.', + 'threat.enrichments.indicator.file.elf.import_hash': 'A hash of the imports in an ELF file.', + 'threat.enrichments.indicator.file.elf.imports': 'List of imported element names and types.', + 'threat.enrichments.indicator.file.elf.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'threat.enrichments.indicator.file.elf.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'threat.enrichments.indicator.file.elf.sections': 'Section information of the ELF file.', + 'threat.enrichments.indicator.file.elf.sections.chi2': + 'Chi-square probability distribution of the section.', + 'threat.enrichments.indicator.file.elf.sections.entropy': + 'Shannon entropy calculation from the section.', + 'threat.enrichments.indicator.file.elf.sections.flags': 'ELF Section List flags.', + 'threat.enrichments.indicator.file.elf.sections.name': 'ELF Section List name.', + 'threat.enrichments.indicator.file.elf.sections.physical_offset': 'ELF Section List offset.', + 'threat.enrichments.indicator.file.elf.sections.physical_size': 'ELF Section List physical size.', + 'threat.enrichments.indicator.file.elf.sections.type': 'ELF Section List type.', + 'threat.enrichments.indicator.file.elf.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', + 
'threat.enrichments.indicator.file.elf.sections.virtual_address': + 'ELF Section List virtual address.', + 'threat.enrichments.indicator.file.elf.sections.virtual_size': 'ELF Section List virtual size.', + 'threat.enrichments.indicator.file.elf.segments': 'ELF object segment list.', + 'threat.enrichments.indicator.file.elf.segments.sections': 'ELF object segment sections.', + 'threat.enrichments.indicator.file.elf.segments.type': 'ELF object segment type.', + 'threat.enrichments.indicator.file.elf.shared_libraries': + 'List of shared libraries used by this ELF object.', + 'threat.enrichments.indicator.file.elf.telfhash': 'telfhash hash for ELF file.', + 'threat.enrichments.indicator.file.extension': 'File extension, excluding the leading dot.', + 'threat.enrichments.indicator.file.fork_name': + 'A fork is additional data associated with a filesystem object.', + 'threat.enrichments.indicator.file.gid': 'Primary group ID (GID) of the file.', + 'threat.enrichments.indicator.file.group': 'Primary group name of the file.', + 'threat.enrichments.indicator.file.hash.md5': 'MD5 hash.', + 'threat.enrichments.indicator.file.hash.sha1': 'SHA1 hash.', + 'threat.enrichments.indicator.file.hash.sha256': 'SHA256 hash.', + 'threat.enrichments.indicator.file.hash.sha384': 'SHA384 hash.', + 'threat.enrichments.indicator.file.hash.sha512': 'SHA512 hash.', + 'threat.enrichments.indicator.file.hash.ssdeep': 'SSDEEP hash.', + 'threat.enrichments.indicator.file.hash.tlsh': 'TLSH hash.', + 'threat.enrichments.indicator.file.inode': 'Inode representing the file in the filesystem.', + 'threat.enrichments.indicator.file.mime_type': + 'Media type of file, document, or arrangement of bytes.', + 'threat.enrichments.indicator.file.mode': 'Mode of the file in octal representation.', + 'threat.enrichments.indicator.file.mtime': 'Last time the file content was modified.', + 'threat.enrichments.indicator.file.name': + 'Name of the file including the extension, without the directory.', + 
'threat.enrichments.indicator.file.owner': 'File owners username.', + 'threat.enrichments.indicator.file.path': 'Full path to the file, including the file name.', + 'threat.enrichments.indicator.file.pe.architecture': 'CPU architecture target for the file.', + 'threat.enrichments.indicator.file.pe.company': + 'Internal company name of the file, provided at compile-time.', + 'threat.enrichments.indicator.file.pe.description': + 'Internal description of the file, provided at compile-time.', + 'threat.enrichments.indicator.file.pe.file_version': 'Process name.', + 'threat.enrichments.indicator.file.pe.go_import_hash': + 'A hash of the Go language imports in a PE file.', + 'threat.enrichments.indicator.file.pe.go_imports': + 'List of imported Go language element names and types.', + 'threat.enrichments.indicator.file.pe.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', + 'threat.enrichments.indicator.file.pe.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'threat.enrichments.indicator.file.pe.go_stripped': + 'Whether the file is a stripped or obfuscated Go executable.', + 'threat.enrichments.indicator.file.pe.imphash': 'A hash of the imports in a PE file.', + 'threat.enrichments.indicator.file.pe.import_hash': 'A hash of the imports in a PE file.', + 'threat.enrichments.indicator.file.pe.imports': 'List of imported element names and types.', + 'threat.enrichments.indicator.file.pe.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'threat.enrichments.indicator.file.pe.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'threat.enrichments.indicator.file.pe.original_file_name': + 'Internal name of the file, provided at compile-time.', + 'threat.enrichments.indicator.file.pe.pehash': + 'A hash of the PE header and data from one or more PE 
sections.', + 'threat.enrichments.indicator.file.pe.product': + 'Internal product name of the file, provided at compile-time.', + 'threat.enrichments.indicator.file.pe.sections': 'Section information of the PE file.', + 'threat.enrichments.indicator.file.pe.sections.entropy': + 'Shannon entropy calculation from the section.', + 'threat.enrichments.indicator.file.pe.sections.name': 'PE Section List name.', + 'threat.enrichments.indicator.file.pe.sections.physical_size': 'PE Section List physical size.', + 'threat.enrichments.indicator.file.pe.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', + 'threat.enrichments.indicator.file.pe.sections.virtual_size': + 'PE Section List virtual size. This is always the same as `physical_size`.', + 'threat.enrichments.indicator.file.size': 'File size in bytes.', + 'threat.enrichments.indicator.file.target_path': 'Target path for symlinks.', + 'threat.enrichments.indicator.file.type': 'File type (file, dir, or symlink).', + 'threat.enrichments.indicator.file.uid': + 'The user ID (UID) or security identifier (SID) of the file owner.', + 'threat.enrichments.indicator.file.x509.alternative_names': + 'List of subject alternative names (SAN).', + 'threat.enrichments.indicator.file.x509.issuer.common_name': + 'List of common name (CN) of issuing certificate authority.', + 'threat.enrichments.indicator.file.x509.issuer.country': 'List of country (C) codes', + 'threat.enrichments.indicator.file.x509.issuer.distinguished_name': + 'Distinguished name (DN) of issuing certificate authority.', + 'threat.enrichments.indicator.file.x509.issuer.locality': 'List of locality names (L)', + 'threat.enrichments.indicator.file.x509.issuer.organization': + 'List of organizations (O) of issuing certificate authority.', + 'threat.enrichments.indicator.file.x509.issuer.organizational_unit': + 'List of organizational units (OU) of issuing certificate authority.', + 
'threat.enrichments.indicator.file.x509.issuer.state_or_province': + 'List of state or province names (ST, S, or P)', + 'threat.enrichments.indicator.file.x509.not_after': + 'Time at which the certificate is no longer considered valid.', + 'threat.enrichments.indicator.file.x509.not_before': + 'Time at which the certificate is first considered valid.', + 'threat.enrichments.indicator.file.x509.public_key_algorithm': + 'Algorithm used to generate the public key.', + 'threat.enrichments.indicator.file.x509.public_key_curve': + 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', + 'threat.enrichments.indicator.file.x509.public_key_exponent': + 'Exponent used to derive the public key. This is algorithm specific.', + 'threat.enrichments.indicator.file.x509.public_key_size': + 'The size of the public key space in bits.', + 'threat.enrichments.indicator.file.x509.serial_number': + 'Unique serial number issued by the certificate authority.', + 'threat.enrichments.indicator.file.x509.signature_algorithm': + 'Identifier for certificate signature algorithm.', + 'threat.enrichments.indicator.file.x509.subject.common_name': + 'List of common names (CN) of subject.', + 'threat.enrichments.indicator.file.x509.subject.country': 'List of country (C) code', + 'threat.enrichments.indicator.file.x509.subject.distinguished_name': + 'Distinguished name (DN) of the certificate subject entity.', + 'threat.enrichments.indicator.file.x509.subject.locality': 'List of locality names (L)', + 'threat.enrichments.indicator.file.x509.subject.organization': + 'List of organizations (O) of subject.', + 'threat.enrichments.indicator.file.x509.subject.organizational_unit': + 'List of organizational units (OU) of subject.', + 'threat.enrichments.indicator.file.x509.subject.state_or_province': + 'List of state or province names (ST, S, or P)', + 'threat.enrichments.indicator.file.x509.version_number': 'Version of x509 format.', + 
'threat.enrichments.indicator.first_seen': 'Date/time indicator was first reported.', + 'threat.enrichments.indicator.geo.city_name': 'City name.', + 'threat.enrichments.indicator.geo.continent_code': 'Continent code.', + 'threat.enrichments.indicator.geo.continent_name': 'Name of the continent.', + 'threat.enrichments.indicator.geo.country_iso_code': 'Country ISO code.', + 'threat.enrichments.indicator.geo.country_name': 'Country name.', + 'threat.enrichments.indicator.geo.location': 'Longitude and latitude.', + 'threat.enrichments.indicator.geo.name': 'User-defined description of a location.', + 'threat.enrichments.indicator.geo.postal_code': 'Postal code.', + 'threat.enrichments.indicator.geo.region_iso_code': 'Region ISO code.', + 'threat.enrichments.indicator.geo.region_name': 'Region name.', + 'threat.enrichments.indicator.geo.timezone': 'Time zone.', + 'threat.enrichments.indicator.ip': 'Indicator IP address', + 'threat.enrichments.indicator.last_seen': 'Date/time indicator was last reported.', + 'threat.enrichments.indicator.marking.tlp': 'Indicator TLP marking', + 'threat.enrichments.indicator.marking.tlp_version': 'Indicator TLP version', + 'threat.enrichments.indicator.modified_at': 'Date/time indicator was last updated.', + 'threat.enrichments.indicator.name': 'Indicator display name', + 'threat.enrichments.indicator.port': 'Indicator port', + 'threat.enrichments.indicator.provider': 'Indicator provider', + 'threat.enrichments.indicator.reference': 'Indicator reference URL', + 'threat.enrichments.indicator.registry.data.bytes': + 'Original bytes written with base64 encoding.', + 'threat.enrichments.indicator.registry.data.strings': + 'List of strings representing what was written to the registry.', + 'threat.enrichments.indicator.registry.data.type': 'Standard registry type for encoding contents', + 'threat.enrichments.indicator.registry.hive': 'Abbreviated name for the hive.', + 'threat.enrichments.indicator.registry.key': 'Hive-relative path of 
keys.', + 'threat.enrichments.indicator.registry.path': 'Full path, including hive, key and value', + 'threat.enrichments.indicator.registry.value': 'Name of the value written.', + 'threat.enrichments.indicator.scanner_stats': 'Scanner statistics', + 'threat.enrichments.indicator.sightings': 'Number of times indicator observed', + 'threat.enrichments.indicator.type': 'Type of indicator', + 'threat.enrichments.indicator.url.domain': 'Domain of the url.', + 'threat.enrichments.indicator.url.extension': + 'File extension from the request url, excluding the leading dot.', + 'threat.enrichments.indicator.url.fragment': 'Portion of the url after the `#`.', + 'threat.enrichments.indicator.url.full': 'Full unparsed URL.', + 'threat.enrichments.indicator.url.original': + 'Unmodified original url as seen in the event source.', + 'threat.enrichments.indicator.url.password': 'Password of the request.', + 'threat.enrichments.indicator.url.path': 'Path of the request, such as "/search".', + 'threat.enrichments.indicator.url.port': 'Port of the request, such as 443.', + 'threat.enrichments.indicator.url.query': 'Query string of the request.', + 'threat.enrichments.indicator.url.registered_domain': + 'The highest registered url domain, stripped of the subdomain.', + 'threat.enrichments.indicator.url.scheme': 'Scheme of the url.', + 'threat.enrichments.indicator.url.subdomain': 'The subdomain of the domain.', + 'threat.enrichments.indicator.url.top_level_domain': + 'The effective top level domain (com, org, net, co.uk).', + 'threat.enrichments.indicator.url.username': 'Username of the request.', + 'threat.enrichments.indicator.x509.alternative_names': 'List of subject alternative names (SAN).', + 'threat.enrichments.indicator.x509.issuer.common_name': + 'List of common name (CN) of issuing certificate authority.', + 'threat.enrichments.indicator.x509.issuer.country': 'List of country (C) codes', + 'threat.enrichments.indicator.x509.issuer.distinguished_name': + 'Distinguished name 
(DN) of issuing certificate authority.', + 'threat.enrichments.indicator.x509.issuer.locality': 'List of locality names (L)', + 'threat.enrichments.indicator.x509.issuer.organization': + 'List of organizations (O) of issuing certificate authority.', + 'threat.enrichments.indicator.x509.issuer.organizational_unit': + 'List of organizational units (OU) of issuing certificate authority.', + 'threat.enrichments.indicator.x509.issuer.state_or_province': + 'List of state or province names (ST, S, or P)', + 'threat.enrichments.indicator.x509.not_after': + 'Time at which the certificate is no longer considered valid.', + 'threat.enrichments.indicator.x509.not_before': + 'Time at which the certificate is first considered valid.', + 'threat.enrichments.indicator.x509.public_key_algorithm': + 'Algorithm used to generate the public key.', + 'threat.enrichments.indicator.x509.public_key_curve': + 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', + 'threat.enrichments.indicator.x509.public_key_exponent': + 'Exponent used to derive the public key. 
This is algorithm specific.', + 'threat.enrichments.indicator.x509.public_key_size': 'The size of the public key space in bits.', + 'threat.enrichments.indicator.x509.serial_number': + 'Unique serial number issued by the certificate authority.', + 'threat.enrichments.indicator.x509.signature_algorithm': + 'Identifier for certificate signature algorithm.', + 'threat.enrichments.indicator.x509.subject.common_name': 'List of common names (CN) of subject.', + 'threat.enrichments.indicator.x509.subject.country': 'List of country (C) code', + 'threat.enrichments.indicator.x509.subject.distinguished_name': + 'Distinguished name (DN) of the certificate subject entity.', + 'threat.enrichments.indicator.x509.subject.locality': 'List of locality names (L)', + 'threat.enrichments.indicator.x509.subject.organization': 'List of organizations (O) of subject.', + 'threat.enrichments.indicator.x509.subject.organizational_unit': + 'List of organizational units (OU) of subject.', + 'threat.enrichments.indicator.x509.subject.state_or_province': + 'List of state or province names (ST, S, or P)', + 'threat.enrichments.indicator.x509.version_number': 'Version of x509 format.', + 'threat.enrichments.matched.atomic': 'Matched indicator value', + 'threat.enrichments.matched.field': 'Matched indicator field', + 'threat.enrichments.matched.id': 'Matched indicator identifier', + 'threat.enrichments.matched.index': 'Matched indicator index', + 'threat.enrichments.matched.occurred': 'Date of match', + 'threat.enrichments.matched.type': 'Type of indicator match', + 'threat.feed.dashboard_id': 'Feed dashboard ID.', + 'threat.feed.description': 'Description of the threat feed.', + 'threat.feed.name': 'Name of the threat feed.', + 'threat.feed.reference': 'Reference for the threat feed.', + 'threat.framework': 'Threat classification framework.', + 'threat.group.alias': 'Alias of the group.', + 'threat.group.id': 'ID of the group.', + 'threat.group.name': 'Name of the group.', + 
'threat.group.reference': 'Reference URL of the group.', + 'threat.indicator.as.number': 'Unique number allocated to the autonomous system.', + 'threat.indicator.as.organization.name': 'Organization name.', + 'threat.indicator.confidence': 'Indicator confidence rating', + 'threat.indicator.description': 'Indicator description', + 'threat.indicator.email.address': 'Indicator email address', + 'threat.indicator.file.accessed': 'Last time the file was accessed.', + 'threat.indicator.file.attributes': 'Array of file attributes.', + 'threat.indicator.file.code_signature.digest_algorithm': + 'Hashing algorithm used to sign the process.', + 'threat.indicator.file.code_signature.exists': 'Boolean to capture if a signature is present.', + 'threat.indicator.file.code_signature.signing_id': 'The identifier used to sign the process.', + 'threat.indicator.file.code_signature.status': + 'Additional information about the certificate status.', + 'threat.indicator.file.code_signature.subject_name': 'Subject name of the code signer', + 'threat.indicator.file.code_signature.team_id': 'The team identifier used to sign the process.', + 'threat.indicator.file.code_signature.timestamp': 'When the signature was generated and signed.', + 'threat.indicator.file.code_signature.trusted': + 'Stores the trust status of the certificate chain.', + 'threat.indicator.file.code_signature.valid': + 'Boolean to capture if the digital signature is verified against the binary content.', + 'threat.indicator.file.created': 'File creation time.', + 'threat.indicator.file.ctime': 'Last time the file attributes or metadata changed.', + 'threat.indicator.file.device': 'Device that is the source of the file.', + 'threat.indicator.file.directory': 'Directory where the file is located.', + 'threat.indicator.file.drive_letter': 'Drive letter where the file is located.', + 'threat.indicator.file.elf.architecture': 'Machine architecture of the ELF file.', + 'threat.indicator.file.elf.byte_order': 'Byte sequence of 
ELF file.', + 'threat.indicator.file.elf.cpu_type': 'CPU type of the ELF file.', + 'threat.indicator.file.elf.creation_date': 'Build or compile date.', + 'threat.indicator.file.elf.exports': 'List of exported element names and types.', + 'threat.indicator.file.elf.go_import_hash': 'A hash of the Go language imports in an ELF file.', + 'threat.indicator.file.elf.go_imports': 'List of imported Go language element names and types.', + 'threat.indicator.file.elf.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', + 'threat.indicator.file.elf.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'threat.indicator.file.elf.go_stripped': + 'Whether the file is a stripped or obfuscated Go executable.', + 'threat.indicator.file.elf.header.abi_version': + 'Version of the ELF Application Binary Interface (ABI).', + 'threat.indicator.file.elf.header.class': 'Header class of the ELF file.', + 'threat.indicator.file.elf.header.data': 'Data table of the ELF header.', + 'threat.indicator.file.elf.header.entrypoint': 'Header entrypoint of the ELF file.', + 'threat.indicator.file.elf.header.object_version': '"0x1" for original ELF files.', + 'threat.indicator.file.elf.header.os_abi': 'Application Binary Interface (ABI) of the Linux OS.', + 'threat.indicator.file.elf.header.type': 'Header type of the ELF file.', + 'threat.indicator.file.elf.header.version': 'Version of the ELF header.', + 'threat.indicator.file.elf.import_hash': 'A hash of the imports in an ELF file.', + 'threat.indicator.file.elf.imports': 'List of imported element names and types.', + 'threat.indicator.file.elf.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'threat.indicator.file.elf.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element names and types.', + 'threat.indicator.file.elf.sections': 'Section 
information of the ELF file.', + 'threat.indicator.file.elf.sections.chi2': 'Chi-square probability distribution of the section.', + 'threat.indicator.file.elf.sections.entropy': 'Shannon entropy calculation from the section.', + 'threat.indicator.file.elf.sections.flags': 'ELF Section List flags.', + 'threat.indicator.file.elf.sections.name': 'ELF Section List name.', + 'threat.indicator.file.elf.sections.physical_offset': 'ELF Section List offset.', + 'threat.indicator.file.elf.sections.physical_size': 'ELF Section List physical size.', + 'threat.indicator.file.elf.sections.type': 'ELF Section List type.', + 'threat.indicator.file.elf.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', + 'threat.indicator.file.elf.sections.virtual_address': 'ELF Section List virtual address.', + 'threat.indicator.file.elf.sections.virtual_size': 'ELF Section List virtual size.', + 'threat.indicator.file.elf.segments': 'ELF object segment list.', + 'threat.indicator.file.elf.segments.sections': 'ELF object segment sections.', + 'threat.indicator.file.elf.segments.type': 'ELF object segment type.', + 'threat.indicator.file.elf.shared_libraries': 'List of shared libraries used by this ELF object.', + 'threat.indicator.file.elf.telfhash': 'telfhash hash for ELF file.', + 'threat.indicator.file.extension': 'File extension, excluding the leading dot.', + 'threat.indicator.file.fork_name': + 'A fork is additional data associated with a filesystem object.', + 'threat.indicator.file.gid': 'Primary group ID (GID) of the file.', + 'threat.indicator.file.group': 'Primary group name of the file.', + 'threat.indicator.file.hash.md5': 'MD5 hash.', + 'threat.indicator.file.hash.sha1': 'SHA1 hash.', + 'threat.indicator.file.hash.sha256': 'SHA256 hash.', + 'threat.indicator.file.hash.sha384': 'SHA384 hash.', + 'threat.indicator.file.hash.sha512': 'SHA512 hash.', + 'threat.indicator.file.hash.ssdeep': 'SSDEEP hash.', + 'threat.indicator.file.hash.tlsh': 'TLSH 
hash.', + 'threat.indicator.file.inode': 'Inode representing the file in the filesystem.', + 'threat.indicator.file.mime_type': 'Media type of file, document, or arrangement of bytes.', + 'threat.indicator.file.mode': 'Mode of the file in octal representation.', + 'threat.indicator.file.mtime': 'Last time the file content was modified.', + 'threat.indicator.file.name': 'Name of the file including the extension, without the directory.', + 'threat.indicator.file.owner': 'File owners username.', + 'threat.indicator.file.path': 'Full path to the file, including the file name.', + 'threat.indicator.file.pe.architecture': 'CPU architecture target for the file.', + 'threat.indicator.file.pe.company': + 'Internal company name of the file, provided at compile-time.', + 'threat.indicator.file.pe.description': + 'Internal description of the file, provided at compile-time.', + 'threat.indicator.file.pe.file_version': 'Process name.', + 'threat.indicator.file.pe.go_import_hash': 'A hash of the Go language imports in a PE file.', + 'threat.indicator.file.pe.go_imports': 'List of imported Go language element names and types.', + 'threat.indicator.file.pe.go_imports_names_entropy': + 'Shannon entropy calculation from the list of Go imports.', + 'threat.indicator.file.pe.go_imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of Go imports.', + 'threat.indicator.file.pe.go_stripped': + 'Whether the file is a stripped or obfuscated Go executable.', + 'threat.indicator.file.pe.imphash': 'A hash of the imports in a PE file.', + 'threat.indicator.file.pe.import_hash': 'A hash of the imports in a PE file.', + 'threat.indicator.file.pe.imports': 'List of imported element names and types.', + 'threat.indicator.file.pe.imports_names_entropy': + 'Shannon entropy calculation from the list of imported element names and types.', + 'threat.indicator.file.pe.imports_names_var_entropy': + 'Variance for Shannon entropy calculation from the list of imported element 
names and types.', + 'threat.indicator.file.pe.original_file_name': + 'Internal name of the file, provided at compile-time.', + 'threat.indicator.file.pe.pehash': + 'A hash of the PE header and data from one or more PE sections.', + 'threat.indicator.file.pe.product': + 'Internal product name of the file, provided at compile-time.', + 'threat.indicator.file.pe.sections': 'Section information of the PE file.', + 'threat.indicator.file.pe.sections.entropy': 'Shannon entropy calculation from the section.', + 'threat.indicator.file.pe.sections.name': 'PE Section List name.', + 'threat.indicator.file.pe.sections.physical_size': 'PE Section List physical size.', + 'threat.indicator.file.pe.sections.var_entropy': + 'Variance for Shannon entropy calculation from the section.', + 'threat.indicator.file.pe.sections.virtual_size': + 'PE Section List virtual size. This is always the same as `physical_size`.', + 'threat.indicator.file.size': 'File size in bytes.', + 'threat.indicator.file.target_path': 'Target path for symlinks.', + 'threat.indicator.file.type': 'File type (file, dir, or symlink).', + 'threat.indicator.file.uid': 'The user ID (UID) or security identifier (SID) of the file owner.', + 'threat.indicator.file.x509.alternative_names': 'List of subject alternative names (SAN).', + 'threat.indicator.file.x509.issuer.common_name': + 'List of common name (CN) of issuing certificate authority.', + 'threat.indicator.file.x509.issuer.country': 'List of country (C) codes', + 'threat.indicator.file.x509.issuer.distinguished_name': + 'Distinguished name (DN) of issuing certificate authority.', + 'threat.indicator.file.x509.issuer.locality': 'List of locality names (L)', + 'threat.indicator.file.x509.issuer.organization': + 'List of organizations (O) of issuing certificate authority.', + 'threat.indicator.file.x509.issuer.organizational_unit': + 'List of organizational units (OU) of issuing certificate authority.', + 'threat.indicator.file.x509.issuer.state_or_province': + 
'List of state or province names (ST, S, or P)', + 'threat.indicator.file.x509.not_after': + 'Time at which the certificate is no longer considered valid.', + 'threat.indicator.file.x509.not_before': + 'Time at which the certificate is first considered valid.', + 'threat.indicator.file.x509.public_key_algorithm': 'Algorithm used to generate the public key.', + 'threat.indicator.file.x509.public_key_curve': + 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', + 'threat.indicator.file.x509.public_key_exponent': + 'Exponent used to derive the public key. This is algorithm specific.', + 'threat.indicator.file.x509.public_key_size': 'The size of the public key space in bits.', + 'threat.indicator.file.x509.serial_number': + 'Unique serial number issued by the certificate authority.', + 'threat.indicator.file.x509.signature_algorithm': + 'Identifier for certificate signature algorithm.', + 'threat.indicator.file.x509.subject.common_name': 'List of common names (CN) of subject.', + 'threat.indicator.file.x509.subject.country': 'List of country (C) code', + 'threat.indicator.file.x509.subject.distinguished_name': + 'Distinguished name (DN) of the certificate subject entity.', + 'threat.indicator.file.x509.subject.locality': 'List of locality names (L)', + 'threat.indicator.file.x509.subject.organization': 'List of organizations (O) of subject.', + 'threat.indicator.file.x509.subject.organizational_unit': + 'List of organizational units (OU) of subject.', + 'threat.indicator.file.x509.subject.state_or_province': + 'List of state or province names (ST, S, or P)', + 'threat.indicator.file.x509.version_number': 'Version of x509 format.', + 'threat.indicator.first_seen': 'Date/time indicator was first reported.', + 'threat.indicator.geo.city_name': 'City name.', + 'threat.indicator.geo.continent_code': 'Continent code.', + 'threat.indicator.geo.continent_name': 'Name of the continent.', + 'threat.indicator.geo.country_iso_code': 'Country 
ISO code.', + 'threat.indicator.geo.country_name': 'Country name.', + 'threat.indicator.geo.location': 'Longitude and latitude.', + 'threat.indicator.geo.name': 'User-defined description of a location.', + 'threat.indicator.geo.postal_code': 'Postal code.', + 'threat.indicator.geo.region_iso_code': 'Region ISO code.', + 'threat.indicator.geo.region_name': 'Region name.', + 'threat.indicator.geo.timezone': 'Time zone.', + 'threat.indicator.ip': 'Indicator IP address', + 'threat.indicator.last_seen': 'Date/time indicator was last reported.', + 'threat.indicator.marking.tlp': 'Indicator TLP marking', + 'threat.indicator.marking.tlp_version': 'Indicator TLP version', + 'threat.indicator.modified_at': 'Date/time indicator was last updated.', + 'threat.indicator.name': 'Indicator display name', + 'threat.indicator.port': 'Indicator port', + 'threat.indicator.provider': 'Indicator provider', + 'threat.indicator.reference': 'Indicator reference URL', + 'threat.indicator.registry.data.bytes': 'Original bytes written with base64 encoding.', + 'threat.indicator.registry.data.strings': + 'List of strings representing what was written to the registry.', + 'threat.indicator.registry.data.type': 'Standard registry type for encoding contents', + 'threat.indicator.registry.hive': 'Abbreviated name for the hive.', + 'threat.indicator.registry.key': 'Hive-relative path of keys.', + 'threat.indicator.registry.path': 'Full path, including hive, key and value', + 'threat.indicator.registry.value': 'Name of the value written.', + 'threat.indicator.scanner_stats': 'Scanner statistics', + 'threat.indicator.sightings': 'Number of times indicator observed', + 'threat.indicator.type': 'Type of indicator', + 'threat.indicator.url.domain': 'Domain of the url.', + 'threat.indicator.url.extension': + 'File extension from the request url, excluding the leading dot.', + 'threat.indicator.url.fragment': 'Portion of the url after the `#`.', + 'threat.indicator.url.full': 'Full unparsed URL.', + 
'threat.indicator.url.original': 'Unmodified original url as seen in the event source.', + 'threat.indicator.url.password': 'Password of the request.', + 'threat.indicator.url.path': 'Path of the request, such as "/search".', + 'threat.indicator.url.port': 'Port of the request, such as 443.', + 'threat.indicator.url.query': 'Query string of the request.', + 'threat.indicator.url.registered_domain': + 'The highest registered url domain, stripped of the subdomain.', + 'threat.indicator.url.scheme': 'Scheme of the url.', + 'threat.indicator.url.subdomain': 'The subdomain of the domain.', + 'threat.indicator.url.top_level_domain': 'The effective top level domain (com, org, net, co.uk).', + 'threat.indicator.url.username': 'Username of the request.', + 'threat.indicator.x509.alternative_names': 'List of subject alternative names (SAN).', + 'threat.indicator.x509.issuer.common_name': + 'List of common name (CN) of issuing certificate authority.', + 'threat.indicator.x509.issuer.country': 'List of country (C) codes', + 'threat.indicator.x509.issuer.distinguished_name': + 'Distinguished name (DN) of issuing certificate authority.', + 'threat.indicator.x509.issuer.locality': 'List of locality names (L)', + 'threat.indicator.x509.issuer.organization': + 'List of organizations (O) of issuing certificate authority.', + 'threat.indicator.x509.issuer.organizational_unit': + 'List of organizational units (OU) of issuing certificate authority.', + 'threat.indicator.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', + 'threat.indicator.x509.not_after': 'Time at which the certificate is no longer considered valid.', + 'threat.indicator.x509.not_before': 'Time at which the certificate is first considered valid.', + 'threat.indicator.x509.public_key_algorithm': 'Algorithm used to generate the public key.', + 'threat.indicator.x509.public_key_curve': + 'The curve used by the elliptic curve public key algorithm. 
This is algorithm specific.', + 'threat.indicator.x509.public_key_exponent': + 'Exponent used to derive the public key. This is algorithm specific.', + 'threat.indicator.x509.public_key_size': 'The size of the public key space in bits.', + 'threat.indicator.x509.serial_number': + 'Unique serial number issued by the certificate authority.', + 'threat.indicator.x509.signature_algorithm': 'Identifier for certificate signature algorithm.', + 'threat.indicator.x509.subject.common_name': 'List of common names (CN) of subject.', + 'threat.indicator.x509.subject.country': 'List of country (C) code', + 'threat.indicator.x509.subject.distinguished_name': + 'Distinguished name (DN) of the certificate subject entity.', + 'threat.indicator.x509.subject.locality': 'List of locality names (L)', + 'threat.indicator.x509.subject.organization': 'List of organizations (O) of subject.', + 'threat.indicator.x509.subject.organizational_unit': + 'List of organizational units (OU) of subject.', + 'threat.indicator.x509.subject.state_or_province': + 'List of state or province names (ST, S, or P)', + 'threat.indicator.x509.version_number': 'Version of x509 format.', + 'threat.software.alias': 'Alias of the software', + 'threat.software.id': 'ID of the software', + 'threat.software.name': 'Name of the software.', + 'threat.software.platforms': 'Platforms of the software.', + 'threat.software.reference': 'Software reference URL.', + 'threat.software.type': 'Software type.', + 'threat.tactic.id': 'Threat tactic id.', + 'threat.tactic.name': 'Threat tactic.', + 'threat.tactic.reference': 'Threat tactic URL reference.', + 'threat.technique.id': 'Threat technique id.', + 'threat.technique.name': 'Threat technique name.', + 'threat.technique.reference': 'Threat technique URL reference.', + 'threat.technique.subtechnique.id': 'Threat subtechnique id.', + 'threat.technique.subtechnique.name': 'Threat subtechnique name.', + 'threat.technique.subtechnique.reference': 'Threat subtechnique URL 
reference.', + 'tls.cipher': 'String indicating the cipher used during the current connection.', + 'tls.client.certificate': 'PEM-encoded stand-alone certificate offered by the client.', + 'tls.client.certificate_chain': + 'Array of PEM-encoded certificates that make up the certificate chain offered by the client.', + 'tls.client.hash.md5': + 'Certificate fingerprint using the MD5 digest of DER-encoded version of certificate offered by the client.', + 'tls.client.hash.sha1': + 'Certificate fingerprint using the SHA1 digest of DER-encoded version of certificate offered by the client.', + 'tls.client.hash.sha256': + 'Certificate fingerprint using the SHA256 digest of DER-encoded version of certificate offered by the client.', + 'tls.client.issuer': + 'Distinguished name of subject of the issuer of the x.509 certificate presented by the client.', + 'tls.client.ja3': + 'A hash that identifies clients based on how they perform an SSL/TLS handshake.', + 'tls.client.not_after': + 'Date/Time indicating when client certificate is no longer considered valid.', + 'tls.client.not_before': + 'Date/Time indicating when client certificate is first considered valid.', + 'tls.client.server_name': 'Hostname the client is trying to connect to. 
Also called the SNI.', + 'tls.client.subject': + 'Distinguished name of subject of the x.509 certificate presented by the client.', + 'tls.client.supported_ciphers': 'Array of ciphers offered by the client during the client hello.', + 'tls.client.x509.alternative_names': 'List of subject alternative names (SAN).', + 'tls.client.x509.issuer.common_name': + 'List of common name (CN) of issuing certificate authority.', + 'tls.client.x509.issuer.country': 'List of country (C) codes', + 'tls.client.x509.issuer.distinguished_name': + 'Distinguished name (DN) of issuing certificate authority.', + 'tls.client.x509.issuer.locality': 'List of locality names (L)', + 'tls.client.x509.issuer.organization': + 'List of organizations (O) of issuing certificate authority.', + 'tls.client.x509.issuer.organizational_unit': + 'List of organizational units (OU) of issuing certificate authority.', + 'tls.client.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', + 'tls.client.x509.not_after': 'Time at which the certificate is no longer considered valid.', + 'tls.client.x509.not_before': 'Time at which the certificate is first considered valid.', + 'tls.client.x509.public_key_algorithm': 'Algorithm used to generate the public key.', + 'tls.client.x509.public_key_curve': + 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', + 'tls.client.x509.public_key_exponent': + 'Exponent used to derive the public key. 
This is algorithm specific.', + 'tls.client.x509.public_key_size': 'The size of the public key space in bits.', + 'tls.client.x509.serial_number': 'Unique serial number issued by the certificate authority.', + 'tls.client.x509.signature_algorithm': 'Identifier for certificate signature algorithm.', + 'tls.client.x509.subject.common_name': 'List of common names (CN) of subject.', + 'tls.client.x509.subject.country': 'List of country (C) code', + 'tls.client.x509.subject.distinguished_name': + 'Distinguished name (DN) of the certificate subject entity.', + 'tls.client.x509.subject.locality': 'List of locality names (L)', + 'tls.client.x509.subject.organization': 'List of organizations (O) of subject.', + 'tls.client.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', + 'tls.client.x509.subject.state_or_province': 'List of state or province names (ST, S, or P)', + 'tls.client.x509.version_number': 'Version of x509 format.', + 'tls.curve': 'String indicating the curve used for the given cipher, when applicable.', + 'tls.established': + 'Boolean flag indicating if the TLS negotiation was successful and transitioned to an encrypted tunnel.', + 'tls.next_protocol': 'String indicating the protocol being tunneled.', + 'tls.resumed': + 'Boolean flag indicating if this TLS connection was resumed from an existing TLS negotiation.', + 'tls.server.certificate': 'PEM-encoded stand-alone certificate offered by the server.', + 'tls.server.certificate_chain': + 'Array of PEM-encoded certificates that make up the certificate chain offered by the server.', + 'tls.server.hash.md5': + 'Certificate fingerprint using the MD5 digest of DER-encoded version of certificate offered by the server.', + 'tls.server.hash.sha1': + 'Certificate fingerprint using the SHA1 digest of DER-encoded version of certificate offered by the server.', + 'tls.server.hash.sha256': + 'Certificate fingerprint using the SHA256 digest of DER-encoded version of certificate offered by 
the server.', + 'tls.server.issuer': 'Subject of the issuer of the x.509 certificate presented by the server.', + 'tls.server.ja3s': + 'A hash that identifies servers based on how they perform an SSL/TLS handshake.', + 'tls.server.not_after': + 'Timestamp indicating when server certificate is no longer considered valid.', + 'tls.server.not_before': + 'Timestamp indicating when server certificate is first considered valid.', + 'tls.server.subject': 'Subject of the x.509 certificate presented by the server.', + 'tls.server.x509.alternative_names': 'List of subject alternative names (SAN).', + 'tls.server.x509.issuer.common_name': + 'List of common name (CN) of issuing certificate authority.', + 'tls.server.x509.issuer.country': 'List of country (C) codes', + 'tls.server.x509.issuer.distinguished_name': + 'Distinguished name (DN) of issuing certificate authority.', + 'tls.server.x509.issuer.locality': 'List of locality names (L)', + 'tls.server.x509.issuer.organization': + 'List of organizations (O) of issuing certificate authority.', + 'tls.server.x509.issuer.organizational_unit': + 'List of organizational units (OU) of issuing certificate authority.', + 'tls.server.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', + 'tls.server.x509.not_after': 'Time at which the certificate is no longer considered valid.', + 'tls.server.x509.not_before': 'Time at which the certificate is first considered valid.', + 'tls.server.x509.public_key_algorithm': 'Algorithm used to generate the public key.', + 'tls.server.x509.public_key_curve': + 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', + 'tls.server.x509.public_key_exponent': + 'Exponent used to derive the public key. 
This is algorithm specific.', + 'tls.server.x509.public_key_size': 'The size of the public key space in bits.', + 'tls.server.x509.serial_number': 'Unique serial number issued by the certificate authority.', + 'tls.server.x509.signature_algorithm': 'Identifier for certificate signature algorithm.', + 'tls.server.x509.subject.common_name': 'List of common names (CN) of subject.', + 'tls.server.x509.subject.country': 'List of country (C) code', + 'tls.server.x509.subject.distinguished_name': + 'Distinguished name (DN) of the certificate subject entity.', + 'tls.server.x509.subject.locality': 'List of locality names (L)', + 'tls.server.x509.subject.organization': 'List of organizations (O) of subject.', + 'tls.server.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', + 'tls.server.x509.subject.state_or_province': 'List of state or province names (ST, S, or P)', + 'tls.server.x509.version_number': 'Version of x509 format.', + 'tls.version': 'Numeric part of the version parsed from the original string.', + 'tls.version_protocol': 'Normalized lowercase protocol name parsed from original string.', + 'trace.id': 'Unique identifier of the trace.', + 'transaction.id': 'Unique identifier of the transaction within the scope of its trace.', + 'url.domain': 'Domain of the url.', + 'url.extension': 'File extension from the request url, excluding the leading dot.', + 'url.fragment': 'Portion of the url after the `#`.', + 'url.full': 'Full unparsed URL.', + 'url.original': 'Unmodified original url as seen in the event source.', + 'url.password': 'Password of the request.', + 'url.path': 'Path of the request, such as "/search".', + 'url.port': 'Port of the request, such as 443.', + 'url.query': 'Query string of the request.', + 'url.registered_domain': 'The highest registered url domain, stripped of the subdomain.', + 'url.scheme': 'Scheme of the url.', + 'url.subdomain': 'The subdomain of the domain.', + 'url.top_level_domain': 'The effective top 
level domain (com, org, net, co.uk).', + 'url.username': 'Username of the request.', + 'user.changes.domain': 'Name of the directory the user is a member of.', + 'user.changes.email': 'User email address.', + 'user.changes.full_name': 'Users full name, if available.', + 'user.changes.group.domain': 'Name of the directory the group is a member of.', + 'user.changes.group.id': 'Unique identifier for the group on the system/platform.', + 'user.changes.group.name': 'Name of the group.', + 'user.changes.hash': 'Unique user hash to correlate information for a user in anonymized form.', + 'user.changes.id': 'Unique identifier of the user.', + 'user.changes.name': 'Short name or login of the user.', + 'user.changes.roles': 'Array of user roles at the time of the event.', + 'user.domain': 'Name of the directory the user is a member of.', + 'user.effective.domain': 'Name of the directory the user is a member of.', + 'user.effective.email': 'User email address.', + 'user.effective.full_name': 'Users full name, if available.', + 'user.effective.group.domain': 'Name of the directory the group is a member of.', + 'user.effective.group.id': 'Unique identifier for the group on the system/platform.', + 'user.effective.group.name': 'Name of the group.', + 'user.effective.hash': 'Unique user hash to correlate information for a user in anonymized form.', + 'user.effective.id': 'Unique identifier of the user.', + 'user.effective.name': 'Short name or login of the user.', + 'user.effective.roles': 'Array of user roles at the time of the event.', + 'user.email': 'User email address.', + 'user.full_name': 'Users full name, if available.', + 'user.group.domain': 'Name of the directory the group is a member of.', + 'user.group.id': 'Unique identifier for the group on the system/platform.', + 'user.group.name': 'Name of the group.', + 'user.hash': 'Unique user hash to correlate information for a user in anonymized form.', + 'user.id': 'Unique identifier of the user.', + 'user.name': 'Short 
name or login of the user.', + 'user.risk.calculated_level': + 'A risk classification level calculated by an internal system as part of entity analytics and entity risk scoring.', + 'user.risk.calculated_score': + 'A risk classification score calculated by an internal system as part of entity analytics and entity risk scoring.', + 'user.risk.calculated_score_norm': 'A normalized risk score calculated by an internal system.', + 'user.risk.static_level': + 'A risk classification level obtained from outside the system, such as from some external Threat Intelligence Platform.', + 'user.risk.static_score': + 'A risk classification score obtained from outside the system, such as from some external Threat Intelligence Platform.', + 'user.risk.static_score_norm': 'A normalized risk score calculated by an external system.', + 'user.roles': 'Array of user roles at the time of the event.', + 'user.target.domain': 'Name of the directory the user is a member of.', + 'user.target.email': 'User email address.', + 'user.target.full_name': 'Users full name, if available.', + 'user.target.group.domain': 'Name of the directory the group is a member of.', + 'user.target.group.id': 'Unique identifier for the group on the system/platform.', + 'user.target.group.name': 'Name of the group.', + 'user.target.hash': 'Unique user hash to correlate information for a user in anonymized form.', + 'user.target.id': 'Unique identifier of the user.', + 'user.target.name': 'Short name or login of the user.', + 'user.target.roles': 'Array of user roles at the time of the event.', + 'user_agent.device.name': 'Name of the device.', + 'user_agent.name': 'Name of the user agent.', + 'user_agent.original': 'Unparsed user_agent string.', + 'user_agent.os.family': 'OS family (such as redhat, debian, freebsd, windows).', + 'user_agent.os.full': 'Operating system name, including the version or code name.', + 'user_agent.os.kernel': 'Operating system kernel version as a raw string.', + 'user_agent.os.name': 
'Operating system name, without the version.', + 'user_agent.os.platform': 'Operating system platform (such centos, ubuntu, windows).', + 'user_agent.os.type': + 'Which commercial OS family (one of: linux, macos, unix, windows, ios or android).', + 'user_agent.os.version': 'Operating system version as a raw string.', + 'user_agent.version': 'Version of the user agent.', + 'volume.bus_type': 'Bus type of the device.', + 'volume.default_access': 'Bus type of the device.', + 'volume.device_name': 'Device name of the volume.', + 'volume.device_type': 'Volume device type.', + 'volume.dos_name': 'DOS name of the device.', + 'volume.file_system_type': 'Volume device file system type.', + 'volume.mount_name': 'Mount name of the volume.', + 'volume.nt_name': 'NT name of the device.', + 'volume.product_id': 'ProductID of the device.', + 'volume.product_name': 'Produce name of the volume.', + 'volume.removable': 'Indicates if the volume is removable.', + 'volume.serial_number': 'Serial number of the device.', + 'volume.size': 'Size of the volume device in bytes.', + 'volume.vendor_id': 'VendorID of the device.', + 'volume.vendor_name': 'Vendor name of the device.', + 'volume.writable': 'Indicates if the volume is writable.', + 'vulnerability.category': 'Category of a vulnerability.', + 'vulnerability.classification': 'Classification of the vulnerability.', + 'vulnerability.description': 'Description of the vulnerability.', + 'vulnerability.enumeration': 'Identifier of the vulnerability.', + 'vulnerability.id': 'ID of the vulnerability.', + 'vulnerability.reference': 'Reference of the vulnerability.', + 'vulnerability.report_id': 'Scan identification number.', + 'vulnerability.scanner.vendor': 'Name of the scanner vendor.', + 'vulnerability.score.base': 'Vulnerability Base score.', + 'vulnerability.score.environmental': 'Vulnerability Environmental score.', + 'vulnerability.score.temporal': 'Vulnerability Temporal score.', + 'vulnerability.score.version': 'CVSS version.', + 
'vulnerability.severity': 'Severity of the vulnerability.', +}; diff --git a/x-pack/plugins/integration_assistant/common/index.ts b/x-pack/plugins/integration_assistant/common/index.ts index 34cd82738ca76..5ea92a5497188 100644 --- a/x-pack/plugins/integration_assistant/common/index.ts +++ b/x-pack/plugins/integration_assistant/common/index.ts @@ -24,3 +24,5 @@ export { INTEGRATION_BUILDER_PATH, INTEGRATION_ASSISTANT_BASE_PATH, } from './constants'; + +export { ECS_FULL } from './ecs'; diff --git a/x-pack/plugins/integration_assistant/public/app.tsx b/x-pack/plugins/integration_assistant/public/app.tsx index a98461bedb8aa..7a371ddce09fc 100644 --- a/x-pack/plugins/integration_assistant/public/app.tsx +++ b/x-pack/plugins/integration_assistant/public/app.tsx @@ -5,16 +5,13 @@ * 2.0. */ -import React from 'react'; +import React, { useState } from 'react'; import ReactDOM from 'react-dom'; import { AppMountParameters } from '@kbn/core/public'; -import { - EuiPageTemplate, - EuiPageSection, - EuiText, - EuiHorizontalRule, - EuiListGroup, -} from '@elastic/eui'; +import type { IHttpFetchError } from '@kbn/core-http-browser'; +import { EuiPageTemplate, EuiText, EuiButton } from '@elastic/eui'; +import { EcsMappingApiRequest, EcsMappingApiResponse } from '../common'; + import { Services } from './services'; type Props = Services; @@ -25,28 +22,43 @@ function RoutingExplorer({ runRelatedGraph, runIntegrationBuilder, }: Props) { + const isFetchError = (response: any): response is IHttpFetchError => { + return 'message' in response; + }; + const [ecsResponseState, setEcsResponseState] = useState({} as EcsMappingApiResponse); + const [errorResponse, setErrorResponse] = useState({} as IHttpFetchError); + async function onEcsButtonClick(req: EcsMappingApiRequest) { + try { + const ecsResponse = await runEcsGraph(req); + if (isFetchError(ecsResponse)) { + setErrorResponse(ecsResponse); + console.log('finished with error'); + } else if (Object.keys(ecsResponse?.results).length > 
0) { + setEcsResponseState(ecsResponse as EcsMappingApiResponse); + console.log('finished ecs graph'); + } + } catch (e) { + setErrorResponse(e); + } + } + const req = { + packageName: 'teleport', + dataStreamName: 'audit', + formSamples: [ + '{"ei":0,"event":"user.login","uid":"b675d102-fc25-4f7a-bf5d-96468cc176ea","code":"T1000I","time":"2024-02-23T18:56:50.628Z","cluster_name":"teleport.ericbeahan.com","user":"teleport-admin","required_private_key_policy":"none","success":true,"method":"local","mfa_device":{"mfa_device_name":"otp-device","mfa_device_uuid":"d07bf388-af49-4ec2-b8a4-c8a9e785b70b","mfa_device_type":"TOTP"},"user_agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36","addr.remote":"136.61.214.196:50332"}', + '{"ei":0,"event":"cert.create","uid":"efd326fc-dd13-4df8-acef-3102c2d717d3","code":"TC000I","time":"2024-02-23T18:56:50.653Z","cluster_name":"teleport.ericbeahan.com","cert_type":"user","identity":{"user":"teleport-admin","roles":["access","editor"],"logins":["root","ubuntu","ec2-user","-teleport-internal-join"],"expires":"2024-02-24T06:56:50.648137154Z","route_to_cluster":"teleport.ericbeahan.com","traits":{"aws_role_arns":null,"azure_identities":null,"db_names":null,"db_roles":null,"db_users":null,"gcp_service_accounts":null,"host_user_gid":[""],"host_user_uid":[""],"kubernetes_groups":null,"kubernetes_users":null,"logins":["root","ubuntu","ec2-user"],"windows_logins":null},"teleport_cluster":"teleport.ericbeahan.com","client_ip":"136.61.214.196","prev_identity_expires":"0001-01-01T00:00:00Z","private_key_policy":"none"}}', + 
'{"ei":0,"event":"session.start","uid":"fff30583-13be-49e8-b159-32952c6ea34f","code":"T2000I","time":"2024-02-23T18:56:57.199Z","cluster_name":"teleport.ericbeahan.com","user":"teleport-admin","login":"ec2-user","user_kind":1,"sid":"293fda2d-2266-4d4d-b9d1-bd5ea9dd9fc3","private_key_policy":"none","namespace":"default","server_id":"face0091-2bf1-43fd-a16a-f1514b4119f4","server_hostname":"ip-172-31-8-163.us-east-2.compute.internal","server_labels":{"hostname":"ip-172-31-8-163.us-east-2.compute.internal","teleport.internal/resource-id":"dccb2999-9fb8-4169-aded-ec7a1c0a26de"},"addr.remote":"136.61.214.196:50339","proto":"ssh","size":"80:25","initial_command":[""],"session_recording":"node"}', + ], + } as EcsMappingApiRequest; return ( -

Routing examples

+

Integration Assistant test UI

- - -

Run ECS graph

-
- - runEcsGraph(), - }, - ]} - /> -
+ onEcsButtonClick(req)}>Run ECS Graph
); diff --git a/x-pack/plugins/integration_assistant/public/constants/ecsFields.tsx b/x-pack/plugins/integration_assistant/public/constants/ecsFields.tsx deleted file mode 100644 index 4e5bfe8b9994b..0000000000000 --- a/x-pack/plugins/integration_assistant/public/constants/ecsFields.tsx +++ /dev/null @@ -1,2465 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -export const ECSFIELDS = { - '@timestamp': - 'Date/time when the event originated.\nThis is the date/time extracted from the event, typically representing when the event was generated by the source.\nIf the event source has no original timestamp, this value is typically populated by the first time the event was received by the pipeline.\nRequired field for all events.', - 'agent.build.original': - 'Extended build information for the agent.\nThis field is intended to contain any build information that a data source may provide, no specific formatting is required.', - 'agent.ephemeral_id': - 'Ephemeral identifier of this agent (if one exists).\nThis id normally changes across restarts, but `agent.id` does not.', - 'agent.id': - 'Unique identifier of this agent (if one exists).\nExample: For Beats this would be beat.id.', - 'agent.name': - 'Custom name of the agent.\nThis is a name that can be given to an agent. This can be helpful if for example two Filebeat instances are running on the same host but a human readable separation is needed on which Filebeat instance data is coming from.', - 'agent.type': - 'Type of the agent.\nThe agent type always stays the same and should be given by the agent used. 
In case of Filebeat the agent would always be Filebeat also if two Filebeat instances are run on the same machine.', - 'agent.version': 'Version of the agent.', - 'client.address': - 'Some event client addresses are defined ambiguously. The event will sometimes list an IP, a domain or a unix socket. You should always store the raw address in the `.address` field.\nThen it should be duplicated to `.ip` or `.domain`, depending on which one it is.', - 'client.as.number': - 'Unique number allocated to the autonomous system. The autonomous system number (ASN) uniquely identifies each network on the Internet.', - 'client.as.organization.name': 'Organization name.', - 'client.bytes': 'Bytes sent from the client to the server.', - 'client.domain': - 'The domain name of the client system.\nThis value may be a host name, a fully qualified domain name, or another host naming format. The value may derive from the original event or be added from enrichment.', - 'client.geo.city_name': 'City name.', - 'client.geo.continent_code': "Two-letter code representing continent's name.", - 'client.geo.continent_name': 'Name of the continent.', - 'client.geo.country_iso_code': 'Country ISO code.', - 'client.geo.country_name': 'Country name.', - 'client.geo.location': 'Longitude and latitude.', - 'client.geo.name': - 'User-defined description of a location, at the level of granularity they care about.\nCould be the name of their data centers, the floor number, if this describes a local physical entity, city names.\nNot typically used in automated geolocation.', - 'client.geo.postal_code': - 'Postal code associated with the location.\nValues appropriate for this field may also be known as a postcode or ZIP code and will vary widely from country to country.', - 'client.geo.region_iso_code': 'Region ISO code.', - 'client.geo.region_name': 'Region name.', - 'client.geo.timezone': 'The time zone of the location, such as IANA time zone name.', - 'client.ip': 'IP address of the client (IPv4 or 
IPv6).', - 'client.mac': - 'MAC address of the client.\nThe notation format from RFC 7042 is suggested: Each octet (that is, 8-bit byte) is represented by two [uppercase] hexadecimal digits giving the value of the octet as an unsigned integer. Successive octets are separated by a hyphen.', - 'client.nat.ip': - 'Translated IP of source based NAT sessions (e.g. internal client to internet).\nTypically connections traversing load balancers, firewalls, or routers.', - 'client.nat.port': - 'Translated port of source based NAT sessions (e.g. internal client to internet).\nTypically connections traversing load balancers, firewalls, or routers.', - 'client.packets': 'Packets sent from the client to the server.', - 'client.port': 'Port of the client.', - 'client.registered_domain': - 'The highest registered client domain, stripped of the subdomain.\nFor example, the registered domain for "foo.example.com" is "example.com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). Trying to approximate this by simply taking the last two labels will not work well for TLDs such as "co.uk".', - 'client.subdomain': - 'The subdomain portion of a fully qualified domain name includes all of the names except the host name under the registered_domain. In a partially qualified domain, or if the the qualification level of the full name cannot be determined, subdomain contains all of the names below the registered domain.\nFor example the subdomain portion of "www.east.mydomain.co.uk" is "east". If the domain has multiple levels of subdomain, such as "sub2.sub1.example.com", the subdomain field should contain "sub2.sub1", with no trailing period.', - 'client.top_level_domain': - 'The effective top level domain (eTLD), also known as the domain suffix, is the last part of the domain name. 
For example, the top level domain for example.com is "com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). Trying to approximate this by simply taking the last label will not work well for effective TLDs such as "co.uk".', - 'client.user.domain': - 'Name of the directory the user is a member of.\nFor example, an LDAP or Active Directory domain name.', - 'client.user.email': 'User email address.', - 'client.user.full_name': "User's full name, if available.", - 'client.user.group.domain': - 'Name of the directory the group is a member of.\nFor example, an LDAP or Active Directory domain name.', - 'client.user.group.id': 'Unique identifier for the group on the system/platform.', - 'client.user.group.name': 'Name of the group.', - 'client.user.hash': - 'Unique user hash to correlate information for a user in anonymized form.\nUseful if `user.id` or `user.name` contain confidential information and cannot be used.', - 'client.user.id': 'Unique identifier of the user.', - 'client.user.name': 'Short name or login of the user.', - 'client.user.roles': 'Array of user roles at the time of the event.', - 'cloud.account.id': - 'The cloud account or organization id used to identify different entities in a multi-tenant environment.\nExamples: AWS account id, Google Cloud ORG Id, or other unique identifier.', - 'cloud.account.name': - 'The cloud account name or alias used to identify different entities in a multi-tenant environment.\nExamples: AWS account name, Google Cloud ORG display name.', - 'cloud.availability_zone': - 'Availability zone in which this host, resource, or service is located.', - 'cloud.instance.id': 'Instance ID of the host machine.', - 'cloud.instance.name': 'Instance name of the host machine.', - 'cloud.machine.type': 'Machine type of the host machine.', - 'cloud.origin.account.id': - 'The cloud account or organization id used to identify different entities in a multi-tenant environment.\nExamples: 
AWS account id, Google Cloud ORG Id, or other unique identifier.', - 'cloud.origin.account.name': - 'The cloud account name or alias used to identify different entities in a multi-tenant environment.\nExamples: AWS account name, Google Cloud ORG display name.', - 'cloud.origin.availability_zone': - 'Availability zone in which this host, resource, or service is located.', - 'cloud.origin.instance.id': 'Instance ID of the host machine.', - 'cloud.origin.instance.name': 'Instance name of the host machine.', - 'cloud.origin.machine.type': 'Machine type of the host machine.', - 'cloud.origin.project.id': - 'The cloud project identifier.\nExamples: Google Cloud Project id, Azure Project id.', - 'cloud.origin.project.name': - 'The cloud project name.\nExamples: Google Cloud Project name, Azure Project name.', - 'cloud.origin.provider': - 'Name of the cloud provider. Example values are aws, azure, gcp, or digitalocean.', - 'cloud.origin.region': 'Region in which this host, resource, or service is located.', - 'cloud.origin.service.name': - 'The cloud service name is intended to distinguish services running on different platforms within a provider, eg AWS EC2 vs Lambda, GCP GCE vs App Engine, Azure VM vs App Server.\nExamples: app engine, app service, cloud run, fargate, lambda.', - 'cloud.project.id': - 'The cloud project identifier.\nExamples: Google Cloud Project id, Azure Project id.', - 'cloud.project.name': - 'The cloud project name.\nExamples: Google Cloud Project name, Azure Project name.', - 'cloud.provider': - 'Name of the cloud provider. 
Example values are aws, azure, gcp, or digitalocean.', - 'cloud.region': 'Region in which this host, resource, or service is located.', - 'cloud.service.name': - 'The cloud service name is intended to distinguish services running on different platforms within a provider, eg AWS EC2 vs Lambda, GCP GCE vs App Engine, Azure VM vs App Server.\nExamples: app engine, app service, cloud run, fargate, lambda.', - 'cloud.target.account.id': - 'The cloud account or organization id used to identify different entities in a multi-tenant environment.\nExamples: AWS account id, Google Cloud ORG Id, or other unique identifier.', - 'cloud.target.account.name': - 'The cloud account name or alias used to identify different entities in a multi-tenant environment.\nExamples: AWS account name, Google Cloud ORG display name.', - 'cloud.target.availability_zone': - 'Availability zone in which this host, resource, or service is located.', - 'cloud.target.instance.id': 'Instance ID of the host machine.', - 'cloud.target.instance.name': 'Instance name of the host machine.', - 'cloud.target.machine.type': 'Machine type of the host machine.', - 'cloud.target.project.id': - 'The cloud project identifier.\nExamples: Google Cloud Project id, Azure Project id.', - 'cloud.target.project.name': - 'The cloud project name.\nExamples: Google Cloud Project name, Azure Project name.', - 'cloud.target.provider': - 'Name of the cloud provider. Example values are aws, azure, gcp, or digitalocean.', - 'cloud.target.region': 'Region in which this host, resource, or service is located.', - 'cloud.target.service.name': - 'The cloud service name is intended to distinguish services running on different platforms within a provider, eg AWS EC2 vs Lambda, GCP GCE vs App Engine, Azure VM vs App Server.\nExamples: app engine, app service, cloud run, fargate, lambda.', - 'container.cpu.usage': - 'Percent CPU used which is normalized by the number of CPU cores and it ranges from 0 to 1. 
Scaling factor: 1000.', - 'container.disk.read.bytes': - 'The total number of bytes (gauge) read successfully (aggregated from all disks) since the last metric collection.', - 'container.disk.write.bytes': - 'The total number of bytes (gauge) written successfully (aggregated from all disks) since the last metric collection.', - 'container.id': 'Unique container id.', - 'container.image.hash.all': - 'An array of digests of the image the container was built on. Each digest consists of the hash algorithm and value in this format: `algorithm:value`. Algorithm names should align with the field names in the ECS hash field set.', - 'container.image.name': 'Name of the image the container was built on.', - 'container.image.tag': 'Container image tags.', - 'container.labels': 'Image labels.', - 'container.memory.usage': - 'Memory usage percentage and it ranges from 0 to 1. Scaling factor: 1000.', - 'container.name': 'Container name.', - 'container.network.egress.bytes': - 'The number of bytes (gauge) sent out on all network interfaces by the container since the last metric collection.', - 'container.network.ingress.bytes': - 'The number of bytes received (gauge) on all network interfaces by the container since the last metric collection.', - 'container.runtime': 'Runtime managing this container.', - 'data_stream.dataset': - 'The field can contain anything that makes sense to signify the source of the data.\nExamples include `nginx.access`, `prometheus`, `endpoint` etc. For data streams that otherwise fit, but that do not have dataset set we use the value "generic" for the dataset value. `event.dataset` should have the same value as `data_stream.dataset`.\nBeyond the Elasticsearch data stream naming criteria noted above, the `dataset` value has additional restrictions:\n * Must not contain `-`\n * No longer than 100 characters', - 'data_stream.namespace': - 'A user defined namespace. 
Namespaces are useful to allow grouping of data.\nMany users already organize their indices this way, and the data stream naming scheme now provides this best practice as a default. Many users will populate this field with `default`. If no value is used, it falls back to `default`.\nBeyond the Elasticsearch index naming criteria noted above, `namespace` value has the additional restrictions:\n * Must not contain `-`\n * No longer than 100 characters', - 'data_stream.type': - 'An overarching type for the data stream.\nCurrently allowed values are "logs" and "metrics". We expect to also add "traces" and "synthetics" in the near future.', - 'destination.address': - 'Some event destination addresses are defined ambiguously. The event will sometimes list an IP, a domain or a unix socket. You should always store the raw address in the `.address` field.\nThen it should be duplicated to `.ip` or `.domain`, depending on which one it is.', - 'destination.as.number': - 'Unique number allocated to the autonomous system. The autonomous system number (ASN) uniquely identifies each network on the Internet.', - 'destination.as.organization.name': 'Organization name.', - 'destination.bytes': 'Bytes sent from the destination to the source.', - 'destination.domain': - 'The domain name of the destination system.\nThis value may be a host name, a fully qualified domain name, or another host naming format. 
The value may derive from the original event or be added from enrichment.', - 'destination.geo.city_name': 'City name.', - 'destination.geo.continent_code': "Two-letter code representing continent's name.", - 'destination.geo.continent_name': 'Name of the continent.', - 'destination.geo.country_iso_code': 'Country ISO code.', - 'destination.geo.country_name': 'Country name.', - 'destination.geo.location': 'Longitude and latitude.', - 'destination.geo.name': - 'User-defined description of a location, at the level of granularity they care about.\nCould be the name of their data centers, the floor number, if this describes a local physical entity, city names.\nNot typically used in automated geolocation.', - 'destination.geo.postal_code': - 'Postal code associated with the location.\nValues appropriate for this field may also be known as a postcode or ZIP code and will vary widely from country to country.', - 'destination.geo.region_iso_code': 'Region ISO code.', - 'destination.geo.region_name': 'Region name.', - 'destination.geo.timezone': 'The time zone of the location, such as IANA time zone name.', - 'destination.ip': 'IP address of the destination (IPv4 or IPv6).', - 'destination.mac': - 'MAC address of the destination.\nThe notation format from RFC 7042 is suggested: Each octet (that is, 8-bit byte) is represented by two [uppercase] hexadecimal digits giving the value of the octet as an unsigned integer. Successive octets are separated by a hyphen.', - 'destination.nat.ip': - 'Translated ip of destination based NAT sessions (e.g. 
internet to private DMZ)\nTypically used with load balancers, firewalls, or routers.', - 'destination.nat.port': - 'Port the source session is translated to by NAT Device.\nTypically used with load balancers, firewalls, or routers.', - 'destination.packets': 'Packets sent from the destination to the source.', - 'destination.port': 'Port of the destination.', - 'destination.registered_domain': - 'The highest registered destination domain, stripped of the subdomain.\nFor example, the registered domain for "foo.example.com" is "example.com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). Trying to approximate this by simply taking the last two labels will not work well for TLDs such as "co.uk".', - 'destination.subdomain': - 'The subdomain portion of a fully qualified domain name includes all of the names except the host name under the registered_domain. In a partially qualified domain, or if the the qualification level of the full name cannot be determined, subdomain contains all of the names below the registered domain.\nFor example the subdomain portion of "www.east.mydomain.co.uk" is "east". If the domain has multiple levels of subdomain, such as "sub2.sub1.example.com", the subdomain field should contain "sub2.sub1", with no trailing period.', - 'destination.top_level_domain': - 'The effective top level domain (eTLD), also known as the domain suffix, is the last part of the domain name. For example, the top level domain for example.com is "com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). 
Trying to approximate this by simply taking the last label will not work well for effective TLDs such as "co.uk".', - 'destination.user.domain': - 'Name of the directory the user is a member of.\nFor example, an LDAP or Active Directory domain name.', - 'destination.user.email': 'User email address.', - 'destination.user.full_name': "User's full name, if available.", - 'destination.user.group.domain': - 'Name of the directory the group is a member of.\nFor example, an LDAP or Active Directory domain name.', - 'destination.user.group.id': 'Unique identifier for the group on the system/platform.', - 'destination.user.group.name': 'Name of the group.', - 'destination.user.hash': - 'Unique user hash to correlate information for a user in anonymized form.\nUseful if `user.id` or `user.name` contain confidential information and cannot be used.', - 'destination.user.id': 'Unique identifier of the user.', - 'destination.user.name': 'Short name or login of the user.', - 'destination.user.roles': 'Array of user roles at the time of the event.', - 'device.id': - 'The unique identifier of a device. The identifier must not change across application sessions but stay fixed for an instance of a (mobile) device. \nOn iOS, this value must be equal to the vendor identifier (https://developer.apple.com/documentation/uikit/uidevice/1620059-identifierforvendor). 
On Android, this value must be equal to the Firebase Installation ID or a globally unique UUID which is persisted across sessions in your application.\nFor GDPR and data protection law reasons this identifier should not carry information that would allow to identify a user.', - 'device.manufacturer': 'The vendor name of the device manufacturer.', - 'device.model.identifier': 'The machine readable identifier of the device model.', - 'device.model.name': 'The human readable marketing name of the device model.', - 'dll.code_signature.digest_algorithm': - 'The hashing algorithm used to sign the process.\nThis value can distinguish signatures when a file is signed multiple times by the same signer but with a different digest algorithm.', - 'dll.code_signature.exists': 'Boolean to capture if a signature is present.', - 'dll.code_signature.signing_id': - 'The identifier used to sign the process.\nThis is used to identify the application manufactured by a software vendor. The field is relevant to Apple *OS only.', - 'dll.code_signature.status': - 'Additional information about the certificate status.\nThis is useful for logging cryptographic errors with the certificate validity or trust status. Leave unpopulated if the validity or trust of the certificate was unchecked.', - 'dll.code_signature.subject_name': 'Subject name of the code signer', - 'dll.code_signature.team_id': - 'The team identifier used to sign the process.\nThis is used to identify the team or vendor of a software product. 
The field is relevant to Apple *OS only.', - 'dll.code_signature.timestamp': 'Date and time when the code signature was generated and signed.', - 'dll.code_signature.trusted': - 'Stores the trust status of the certificate chain.\nValidating the trust of the certificate chain may be complicated, and this field should only be populated by tools that actively check the status.', - 'dll.code_signature.valid': - 'Boolean to capture if the digital signature is verified against the binary content.\nLeave unpopulated if a certificate was unchecked.', - 'dll.hash.md5': 'MD5 hash.', - 'dll.hash.sha1': 'SHA1 hash.', - 'dll.hash.sha256': 'SHA256 hash.', - 'dll.hash.sha384': 'SHA384 hash.', - 'dll.hash.sha512': 'SHA512 hash.', - 'dll.hash.ssdeep': 'SSDEEP hash.', - 'dll.hash.tlsh': 'TLSH hash.', - 'dll.name': 'Name of the library.\nThis generally maps to the name of the file on disk.', - 'dll.path': 'Full file path of the library.', - 'dll.pe.architecture': 'CPU architecture target for the file.', - 'dll.pe.company': 'Internal company name of the file, provided at compile-time.', - 'dll.pe.description': 'Internal description of the file, provided at compile-time.', - 'dll.pe.file_version': 'Internal version of the file, provided at compile-time.', - 'dll.pe.go_import_hash': - 'A hash of the Go language imports in a PE file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', - 'dll.pe.go_imports': 'List of imported Go language element names and types.', - 'dll.pe.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', - 'dll.pe.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'dll.pe.go_stripped': - 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', - 'dll.pe.imphash': - 'A hash of the imports in a PE file. An imphash -- or import hash -- can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nLearn more at https://www.fireeye.com/blog/threat-research/2014/01/tracking-malware-import-hashing.html.', - 'dll.pe.import_hash': - 'A hash of the imports in a PE file. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a synonym for imphash.', - 'dll.pe.imports': 'List of imported element names and types.', - 'dll.pe.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'dll.pe.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'dll.pe.original_file_name': 'Internal name of the file, provided at compile-time.', - 'dll.pe.pehash': - 'A hash of the PE header and data from one or more PE sections. 
An pehash can be used to cluster files by transforming structural information about a file into a hash value.\nLearn more at https://www.usenix.org/legacy/events/leet09/tech/full_papers/wicherski/wicherski_html/index.html.', - 'dll.pe.product': 'Internal product name of the file, provided at compile-time.', - 'dll.pe.sections': - 'An array containing an object for each section of the PE file.\nThe keys that should be present in these objects are defined by sub-fields underneath `pe.sections.*`.', - 'dll.pe.sections.entropy': 'Shannon entropy calculation from the section.', - 'dll.pe.sections.name': 'PE Section List name.', - 'dll.pe.sections.physical_size': 'PE Section List physical size.', - 'dll.pe.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', - 'dll.pe.sections.virtual_size': - 'PE Section List virtual size. This is always the same as `physical_size`.', - 'dns.answers': - 'An array containing an object for each answer section returned by the server.\nThe main keys that should be present in these objects are defined by ECS. Records that have more information may contain more keys than what ECS defines.\nNot all DNS data sources give all details about DNS answers. At minimum, answer objects must contain the `data` key. If more information is available, map as much of it to ECS as possible, and add any additional fields to the answer objects as custom fields.', - 'dns.answers.class': 'The class of DNS data contained in this resource record.', - 'dns.answers.data': - 'The data describing the resource.\nThe meaning of this data depends on the type and class of the resource record.', - 'dns.answers.name': - "The domain name to which this resource record pertains.\nIf a chain of CNAME is being resolved, each answer's `name` should be the one that corresponds with the answer's `data`. 
It should not simply be the original `question.name` repeated.", - 'dns.answers.ttl': - 'The time interval in seconds that this resource record may be cached before it should be discarded. Zero values mean that the data should not be cached.', - 'dns.answers.type': 'The type of data contained in this resource record.', - 'dns.header_flags': 'Array of 2 letter DNS header flags.', - 'dns.id': - 'The DNS packet identifier assigned by the program that generated the query. The identifier is copied to the response.', - 'dns.op_code': - 'The DNS operation code that specifies the kind of query in the message. This value is set by the originator of a query and copied into the response.', - 'dns.question.class': 'The class of records being queried.', - 'dns.question.name': - 'The name being queried.\nIf the name field contains non-printable characters (below 32 or above 126), those characters should be represented as escaped base 10 integers (\\DDD). Back slashes and quotes should be escaped. Tabs, carriage returns, and line feeds should be converted to \\t, \\r, and \\n respectively.', - 'dns.question.registered_domain': - 'The highest registered domain, stripped of the subdomain.\nFor example, the registered domain for "foo.example.com" is "example.com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). Trying to approximate this by simply taking the last two labels will not work well for TLDs such as "co.uk".', - 'dns.question.subdomain': - 'The subdomain is all of the labels under the registered_domain.\nIf the domain has multiple levels of subdomain, such as "sub2.sub1.example.com", the subdomain field should contain "sub2.sub1", with no trailing period.', - 'dns.question.top_level_domain': - 'The effective top level domain (eTLD), also known as the domain suffix, is the last part of the domain name. 
For example, the top level domain for example.com is "com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). Trying to approximate this by simply taking the last label will not work well for effective TLDs such as "co.uk".', - 'dns.question.type': 'The type of record being queried.', - 'dns.resolved_ip': - 'Array containing all IPs seen in `answers.data`.\nThe `answers` array can be difficult to use, because of the variety of data formats it can contain. Extracting all IP addresses seen in there to `dns.resolved_ip` makes it possible to index them as IP addresses, and makes them easier to visualize and query for.', - 'dns.response_code': 'The DNS response code.', - 'dns.type': - 'The type of DNS event captured, query or answer.\nIf your source of DNS events only gives you DNS queries, you should only create dns events of type `dns.type:query`.\nIf your source of DNS events gives you answers as well, you should create one event per query (optionally as soon as the query is seen). And a second event containing all query details as well as an array of answers.', - 'ecs.version': - 'ECS version this event conforms to. 
`ecs.version` is a required field and must exist in all events.\nWhen querying across multiple indices -- which may conform to slightly different ECS versions -- this field lets integrations adjust to the schema version of the events.', - 'email.attachments': - 'A list of objects describing the attachment files sent along with an email message.', - 'email.attachments.file.extension': 'Attachment file extension, excluding the leading dot.', - 'email.attachments.file.hash.md5': 'MD5 hash.', - 'email.attachments.file.hash.sha1': 'SHA1 hash.', - 'email.attachments.file.hash.sha256': 'SHA256 hash.', - 'email.attachments.file.hash.sha384': 'SHA384 hash.', - 'email.attachments.file.hash.sha512': 'SHA512 hash.', - 'email.attachments.file.hash.ssdeep': 'SSDEEP hash.', - 'email.attachments.file.hash.tlsh': 'TLSH hash.', - 'email.attachments.file.mime_type': - 'The MIME media type of the attachment.\nThis value will typically be extracted from the `Content-Type` MIME header field.', - 'email.attachments.file.name': 'Name of the attachment file including the file extension.', - 'email.attachments.file.size': 'Attachment file size in bytes.', - 'email.bcc.address': 'The email address of BCC recipient', - 'email.cc.address': 'The email address of CC recipient', - 'email.content_type': - 'Information about how the message is to be displayed.\nTypically a MIME type.', - 'email.delivery_timestamp': - 'The date and time when the email message was received by the service or client.', - 'email.direction': 'The direction of the message based on the sending and receiving domains.', - 'email.from.address': - 'The email address of the sender, typically from the RFC 5322 `From:` header field.', - 'email.local_id': - 'Unique identifier given to the email by the source that created the event.\nIdentifier is not persistent across hops.', - 'email.message_id': - 'Identifier from the RFC 5322 `Message-ID:` email header that refers to a particular email message.', - 
'email.origination_timestamp': - 'The date and time the email message was composed. Many email clients will fill in this value automatically when the message is sent by a user.', - 'email.reply_to.address': - 'The address that replies should be delivered to based on the value in the RFC 5322 `Reply-To:` header.', - 'email.sender.address': - 'Per RFC 5322, specifies the address responsible for the actual transmission of the message.', - 'email.subject': 'A brief summary of the topic of the message.', - 'email.to.address': 'The email address of recipient', - 'email.x_mailer': - 'The name of the application that was used to draft and send the original email message.', - 'error.code': 'Error code describing the error.', - 'error.id': 'Unique identifier for the error.', - 'error.message': 'Error message.', - 'error.stack_trace': 'The stack trace of this error in plain text.', - 'error.type': 'The type of the error, for example the class name of the exception.', - 'event.action': - 'The action captured by the event.\nThis describes the information in the event. It is more specific than `event.category`. Examples are `group-add`, `process-started`, `file-created`. The value is normally defined by the implementer.', - 'event.agent_id_status': - "Agents are normally responsible for populating the `agent.id` field value. If the system receiving events is capable of validating the value based on authentication information for the client then this field can be used to reflect the outcome of that validation.\nFor example if the agent's connection is authenticated with mTLS and the client cert contains the ID of the agent to which the cert was issued then the `agent.id` value in events can be checked against the certificate. 
If the values match then `event.agent_id_status: verified` is added to the event, otherwise one of the other allowed values should be used.\nIf no validation is performed then the field should be omitted.\nThe allowed values are:\n`verified` - The `agent.id` field value matches expected value obtained from auth metadata.\n`mismatch` - The `agent.id` field value does not match the expected value obtained from auth metadata.\n`missing` - There was no `agent.id` field in the event to validate.\n`auth_metadata_missing` - There was no auth metadata or it was missing information about the agent ID.", - 'event.category': - 'This is one of four ECS Categorization Fields, and indicates the second level in the ECS category hierarchy.\n`event.category` represents the "big buckets" of ECS categories. For example, filtering on `event.category:process` yields all events relating to process activity. This field is closely related to `event.type`, which is used as a subcategory.\nThis field is an array. This will allow proper categorization of some events that fall in multiple categories.', - 'event.code': - 'Identification code for this event, if one exists.\nSome event sources use event codes to identify messages unambiguously, regardless of message language or wording adjustments over time. An example of this is the Windows Event ID.', - 'event.created': - "event.created contains the date/time when the event was first read by an agent, or by your pipeline.\nThis field is distinct from @timestamp in that @timestamp typically contain the time extracted from the original event.\nIn most situations, these two timestamps will be slightly different. The difference can be used to calculate the delay between your source generating an event, and the time when your agent first processed it. 
This can be used to monitor your agent's or pipeline's ability to keep up with your event source.\nIn case the two timestamps are identical, @timestamp should be used.", - 'event.dataset': - "Name of the dataset.\nIf an event source publishes more than one type of log or events (e.g. access log, error log), the dataset is used to specify which one the event comes from.\nIt's recommended but not required to start the dataset name with the module name, followed by a dot, then the dataset name.", - 'event.duration': - 'Duration of the event in nanoseconds.\nIf event.start and event.end are known this value should be the difference between the end and start time.', - 'event.end': - 'event.end contains the date when the event ended or when the activity was last observed.', - 'event.hash': - 'Hash (perhaps logstash fingerprint) of raw field to be able to demonstrate log integrity.', - 'event.id': 'Unique ID to describe the event.', - 'event.ingested': - "Timestamp when an event arrived in the central data store.\nThis is different from `@timestamp`, which is when the event originally occurred. It's also different from `event.created`, which is meant to capture the first time an agent saw the event.\nIn normal conditions, assuming no tampering, the timestamps should chronologically look like this: `@timestamp` < `event.created` < `event.ingested`.", - 'event.kind': - 'This is one of four ECS Categorization Fields, and indicates the highest level in the ECS category hierarchy.\n`event.kind` gives high-level information about what type of information the event contains, without being specific to the contents of the event. For example, values of this field distinguish alert events from metric events.\nThe value of this field can be used to inform how these kinds of events should be handled. 
They may warrant different retention, different access control, it may also help understand whether the data coming in at a regular interval or not.', - 'event.module': - 'Name of the module this data is coming from.\nIf your monitoring agent supports the concept of modules or plugins to process events of a given source (e.g. Apache logs), `event.module` should contain the name of this module.', - 'event.original': - 'Raw text message of entire event. Used to demonstrate log integrity or where the full log message (before splitting it up in multiple parts) may be required, e.g. for reindex.\nThis field is not indexed and doc_values are disabled. It cannot be searched, but it can be retrieved from `_source`. If users wish to override this and index this field, please see `Field data types` in the `Elasticsearch Reference`.', - 'event.outcome': - 'This is one of four ECS Categorization Fields, and indicates the lowest level in the ECS category hierarchy.\n`event.outcome` simply denotes whether the event represents a success or a failure from the perspective of the entity that produced the event.\nNote that when a single transaction is described in multiple events, each event may populate different values of `event.outcome`, according to their perspective.\nAlso note that in the case of a compound event (a single event that contains multiple logical events), this field should be populated with the value that best captures the overall success or failure from the perspective of the event producer.\nFurther note that not all events will have an associated outcome. For example, this field is generally not populated for metric events, events with `event.type:info`, or any events for which an outcome does not make logical sense.', - 'event.provider': - 'Source of the event.\nEvent transports such as Syslog or the Windows Event Log typically mention the source of an event. It can be the name of the software that generated the event (e.g. 
Sysmon, httpd), or of a subsystem of the operating system (kernel, Microsoft-Windows-Security-Auditing).', - 'event.reason': - 'Reason why this event happened, according to the source.\nThis describes the why of a particular action or outcome captured in the event. Where `event.action` captures the action from the event, `event.reason` describes why that action was taken. For example, a web proxy with an `event.action` which denied the request may also populate `event.reason` with the reason why (e.g. `blocked site`).', - 'event.reference': - 'Reference URL linking to additional information about this event.\nThis URL links to a static definition of this event. Alert events, indicated by `event.kind:alert`, are a common use case for this field.', - 'event.risk_score': - "Risk score or priority of the event (e.g. security solutions). Use your system's original value here.", - 'event.risk_score_norm': - 'Normalized risk score or priority of the event, on a scale of 0 to 100.\nThis is mainly useful if you use more than one system that assigns risk scores, and you want to see a normalized value across all systems.', - 'event.sequence': - 'Sequence number of the event.\nThe sequence number is a value published by some event sources, to make the exact ordering of events unambiguous, regardless of the timestamp precision.', - 'event.severity': - "The numeric severity of the event according to your event source.\nWhat the different severity values mean can be different between sources and use cases. It's up to the implementer to make sure severities are consistent across events from the same source.\nThe Syslog severity belongs in `log.syslog.severity.code`. `event.severity` is meant to represent the severity according to the event source (e.g. firewall, IDS). 
If the event source does not publish its own severity, you may optionally copy the `log.syslog.severity.code` to `event.severity`.", - 'event.start': - 'event.start contains the date when the event started or when the activity was first observed.', - 'event.timezone': - 'This field should be populated when the event\'s timestamp does not include timezone information already (e.g. default Syslog timestamps). It\'s optional otherwise.\nAcceptable timezone formats are: a canonical ID (e.g. "Europe/Amsterdam"), abbreviated (e.g. "EST") or an HH:mm differential (e.g. "-05:00").', - 'event.type': - 'This is one of four ECS Categorization Fields, and indicates the third level in the ECS category hierarchy.\n`event.type` represents a categorization "sub-bucket" that, when used along with the `event.category` field values, enables filtering events down to a level appropriate for single visualization.\nThis field is an array. This will allow proper categorization of some events that fall in multiple event types.', - 'event.url': - 'URL linking to an external system to continue investigation of this event.\nThis URL links to another system where in-depth investigation of the specific occurrence of this event can take place. 
Alert events, indicated by `event.kind:alert`, are a common use case for this field.', - 'faas.coldstart': 'Boolean value indicating a cold start of a function.', - 'faas.execution': 'The execution ID of the current function execution.', - 'faas.id': - "The unique identifier of a serverless function.\nFor AWS Lambda it's the function ARN (Amazon Resource Name) without a version or alias suffix.", - 'faas.name': 'The name of a serverless function.', - 'faas.trigger.request_id': 'The ID of the trigger request , message, event, etc.', - 'faas.trigger.type': 'The trigger for the function execution.', - 'faas.version': 'The version of a serverless function.', - 'file.accessed': - 'Last time the file was accessed.\nNote that not all filesystems keep track of access time.', - 'file.attributes': - "Array of file attributes.\nAttributes names will vary by platform. Here's a non-exhaustive list of values that are expected in this field: archive, compressed, directory, encrypted, execute, hidden, read, readonly, system, write.", - 'file.code_signature.digest_algorithm': - 'The hashing algorithm used to sign the process.\nThis value can distinguish signatures when a file is signed multiple times by the same signer but with a different digest algorithm.', - 'file.code_signature.exists': 'Boolean to capture if a signature is present.', - 'file.code_signature.signing_id': - 'The identifier used to sign the process.\nThis is used to identify the application manufactured by a software vendor. The field is relevant to Apple *OS only.', - 'file.code_signature.status': - 'Additional information about the certificate status.\nThis is useful for logging cryptographic errors with the certificate validity or trust status. 
Leave unpopulated if the validity or trust of the certificate was unchecked.', - 'file.code_signature.subject_name': 'Subject name of the code signer', - 'file.code_signature.team_id': - 'The team identifier used to sign the process.\nThis is used to identify the team or vendor of a software product. The field is relevant to Apple *OS only.', - 'file.code_signature.timestamp': - 'Date and time when the code signature was generated and signed.', - 'file.code_signature.trusted': - 'Stores the trust status of the certificate chain.\nValidating the trust of the certificate chain may be complicated, and this field should only be populated by tools that actively check the status.', - 'file.code_signature.valid': - 'Boolean to capture if the digital signature is verified against the binary content.\nLeave unpopulated if a certificate was unchecked.', - 'file.created': 'File creation time.\nNote that not all filesystems store the creation time.', - 'file.ctime': - 'Last time the file attributes or metadata changed.\nNote that changes to the file content will update `mtime`. This implies `ctime` will be adjusted at the same time, since `mtime` is an attribute of the file.', - 'file.device': 'Device that is the source of the file.', - 'file.directory': - 'Directory where the file is located. It should include the drive letter, when appropriate.', - 'file.drive_letter': - 'Drive letter where the file is located. This field is only relevant on Windows.\nThe value should be uppercase, and not include the colon.', - 'file.elf.architecture': 'Machine architecture of the ELF file.', - 'file.elf.byte_order': 'Byte sequence of ELF file.', - 'file.elf.cpu_type': 'CPU type of the ELF file.', - 'file.elf.creation_date': - "Extracted when possible from the file's metadata. Indicates when it was built or compiled. 
It can also be faked by malware creators.", - 'file.elf.exports': 'List of exported element names and types.', - 'file.elf.go_import_hash': - 'A hash of the Go language imports in an ELF file excluding standard library imports. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', - 'file.elf.go_imports': 'List of imported Go language element names and types.', - 'file.elf.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', - 'file.elf.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'file.elf.go_stripped': - 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', - 'file.elf.header.abi_version': 'Version of the ELF Application Binary Interface (ABI).', - 'file.elf.header.class': 'Header class of the ELF file.', - 'file.elf.header.data': 'Data table of the ELF header.', - 'file.elf.header.entrypoint': 'Header entrypoint of the ELF file.', - 'file.elf.header.object_version': '"0x1" for original ELF files.', - 'file.elf.header.os_abi': 'Application Binary Interface (ABI) of the Linux OS.', - 'file.elf.header.type': 'Header type of the ELF file.', - 'file.elf.header.version': 'Version of the ELF header.', - 'file.elf.import_hash': - 'A hash of the imports in an ELF file. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is an ELF implementation of the Windows PE imphash.', - 'file.elf.imports': 'List of imported element names and types.', - 'file.elf.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'file.elf.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'file.elf.sections': - 'An array containing an object for each section of the ELF file.\nThe keys that should be present in these objects are defined by sub-fields underneath `elf.sections.*`.', - 'file.elf.sections.chi2': 'Chi-square probability distribution of the section.', - 'file.elf.sections.entropy': 'Shannon entropy calculation from the section.', - 'file.elf.sections.flags': 'ELF Section List flags.', - 'file.elf.sections.name': 'ELF Section List name.', - 'file.elf.sections.physical_offset': 'ELF Section List offset.', - 'file.elf.sections.physical_size': 'ELF Section List physical size.', - 'file.elf.sections.type': 'ELF Section List type.', - 'file.elf.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', - 'file.elf.sections.virtual_address': 'ELF Section List virtual address.', - 'file.elf.sections.virtual_size': 'ELF Section List virtual size.', - 'file.elf.segments': - 'An array containing an object for each segment of the ELF file.\nThe keys that should be present in these objects are defined by sub-fields underneath `elf.segments.*`.', - 'file.elf.segments.sections': 'ELF object segment sections.', - 'file.elf.segments.type': 'ELF object segment type.', - 'file.elf.shared_libraries': 'List of shared libraries used by this ELF object.', - 'file.elf.telfhash': 'telfhash symbol hash for ELF file.', - 'file.extension': - 'File extension, excluding the leading dot.\nNote 
that when the file name has multiple extensions (example.tar.gz), only the last one should be captured ("gz", not "tar.gz").', - 'file.fork_name': - 'A fork is additional data associated with a filesystem object.\nOn Linux, a resource fork is used to store additional data with a filesystem object. A file always has at least one fork for the data portion, and additional forks may exist.\nOn NTFS, this is analogous to an Alternate Data Stream (ADS), and the default data stream for a file is just called $DATA. Zone.Identifier is commonly used by Windows to track contents downloaded from the Internet. An ADS is typically of the form: `C:\\path\\to\\filename.extension:some_fork_name`, and `some_fork_name` is the value that should populate `fork_name`. `filename.extension` should populate `file.name`, and `extension` should populate `file.extension`. The full path, `file.path`, will include the fork name.', - 'file.gid': 'Primary group ID (GID) of the file.', - 'file.group': 'Primary group name of the file.', - 'file.hash.md5': 'MD5 hash.', - 'file.hash.sha1': 'SHA1 hash.', - 'file.hash.sha256': 'SHA256 hash.', - 'file.hash.sha384': 'SHA384 hash.', - 'file.hash.sha512': 'SHA512 hash.', - 'file.hash.ssdeep': 'SSDEEP hash.', - 'file.hash.tlsh': 'TLSH hash.', - 'file.inode': 'Inode representing the file in the filesystem.', - 'file.macho.go_import_hash': - 'A hash of the Go language imports in a Mach-O file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', - 'file.macho.go_imports': 'List of imported Go language element names and types.', - 'file.macho.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', - 'file.macho.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'file.macho.go_stripped': - 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', - 'file.macho.import_hash': - 'A hash of the imports in a Mach-O file. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a synonym for symhash.', - 'file.macho.imports': 'List of imported element names and types.', - 'file.macho.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'file.macho.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'file.macho.sections': - 'An array containing an object for each section of the Mach-O file.\nThe keys that should be present in these objects are defined by sub-fields underneath `macho.sections.*`.', - 'file.macho.sections.entropy': 'Shannon entropy calculation from the section.', - 'file.macho.sections.name': 'Mach-O Section List name.', - 'file.macho.sections.physical_size': 'Mach-O Section List physical size.', - 'file.macho.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', - 'file.macho.sections.virtual_size': - 'Mach-O Section List 
virtual size. This is always the same as `physical_size`.', - 'file.macho.symhash': - 'A hash of the imports in a Mach-O file. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a Mach-O implementation of the Windows PE imphash', - 'file.mime_type': - 'MIME type should identify the format of the file or stream of bytes using https://www.iana.org/assignments/media-types/media-types.xhtml[IANA official types], where possible. When more than one type is applicable, the most specific type should be used.', - 'file.mode': 'Mode of the file in octal representation.', - 'file.mtime': 'Last time the file content was modified.', - 'file.name': 'Name of the file including the extension, without the directory.', - 'file.owner': "File owner's username.", - 'file.path': - 'Full path to the file, including the file name. It should include the drive letter, when appropriate.', - 'file.pe.architecture': 'CPU architecture target for the file.', - 'file.pe.company': 'Internal company name of the file, provided at compile-time.', - 'file.pe.description': 'Internal description of the file, provided at compile-time.', - 'file.pe.file_version': 'Internal version of the file, provided at compile-time.', - 'file.pe.go_import_hash': - 'A hash of the Go language imports in a PE file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', - 'file.pe.go_imports': 'List of imported Go language element names and types.', - 'file.pe.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', - 'file.pe.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'file.pe.go_stripped': - 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', - 'file.pe.imphash': - 'A hash of the imports in a PE file. An imphash -- or import hash -- can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nLearn more at https://www.fireeye.com/blog/threat-research/2014/01/tracking-malware-import-hashing.html.', - 'file.pe.import_hash': - 'A hash of the imports in a PE file. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a synonym for imphash.', - 'file.pe.imports': 'List of imported element names and types.', - 'file.pe.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'file.pe.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'file.pe.original_file_name': 'Internal name of the file, provided at compile-time.', - 'file.pe.pehash': - 'A hash of the PE header and data from one or more PE sections. 
An pehash can be used to cluster files by transforming structural information about a file into a hash value.\nLearn more at https://www.usenix.org/legacy/events/leet09/tech/full_papers/wicherski/wicherski_html/index.html.', - 'file.pe.product': 'Internal product name of the file, provided at compile-time.', - 'file.pe.sections': - 'An array containing an object for each section of the PE file.\nThe keys that should be present in these objects are defined by sub-fields underneath `pe.sections.*`.', - 'file.pe.sections.entropy': 'Shannon entropy calculation from the section.', - 'file.pe.sections.name': 'PE Section List name.', - 'file.pe.sections.physical_size': 'PE Section List physical size.', - 'file.pe.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', - 'file.pe.sections.virtual_size': - 'PE Section List virtual size. This is always the same as `physical_size`.', - 'file.size': 'File size in bytes.\nOnly relevant when `file.type` is "file".', - 'file.target_path': 'Target path for symlinks.', - 'file.type': 'File type (file, dir, or symlink).', - 'file.uid': 'The user ID (UID) or security identifier (SID) of the file owner.', - 'file.x509.alternative_names': - 'List of subject alternative names (SAN). 
Name types vary by certificate authority and certificate type but commonly contain IP addresses, DNS names (and wildcards), and email addresses.', - 'file.x509.issuer.common_name': 'List of common name (CN) of issuing certificate authority.', - 'file.x509.issuer.country': 'List of country \\(C) codes', - 'file.x509.issuer.distinguished_name': - 'Distinguished name (DN) of issuing certificate authority.', - 'file.x509.issuer.locality': 'List of locality names (L)', - 'file.x509.issuer.organization': 'List of organizations (O) of issuing certificate authority.', - 'file.x509.issuer.organizational_unit': - 'List of organizational units (OU) of issuing certificate authority.', - 'file.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', - 'file.x509.not_after': 'Time at which the certificate is no longer considered valid.', - 'file.x509.not_before': 'Time at which the certificate is first considered valid.', - 'file.x509.public_key_algorithm': 'Algorithm used to generate the public key.', - 'file.x509.public_key_curve': - 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', - 'file.x509.public_key_exponent': - 'Exponent used to derive the public key. This is algorithm specific.', - 'file.x509.public_key_size': 'The size of the public key space in bits.', - 'file.x509.serial_number': - 'Unique serial number issued by the certificate authority. For consistency, if this value is alphanumeric, it should be formatted without colons and uppercase characters.', - 'file.x509.signature_algorithm': - 'Identifier for certificate signature algorithm. We recommend using names found in Go Lang Crypto library. 
See https://github.com/golang/go/blob/go1.14/src/crypto/x509/x509.go#L337-L353.', - 'file.x509.subject.common_name': 'List of common names (CN) of subject.', - 'file.x509.subject.country': 'List of country \\(C) code', - 'file.x509.subject.distinguished_name': - 'Distinguished name (DN) of the certificate subject entity.', - 'file.x509.subject.locality': 'List of locality names (L)', - 'file.x509.subject.organization': 'List of organizations (O) of subject.', - 'file.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', - 'file.x509.subject.state_or_province': 'List of state or province names (ST, S, or P)', - 'file.x509.version_number': 'Version of x509 format.', - 'group.domain': - 'Name of the directory the group is a member of.\nFor example, an LDAP or Active Directory domain name.', - 'group.id': 'Unique identifier for the group on the system/platform.', - 'group.name': 'Name of the group.', - 'host.architecture': 'Operating system architecture.', - 'host.boot.id': - 'Linux boot uuid taken from /proc/sys/kernel/random/boot_id. Note the boot_id value from /proc may or may not be the same in containers as on the host. Some container runtimes will bind mount a new boot_id value onto the proc file in each container.', - 'host.cpu.usage': - 'Percent CPU used which is normalized by the number of CPU cores and it ranges from 0 to 1.\nScaling factor: 1000.\nFor example: For a two core host, this value should be the average of the two cores, between 0 and 1.', - 'host.disk.read.bytes': - 'The total number of bytes (gauge) read successfully (aggregated from all disks) since the last metric collection.', - 'host.disk.write.bytes': - 'The total number of bytes (gauge) written successfully (aggregated from all disks) since the last metric collection.', - 'host.domain': - "Name of the domain of which the host is a member.\nFor example, on Windows this could be the host's Active Directory domain or NetBIOS domain name. 
For Linux this could be the domain of the host's LDAP provider.", - 'host.geo.city_name': 'City name.', - 'host.geo.continent_code': "Two-letter code representing continent's name.", - 'host.geo.continent_name': 'Name of the continent.', - 'host.geo.country_iso_code': 'Country ISO code.', - 'host.geo.country_name': 'Country name.', - 'host.geo.location': 'Longitude and latitude.', - 'host.geo.name': - 'User-defined description of a location, at the level of granularity they care about.\nCould be the name of their data centers, the floor number, if this describes a local physical entity, city names.\nNot typically used in automated geolocation.', - 'host.geo.postal_code': - 'Postal code associated with the location.\nValues appropriate for this field may also be known as a postcode or ZIP code and will vary widely from country to country.', - 'host.geo.region_iso_code': 'Region ISO code.', - 'host.geo.region_name': 'Region name.', - 'host.geo.timezone': 'The time zone of the location, such as IANA time zone name.', - 'host.hostname': - 'Hostname of the host.\nIt normally contains what the `hostname` command returns on the host machine.', - 'host.id': - 'Unique host id.\nAs hostname is not always unique, use values that are meaningful in your environment.\nExample: The current usage of `beat.name`.', - 'host.ip': 'Host ip addresses.', - 'host.mac': - 'Host MAC addresses.\nThe notation format from RFC 7042 is suggested: Each octet (that is, 8-bit byte) is represented by two [uppercase] hexadecimal digits giving the value of the octet as an unsigned integer. Successive octets are separated by a hyphen.', - 'host.name': - 'Name of the host.\nIt can contain what hostname returns on Unix systems, the fully qualified domain name (FQDN), or a name specified by the user. 
The recommended value is the lowercase FQDN of the host.', - 'host.network.egress.bytes': - 'The number of bytes (gauge) sent out on all network interfaces by the host since the last metric collection.', - 'host.network.egress.packets': - 'The number of packets (gauge) sent out on all network interfaces by the host since the last metric collection.', - 'host.network.ingress.bytes': - 'The number of bytes received (gauge) on all network interfaces by the host since the last metric collection.', - 'host.network.ingress.packets': - 'The number of packets (gauge) received on all network interfaces by the host since the last metric collection.', - 'host.os.family': 'OS family (such as redhat, debian, freebsd, windows).', - 'host.os.full': 'Operating system name, including the version or code name.', - 'host.os.kernel': 'Operating system kernel version as a raw string.', - 'host.os.name': 'Operating system name, without the version.', - 'host.os.platform': 'Operating system platform (such centos, ubuntu, windows).', - 'host.os.type': - "Use the `os.type` field to categorize the operating system into one of the broad commercial families.\nIf the OS you're dealing with is not listed as an expected value, the field should not be populated. Please let us know by opening an issue with ECS, to propose its addition.", - 'host.os.version': 'Operating system version as a raw string.', - 'host.pid_ns_ino': - 'This is the inode number of the namespace in the namespace file system (nsfs). 
Unsigned int inum in include/linux/ns_common.h.', - 'host.risk.calculated_level': - 'A risk classification level calculated by an internal system as part of entity analytics and entity risk scoring.', - 'host.risk.calculated_score': - 'A risk classification score calculated by an internal system as part of entity analytics and entity risk scoring.', - 'host.risk.calculated_score_norm': - 'A risk classification score calculated by an internal system as part of entity analytics and entity risk scoring, and normalized to a range of 0 to 100.', - 'host.risk.static_level': - 'A risk classification level obtained from outside the system, such as from some external Threat Intelligence Platform.', - 'host.risk.static_score': - 'A risk classification score obtained from outside the system, such as from some external Threat Intelligence Platform.', - 'host.risk.static_score_norm': - 'A risk classification score obtained from outside the system, such as from some external Threat Intelligence Platform, and normalized to a range of 0 to 100.', - 'host.type': - 'Type of host.\nFor Cloud providers this can be the machine type like `t2.medium`. If vm, this could be the container, for example, or other information meaningful in your environment.', - 'host.uptime': 'Seconds the host has been up.', - 'http.request.body.bytes': 'Size in bytes of the request body.', - 'http.request.body.content': 'The full HTTP request body.', - 'http.request.bytes': 'Total size in bytes of the request (body and headers).', - 'http.request.id': - 'A unique identifier for each HTTP request to correlate logs between clients and servers in transactions.\nThe id may be contained in a non-standard HTTP header, such as `X-Request-ID` or `X-Correlation-ID`.', - 'http.request.method': - 'HTTP request method.\nThe value should retain its casing from the original event. 
For example, `GET`, `get`, and `GeT` are all considered valid values for this field.', - 'http.request.mime_type': - "Mime type of the body of the request.\nThis value must only be populated based on the content of the request body, not on the `Content-Type` header. Comparing the mime type of a request with the request's Content-Type header can be helpful in detecting threats or misconfigured clients.", - 'http.request.referrer': 'Referrer for this HTTP request.', - 'http.response.body.bytes': 'Size in bytes of the response body.', - 'http.response.body.content': 'The full HTTP response body.', - 'http.response.bytes': 'Total size in bytes of the response (body and headers).', - 'http.response.mime_type': - "Mime type of the body of the response.\nThis value must only be populated based on the content of the response body, not on the `Content-Type` header. Comparing the mime type of a response with the response's Content-Type header can be helpful in detecting misconfigured servers.", - 'http.response.status_code': 'HTTP response status code.', - 'http.version': 'HTTP version.', - labels: - 'Custom key/value pairs.\nCan be used to add meta information to events. Should not contain nested objects. All values are stored as keyword.\nExample: `docker` and `k8s` labels.', - 'log.file.path': - "Full path to the log file this event came from, including the file name. It should include the drive letter, when appropriate.\nIf the event wasn't read from a log file, do not populate this field.", - 'log.level': - "Original log level of the log event.\nIf the source of the event provides a log level or textual severity, this is the one that goes in `log.level`. If your source doesn't specify one, you may put your event transport's severity here (e.g. Syslog severity).\nSome examples are `warn`, `err`, `i`, `informational`.", - 'log.logger': - 'The name of the logger inside an application. 
This is usually the name of the class which initialized the logger, or can be a custom name.', - 'log.origin.file.line': - 'The line number of the file containing the source code which originated the log event.', - 'log.origin.file.name': - 'The name of the file containing the source code which originated the log event.\nNote that this field is not meant to capture the log file. The correct field to capture the log file is `log.file.path`.', - 'log.origin.function': 'The name of the function or method which originated the log event.', - 'log.syslog': - 'The Syslog metadata of the event, if the event was transmitted via Syslog. Please see RFCs 5424 or 3164.', - 'log.syslog.appname': - 'The device or application that originated the Syslog message, if available.', - 'log.syslog.facility.code': - 'The Syslog numeric facility of the log event, if available.\nAccording to RFCs 5424 and 3164, this value should be an integer between 0 and 23.', - 'log.syslog.facility.name': 'The Syslog text-based facility of the log event, if available.', - 'log.syslog.hostname': - 'The hostname, FQDN, or IP of the machine that originally sent the Syslog message. This is sourced from the hostname field of the syslog header. Depending on the environment, this value may be different from the host that handled the event, especially if the host handling the events is acting as a collector.', - 'log.syslog.msgid': - 'An identifier for the type of Syslog message, if available. Only applicable for RFC 5424 messages.', - 'log.syslog.priority': - 'Syslog numeric priority of the event, if available.\nAccording to RFCs 5424 and 3164, the priority is 8 * facility + severity. 
This number is therefore expected to contain a value between 0 and 191.', - 'log.syslog.procid': 'The process name or ID that originated the Syslog message, if available.', - 'log.syslog.severity.code': - "The Syslog numeric severity of the log event, if available.\nIf the event source publishing via Syslog provides a different numeric severity value (e.g. firewall, IDS), your source's numeric severity should go to `event.severity`. If the event source does not specify a distinct severity, you can optionally copy the Syslog severity to `event.severity`.", - 'log.syslog.severity.name': - "The Syslog numeric severity of the log event, if available.\nIf the event source publishing via Syslog provides a different severity value (e.g. firewall, IDS), your source's text severity should go to `log.level`. If the event source does not specify a distinct severity, you can optionally copy the Syslog severity to `log.level`.", - 'log.syslog.structured_data': - 'Structured data expressed in RFC 5424 messages, if available. These are key-value pairs formed from the structured data portion of the syslog message, as defined in RFC 5424 Section 6.3.', - 'log.syslog.version': - 'The version of the Syslog protocol specification. 
Only applicable for RFC 5424 messages.', - message: - 'For log events the message field contains the log message, optimized for viewing in a log viewer.\nFor structured logs without an original message field, other fields can be concatenated to form a human-readable summary of the event.\nIf multiple messages exist, they can be combined into one message.', - 'network.application': - "When a specific application or service is identified from network connection details (source/dest IPs, ports, certificates, or wire format), this field captures the application's or service's name.\nFor example, the original event identifies the network connection being from a specific web service in a `https` network connection, like `facebook` or `twitter`.\nThe field value must be normalized to lowercase for querying.", - 'network.bytes': - 'Total bytes transferred in both directions.\nIf `source.bytes` and `destination.bytes` are known, `network.bytes` is their sum.', - 'network.community_id': - 'A hash of source and destination IPs and ports, as well as the protocol used in a communication. This is a tool-agnostic standard to identify flows.\nLearn more at https://github.com/corelight/community-id-spec.', - 'network.direction': - 'Direction of the network traffic.\nWhen mapping events from a host-based monitoring context, populate this field from the host\'s point of view, using the values "ingress" or "egress".\nWhen mapping events from a network or perimeter-based monitoring context, populate this field from the point of view of the network perimeter, using the values "inbound", "outbound", "internal" or "external".\nNote that "internal" is not crossing perimeter boundaries, and is meant to describe communication between two hosts within the perimeter. Note also that "external" is meant to describe traffic between two hosts that are external to the perimeter. 
This could for example be useful for ISPs or VPN service providers.', - 'network.forwarded_ip': 'Host IP address when the source IP address is the proxy.', - 'network.iana_number': - 'IANA Protocol Number (https://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml). Standardized list of protocols. This aligns well with NetFlow and sFlow related logs which use the IANA Protocol Number.', - 'network.inner': - 'Network.inner fields are added in addition to network.vlan fields to describe the innermost VLAN when q-in-q VLAN tagging is present. Allowed fields include vlan.id and vlan.name. Inner vlan fields are typically used when sending traffic with multiple 802.1q encapsulations to a network sensor (e.g. Zeek, Wireshark.)', - 'network.inner.vlan.id': 'VLAN ID as reported by the observer.', - 'network.inner.vlan.name': 'Optional VLAN name as reported by the observer.', - 'network.name': 'Name given by operators to sections of their network.', - 'network.packets': - 'Total packets transferred in both directions.\nIf `source.packets` and `destination.packets` are known, `network.packets` is their sum.', - 'network.protocol': - 'In the OSI Model this would be the Application Layer protocol. For example, `http`, `dns`, or `ssh`.\nThe field value must be normalized to lowercase for querying.', - 'network.transport': - 'Same as network.iana_number, but instead using the Keyword name of the transport layer (udp, tcp, ipv6-icmp, etc.)\nThe field value must be normalized to lowercase for querying.', - 'network.type': - 'In the OSI Model this would be the Network Layer. ipv4, ipv6, ipsec, pim, etc\nThe field value must be normalized to lowercase for querying.', - 'network.vlan.id': 'VLAN ID as reported by the observer.', - 'network.vlan.name': 'Optional VLAN name as reported by the observer.', - 'observer.egress': - 'Observer.egress holds information like interface number and name, vlan, and zone information to classify egress traffic. 
Single armed monitoring such as a network sensor on a span port should only use observer.ingress to categorize traffic.', - 'observer.egress.interface.alias': - 'Interface alias as reported by the system, typically used in firewall implementations for e.g. inside, outside, or dmz logical interface naming.', - 'observer.egress.interface.id': - 'Interface ID as reported by an observer (typically SNMP interface ID).', - 'observer.egress.interface.name': 'Interface name as reported by the system.', - 'observer.egress.vlan.id': 'VLAN ID as reported by the observer.', - 'observer.egress.vlan.name': 'Optional VLAN name as reported by the observer.', - 'observer.egress.zone': - 'Network zone of outbound traffic as reported by the observer to categorize the destination area of egress traffic, e.g. Internal, External, DMZ, HR, Legal, etc.', - 'observer.geo.city_name': 'City name.', - 'observer.geo.continent_code': "Two-letter code representing continent's name.", - 'observer.geo.continent_name': 'Name of the continent.', - 'observer.geo.country_iso_code': 'Country ISO code.', - 'observer.geo.country_name': 'Country name.', - 'observer.geo.location': 'Longitude and latitude.', - 'observer.geo.name': - 'User-defined description of a location, at the level of granularity they care about.\nCould be the name of their data centers, the floor number, if this describes a local physical entity, city names.\nNot typically used in automated geolocation.', - 'observer.geo.postal_code': - 'Postal code associated with the location.\nValues appropriate for this field may also be known as a postcode or ZIP code and will vary widely from country to country.', - 'observer.geo.region_iso_code': 'Region ISO code.', - 'observer.geo.region_name': 'Region name.', - 'observer.geo.timezone': 'The time zone of the location, such as IANA time zone name.', - 'observer.hostname': 'Hostname of the observer.', - 'observer.ingress': - 'Observer.ingress holds information like interface number and name, 
vlan, and zone information to classify ingress traffic. Single armed monitoring such as a network sensor on a span port should only use observer.ingress to categorize traffic.', - 'observer.ingress.interface.alias': - 'Interface alias as reported by the system, typically used in firewall implementations for e.g. inside, outside, or dmz logical interface naming.', - 'observer.ingress.interface.id': - 'Interface ID as reported by an observer (typically SNMP interface ID).', - 'observer.ingress.interface.name': 'Interface name as reported by the system.', - 'observer.ingress.vlan.id': 'VLAN ID as reported by the observer.', - 'observer.ingress.vlan.name': 'Optional VLAN name as reported by the observer.', - 'observer.ingress.zone': - 'Network zone of incoming traffic as reported by the observer to categorize the source area of ingress traffic. e.g. internal, External, DMZ, HR, Legal, etc.', - 'observer.ip': 'IP addresses of the observer.', - 'observer.mac': - 'MAC addresses of the observer.\nThe notation format from RFC 7042 is suggested: Each octet (that is, 8-bit byte) is represented by two [uppercase] hexadecimal digits giving the value of the octet as an unsigned integer. Successive octets are separated by a hyphen.', - 'observer.name': - 'Custom name of the observer.\nThis is a name that can be given to an observer. 
This can be helpful for example if multiple firewalls of the same model are used in an organization.\nIf no custom name is needed, the field can be left empty.', - 'observer.os.family': 'OS family (such as redhat, debian, freebsd, windows).', - 'observer.os.full': 'Operating system name, including the version or code name.', - 'observer.os.kernel': 'Operating system kernel version as a raw string.', - 'observer.os.name': 'Operating system name, without the version.', - 'observer.os.platform': 'Operating system platform (such centos, ubuntu, windows).', - 'observer.os.type': - "Use the `os.type` field to categorize the operating system into one of the broad commercial families.\nIf the OS you're dealing with is not listed as an expected value, the field should not be populated. Please let us know by opening an issue with ECS, to propose its addition.", - 'observer.os.version': 'Operating system version as a raw string.', - 'observer.product': 'The product name of the observer.', - 'observer.serial_number': 'Observer serial number.', - 'observer.type': - 'The type of the observer the data is coming from.\nThere is no predefined list of observer types. 
Some examples are `forwarder`, `firewall`, `ids`, `ips`, `proxy`, `poller`, `sensor`, `APM server`.', - 'observer.vendor': 'Vendor name of the observer.', - 'observer.version': 'Observer version.', - 'orchestrator.api_version': 'API version being used to carry out the action', - 'orchestrator.cluster.id': 'Unique ID of the cluster.', - 'orchestrator.cluster.name': 'Name of the cluster.', - 'orchestrator.cluster.url': 'URL of the API used to manage the cluster.', - 'orchestrator.cluster.version': 'The version of the cluster.', - 'orchestrator.namespace': 'Namespace in which the action is taking place.', - 'orchestrator.organization': - 'Organization affected by the event (for multi-tenant orchestrator setups).', - 'orchestrator.resource.annotation': 'The list of annotations added to the resource.', - 'orchestrator.resource.id': 'Unique ID of the resource being acted upon.', - 'orchestrator.resource.ip': - 'IP address assigned to the resource associated with the event being observed. In the case of a Kubernetes Pod, this array would contain only one element: the IP of the Pod (as opposed to the Node on which the Pod is running).', - 'orchestrator.resource.label': 'The list of labels added to the resource.', - 'orchestrator.resource.name': 'Name of the resource being acted upon.', - 'orchestrator.resource.parent.type': - 'Type or kind of the parent resource associated with the event being observed. In Kubernetes, this will be the name of a built-in workload resource (e.g., Deployment, StatefulSet, DaemonSet).', - 'orchestrator.resource.type': 'Type of resource being acted upon.', - 'orchestrator.type': 'Orchestrator cluster type (e.g. 
kubernetes, nomad or cloudfoundry).', - 'organization.id': 'Unique identifier for the organization.', - 'organization.name': 'Organization name.', - 'package.architecture': 'Package architecture.', - 'package.build_version': - 'Additional information about the build version of the installed package.\nFor example use the commit SHA of a non-released package.', - 'package.checksum': 'Checksum of the installed package for verification.', - 'package.description': 'Description of the package.', - 'package.install_scope': 'Indicating how the package was installed, e.g. user-local, global.', - 'package.installed': 'Time when package was installed.', - 'package.license': - 'License under which the package was released.\nUse a short name, e.g. the license identifier from SPDX License List where possible (https://spdx.org/licenses/).', - 'package.name': 'Package name', - 'package.path': 'Path where the package is installed.', - 'package.reference': 'Home page or reference URL of the software in this package, if available.', - 'package.size': 'Package size in bytes.', - 'package.type': - 'Type of package.\nThis should contain the package file type, rather than the package manager name. Examples: rpm, dpkg, brew, npm, gem, nupkg, jar.', - 'package.version': 'Package version', - 'process.args': - 'Array of process arguments, starting with the absolute path to the executable.\nMay be filtered to protect sensitive information.', - 'process.args_count': - 'Length of the process.args array.\nThis field can be useful for querying or performing bucket analysis on how many arguments were provided to start a process. 
More arguments may be an indication of suspicious activity.', - 'process.code_signature.digest_algorithm': - 'The hashing algorithm used to sign the process.\nThis value can distinguish signatures when a file is signed multiple times by the same signer but with a different digest algorithm.', - 'process.code_signature.exists': 'Boolean to capture if a signature is present.', - 'process.code_signature.signing_id': - 'The identifier used to sign the process.\nThis is used to identify the application manufactured by a software vendor. The field is relevant to Apple *OS only.', - 'process.code_signature.status': - 'Additional information about the certificate status.\nThis is useful for logging cryptographic errors with the certificate validity or trust status. Leave unpopulated if the validity or trust of the certificate was unchecked.', - 'process.code_signature.subject_name': 'Subject name of the code signer', - 'process.code_signature.team_id': - 'The team identifier used to sign the process.\nThis is used to identify the team or vendor of a software product. 
The field is relevant to Apple *OS only.', - 'process.code_signature.timestamp': - 'Date and time when the code signature was generated and signed.', - 'process.code_signature.trusted': - 'Stores the trust status of the certificate chain.\nValidating the trust of the certificate chain may be complicated, and this field should only be populated by tools that actively check the status.', - 'process.code_signature.valid': - 'Boolean to capture if the digital signature is verified against the binary content.\nLeave unpopulated if a certificate was unchecked.', - 'process.command_line': - 'Full command line that started the process, including the absolute path to the executable, and all arguments.\nSome arguments may be filtered to protect sensitive information.', - 'process.elf.architecture': 'Machine architecture of the ELF file.', - 'process.elf.byte_order': 'Byte sequence of ELF file.', - 'process.elf.cpu_type': 'CPU type of the ELF file.', - 'process.elf.creation_date': - "Extracted when possible from the file's metadata. Indicates when it was built or compiled. It can also be faked by malware creators.", - 'process.elf.exports': 'List of exported element names and types.', - 'process.elf.go_import_hash': - 'A hash of the Go language imports in an ELF file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', - 'process.elf.go_imports': 'List of imported Go language element names and types.', - 'process.elf.go_imports_names_entropy': - 'Shannon entropy calculation from the list of Go imports.', - 'process.elf.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'process.elf.go_stripped': - 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', - 'process.elf.header.abi_version': 'Version of the ELF Application Binary Interface (ABI).', - 'process.elf.header.class': 'Header class of the ELF file.', - 'process.elf.header.data': 'Data table of the ELF header.', - 'process.elf.header.entrypoint': 'Header entrypoint of the ELF file.', - 'process.elf.header.object_version': '"0x1" for original ELF files.', - 'process.elf.header.os_abi': 'Application Binary Interface (ABI) of the Linux OS.', - 'process.elf.header.type': 'Header type of the ELF file.', - 'process.elf.header.version': 'Version of the ELF header.', - 'process.elf.import_hash': - 'A hash of the imports in an ELF file. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is an ELF implementation of the Windows PE imphash.', - 'process.elf.imports': 'List of imported element names and types.', - 'process.elf.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'process.elf.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'process.elf.sections': - 'An array containing an object for each section of the ELF file.\nThe keys that should be present in these objects are defined by sub-fields underneath `elf.sections.*`.', - 'process.elf.sections.chi2': 'Chi-square probability distribution of the section.', - 'process.elf.sections.entropy': 'Shannon entropy calculation from the section.', - 'process.elf.sections.flags': 'ELF Section List flags.', - 'process.elf.sections.name': 'ELF Section List name.', - 'process.elf.sections.physical_offset': 'ELF Section List offset.', - 'process.elf.sections.physical_size': 'ELF Section List physical size.', - 'process.elf.sections.type': 'ELF Section List type.', - 'process.elf.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', - 'process.elf.sections.virtual_address': 'ELF Section List virtual address.', - 'process.elf.sections.virtual_size': 'ELF Section List virtual size.', - 'process.elf.segments': - 'An array containing an object for each segment of the ELF file.\nThe keys that should be present in these objects are defined by sub-fields underneath `elf.segments.*`.', - 'process.elf.segments.sections': 'ELF object segment sections.', - 'process.elf.segments.type': 'ELF object segment type.', - 'process.elf.shared_libraries': 'List of shared libraries used by this ELF object.', - 'process.elf.telfhash': 'telfhash symbol hash for ELF file.', - 
'process.end': 'The time the process ended.', - 'process.entity_id': - 'Unique identifier for the process.\nThe implementation of this is specified by the data source, but some examples of what could be used here are a process-generated UUID, Sysmon Process GUIDs, or a hash of some uniquely identifying components of a process.\nConstructing a globally unique identifier is a common practice to mitigate PID reuse as well as to identify a specific process over time, across multiple monitored hosts.', - 'process.entry_leader.args': - 'Array of process arguments, starting with the absolute path to the executable.\nMay be filtered to protect sensitive information.', - 'process.entry_leader.args_count': - 'Length of the process.args array.\nThis field can be useful for querying or performing bucket analysis on how many arguments were provided to start a process. More arguments may be an indication of suspicious activity.', - 'process.entry_leader.attested_groups.name': 'Name of the group.', - 'process.entry_leader.attested_user.id': 'Unique identifier of the user.', - 'process.entry_leader.attested_user.name': 'Short name or login of the user.', - 'process.entry_leader.command_line': - 'Full command line that started the process, including the absolute path to the executable, and all arguments.\nSome arguments may be filtered to protect sensitive information.', - 'process.entry_leader.entity_id': - 'Unique identifier for the process.\nThe implementation of this is specified by the data source, but some examples of what could be used here are a process-generated UUID, Sysmon Process GUIDs, or a hash of some uniquely identifying components of a process.\nConstructing a globally unique identifier is a common practice to mitigate PID reuse as well as to identify a specific process over time, across multiple monitored hosts.', - 'process.entry_leader.entry_meta.source.ip': 'IP address of the source (IPv4 or IPv6).', - 'process.entry_leader.entry_meta.type': - 'The entry type 
for the entry session leader. Values include: init(e.g systemd), sshd, ssm, kubelet, teleport, terminal, console\nNote: This field is only set on process.session_leader.', - 'process.entry_leader.executable': 'Absolute path to the process executable.', - 'process.entry_leader.group.id': 'Unique identifier for the group on the system/platform.', - 'process.entry_leader.group.name': 'Name of the group.', - 'process.entry_leader.interactive': - 'Whether the process is connected to an interactive shell.\nProcess interactivity is inferred from the processes file descriptors. If the character device for the controlling tty is the same as stdin and stderr for the process, the process is considered interactive.\nNote: A non-interactive process can belong to an interactive session and is simply one that does not have open file descriptors reading the controlling TTY on FD 0 (stdin) or writing to the controlling TTY on FD 2 (stderr). A backgrounded process is still considered interactive if stdin and stderr are connected to the controlling TTY.', - 'process.entry_leader.name': 'Process name.\nSometimes called program name or similar.', - 'process.entry_leader.parent.entity_id': - 'Unique identifier for the process.\nThe implementation of this is specified by the data source, but some examples of what could be used here are a process-generated UUID, Sysmon Process GUIDs, or a hash of some uniquely identifying components of a process.\nConstructing a globally unique identifier is a common practice to mitigate PID reuse as well as to identify a specific process over time, across multiple monitored hosts.', - 'process.entry_leader.parent.pid': 'Process id.', - 'process.entry_leader.parent.session_leader.entity_id': - 'Unique identifier for the process.\nThe implementation of this is specified by the data source, but some examples of what could be used here are a process-generated UUID, Sysmon Process GUIDs, or a hash of some uniquely identifying components of a 
process.\nConstructing a globally unique identifier is a common practice to mitigate PID reuse as well as to identify a specific process over time, across multiple monitored hosts.', - 'process.entry_leader.parent.session_leader.pid': 'Process id.', - 'process.entry_leader.parent.session_leader.start': 'The time the process started.', - 'process.entry_leader.parent.start': 'The time the process started.', - 'process.entry_leader.pid': 'Process id.', - 'process.entry_leader.real_group.id': 'Unique identifier for the group on the system/platform.', - 'process.entry_leader.real_group.name': 'Name of the group.', - 'process.entry_leader.real_user.id': 'Unique identifier of the user.', - 'process.entry_leader.real_user.name': 'Short name or login of the user.', - 'process.entry_leader.same_as_process': - "This boolean is used to identify if a leader process is the same as the top level process.\nFor example, if `process.group_leader.same_as_process = true`, it means the process event in question is the leader of its process group. Details under `process.*` like `pid` would be the same under `process.group_leader.*` The same applies for both `process.session_leader` and `process.entry_leader`.\nThis field exists to the benefit of EQL and other rule engines since it's not possible to compare equality between two fields in a single document. 
e.g `process.entity_id` = `process.group_leader.entity_id` (top level process is the process group leader) OR `process.entity_id` = `process.entry_leader.entity_id` (top level process is the entry session leader)\nInstead these rules could be written like: `process.group_leader.same_as_process: true` OR `process.entry_leader.same_as_process: true`\nNote: This field is only set on `process.entry_leader`, `process.session_leader` and `process.group_leader`.", - 'process.entry_leader.saved_group.id': 'Unique identifier for the group on the system/platform.', - 'process.entry_leader.saved_group.name': 'Name of the group.', - 'process.entry_leader.saved_user.id': 'Unique identifier of the user.', - 'process.entry_leader.saved_user.name': 'Short name or login of the user.', - 'process.entry_leader.start': 'The time the process started.', - 'process.entry_leader.supplemental_groups.id': - 'Unique identifier for the group on the system/platform.', - 'process.entry_leader.supplemental_groups.name': 'Name of the group.', - 'process.entry_leader.tty': - 'Information about the controlling TTY device. If set, the process belongs to an interactive session.', - 'process.entry_leader.tty.char_device.major': - 'The major number identifies the driver associated with the device. The character device\'s major and minor numbers can be algorithmically combined to produce the more familiar terminal identifiers such as "ttyS0" and "pts/0". For more details, please refer to the Linux kernel documentation.', - 'process.entry_leader.tty.char_device.minor': - 'The minor number is used only by the driver specified by the major number; other parts of the kernel don\u2019t use it, and merely pass it along to the driver. 
It is common for a driver to control several devices; the minor number provides a way for the driver to differentiate among them.', - 'process.entry_leader.user.id': 'Unique identifier of the user.', - 'process.entry_leader.user.name': 'Short name or login of the user.', - 'process.entry_leader.working_directory': 'The working directory of the process.', - 'process.env_vars': - 'Array of environment variable bindings. Captured from a snapshot of the environment at the time of execution.\nMay be filtered to protect sensitive information.', - 'process.executable': 'Absolute path to the process executable.', - 'process.exit_code': - 'The exit code of the process, if this is a termination event.\nThe field should be absent if there is no exit code for the event (e.g. process start).', - 'process.group_leader.args': - 'Array of process arguments, starting with the absolute path to the executable.\nMay be filtered to protect sensitive information.', - 'process.group_leader.args_count': - 'Length of the process.args array.\nThis field can be useful for querying or performing bucket analysis on how many arguments were provided to start a process. 
More arguments may be an indication of suspicious activity.', - 'process.group_leader.command_line': - 'Full command line that started the process, including the absolute path to the executable, and all arguments.\nSome arguments may be filtered to protect sensitive information.', - 'process.group_leader.entity_id': - 'Unique identifier for the process.\nThe implementation of this is specified by the data source, but some examples of what could be used here are a process-generated UUID, Sysmon Process GUIDs, or a hash of some uniquely identifying components of a process.\nConstructing a globally unique identifier is a common practice to mitigate PID reuse as well as to identify a specific process over time, across multiple monitored hosts.', - 'process.group_leader.executable': 'Absolute path to the process executable.', - 'process.group_leader.group.id': 'Unique identifier for the group on the system/platform.', - 'process.group_leader.group.name': 'Name of the group.', - 'process.group_leader.interactive': - 'Whether the process is connected to an interactive shell.\nProcess interactivity is inferred from the processes file descriptors. If the character device for the controlling tty is the same as stdin and stderr for the process, the process is considered interactive.\nNote: A non-interactive process can belong to an interactive session and is simply one that does not have open file descriptors reading the controlling TTY on FD 0 (stdin) or writing to the controlling TTY on FD 2 (stderr). 
A backgrounded process is still considered interactive if stdin and stderr are connected to the controlling TTY.', - 'process.group_leader.name': 'Process name.\nSometimes called program name or similar.', - 'process.group_leader.pid': 'Process id.', - 'process.group_leader.real_group.id': 'Unique identifier for the group on the system/platform.', - 'process.group_leader.real_group.name': 'Name of the group.', - 'process.group_leader.real_user.id': 'Unique identifier of the user.', - 'process.group_leader.real_user.name': 'Short name or login of the user.', - 'process.group_leader.same_as_process': - "This boolean is used to identify if a leader process is the same as the top level process.\nFor example, if `process.group_leader.same_as_process = true`, it means the process event in question is the leader of its process group. Details under `process.*` like `pid` would be the same under `process.group_leader.*` The same applies for both `process.session_leader` and `process.entry_leader`.\nThis field exists to the benefit of EQL and other rule engines since it's not possible to compare equality between two fields in a single document. 
e.g `process.entity_id` = `process.group_leader.entity_id` (top level process is the process group leader) OR `process.entity_id` = `process.entry_leader.entity_id` (top level process is the entry session leader)\nInstead these rules could be written like: `process.group_leader.same_as_process: true` OR `process.entry_leader.same_as_process: true`\nNote: This field is only set on `process.entry_leader`, `process.session_leader` and `process.group_leader`.", - 'process.group_leader.saved_group.id': 'Unique identifier for the group on the system/platform.', - 'process.group_leader.saved_group.name': 'Name of the group.', - 'process.group_leader.saved_user.id': 'Unique identifier of the user.', - 'process.group_leader.saved_user.name': 'Short name or login of the user.', - 'process.group_leader.start': 'The time the process started.', - 'process.group_leader.supplemental_groups.id': - 'Unique identifier for the group on the system/platform.', - 'process.group_leader.supplemental_groups.name': 'Name of the group.', - 'process.group_leader.tty': - 'Information about the controlling TTY device. If set, the process belongs to an interactive session.', - 'process.group_leader.tty.char_device.major': - 'The major number identifies the driver associated with the device. The character device\'s major and minor numbers can be algorithmically combined to produce the more familiar terminal identifiers such as "ttyS0" and "pts/0". For more details, please refer to the Linux kernel documentation.', - 'process.group_leader.tty.char_device.minor': - 'The minor number is used only by the driver specified by the major number; other parts of the kernel don\u2019t use it, and merely pass it along to the driver. 
It is common for a driver to control several devices; the minor number provides a way for the driver to differentiate among them.', - 'process.group_leader.user.id': 'Unique identifier of the user.', - 'process.group_leader.user.name': 'Short name or login of the user.', - 'process.group_leader.working_directory': 'The working directory of the process.', - 'process.hash.md5': 'MD5 hash.', - 'process.hash.sha1': 'SHA1 hash.', - 'process.hash.sha256': 'SHA256 hash.', - 'process.hash.sha384': 'SHA384 hash.', - 'process.hash.sha512': 'SHA512 hash.', - 'process.hash.ssdeep': 'SSDEEP hash.', - 'process.hash.tlsh': 'TLSH hash.', - 'process.interactive': - 'Whether the process is connected to an interactive shell.\nProcess interactivity is inferred from the processes file descriptors. If the character device for the controlling tty is the same as stdin and stderr for the process, the process is considered interactive.\nNote: A non-interactive process can belong to an interactive session and is simply one that does not have open file descriptors reading the controlling TTY on FD 0 (stdin) or writing to the controlling TTY on FD 2 (stderr). 
A backgrounded process is still considered interactive if stdin and stderr are connected to the controlling TTY.', - 'process.io': - 'A chunk of input or output (IO) from a single process.\nThis field only appears on the top level process object, which is the process that wrote the output or read the input.', - 'process.io.bytes_skipped': - 'An array of byte offsets and lengths denoting where IO data has been skipped.', - 'process.io.bytes_skipped.length': 'The length of bytes skipped.', - 'process.io.bytes_skipped.offset': - "The byte offset into this event's io.text (or io.bytes in the future) where length bytes were skipped.", - 'process.io.max_bytes_per_process_exceeded': - 'If true, the process producing the output has exceeded the max_kilobytes_per_process configuration setting.', - 'process.io.text': - 'A chunk of output or input sanitized to UTF-8.\nBest efforts are made to ensure complete lines are captured in these events. Assumptions should NOT be made that multiple lines will appear in the same event. TTY output may contain terminal control codes such as for cursor movement, so some string queries may not match due to terminal codes inserted between characters of a word.', - 'process.io.total_bytes_captured': 'The total number of bytes captured in this event.', - 'process.io.total_bytes_skipped': - 'The total number of bytes that were not captured due to implementation restrictions such as buffer size limits. Implementors should strive to ensure this value is always zero', - 'process.io.type': - "The type of object on which the IO action (read or write) was taken.\nCurrently only 'tty' is supported. Other types may be added in the future for 'file' and 'socket' support.", - 'process.macho.go_import_hash': - 'A hash of the Go language imports in a Mach-O file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', - 'process.macho.go_imports': 'List of imported Go language element names and types.', - 'process.macho.go_imports_names_entropy': - 'Shannon entropy calculation from the list of Go imports.', - 'process.macho.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'process.macho.go_stripped': - 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', - 'process.macho.import_hash': - 'A hash of the imports in a Mach-O file. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a synonym for symhash.', - 'process.macho.imports': 'List of imported element names and types.', - 'process.macho.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'process.macho.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'process.macho.sections': - 'An array containing an object for each section of the Mach-O file.\nThe keys that should be present in these objects are defined by sub-fields underneath `macho.sections.*`.', - 'process.macho.sections.entropy': 'Shannon entropy calculation from the section.', - 'process.macho.sections.name': 'Mach-O Section List name.', - 'process.macho.sections.physical_size': 'Mach-O Section List physical size.', - 'process.macho.sections.var_entropy': - 'Variance for Shannon entropy calculation from the section.', - 
'process.macho.sections.virtual_size': - 'Mach-O Section List virtual size. This is always the same as `physical_size`.', - 'process.macho.symhash': - 'A hash of the imports in a Mach-O file. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a Mach-O implementation of the Windows PE imphash', - 'process.name': 'Process name.\nSometimes called program name or similar.', - 'process.parent.args': - 'Array of process arguments, starting with the absolute path to the executable.\nMay be filtered to protect sensitive information.', - 'process.parent.args_count': - 'Length of the process.args array.\nThis field can be useful for querying or performing bucket analysis on how many arguments were provided to start a process. More arguments may be an indication of suspicious activity.', - 'process.parent.code_signature.digest_algorithm': - 'The hashing algorithm used to sign the process.\nThis value can distinguish signatures when a file is signed multiple times by the same signer but with a different digest algorithm.', - 'process.parent.code_signature.exists': 'Boolean to capture if a signature is present.', - 'process.parent.code_signature.signing_id': - 'The identifier used to sign the process.\nThis is used to identify the application manufactured by a software vendor. The field is relevant to Apple *OS only.', - 'process.parent.code_signature.status': - 'Additional information about the certificate status.\nThis is useful for logging cryptographic errors with the certificate validity or trust status. Leave unpopulated if the validity or trust of the certificate was unchecked.', - 'process.parent.code_signature.subject_name': 'Subject name of the code signer', - 'process.parent.code_signature.team_id': - 'The team identifier used to sign the process.\nThis is used to identify the team or vendor of a software product. 
The field is relevant to Apple *OS only.', - 'process.parent.code_signature.timestamp': - 'Date and time when the code signature was generated and signed.', - 'process.parent.code_signature.trusted': - 'Stores the trust status of the certificate chain.\nValidating the trust of the certificate chain may be complicated, and this field should only be populated by tools that actively check the status.', - 'process.parent.code_signature.valid': - 'Boolean to capture if the digital signature is verified against the binary content.\nLeave unpopulated if a certificate was unchecked.', - 'process.parent.command_line': - 'Full command line that started the process, including the absolute path to the executable, and all arguments.\nSome arguments may be filtered to protect sensitive information.', - 'process.parent.elf.architecture': 'Machine architecture of the ELF file.', - 'process.parent.elf.byte_order': 'Byte sequence of ELF file.', - 'process.parent.elf.cpu_type': 'CPU type of the ELF file.', - 'process.parent.elf.creation_date': - "Extracted when possible from the file's metadata. Indicates when it was built or compiled. It can also be faked by malware creators.", - 'process.parent.elf.exports': 'List of exported element names and types.', - 'process.parent.elf.go_import_hash': - 'A hash of the Go language imports in an ELF file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', - 'process.parent.elf.go_imports': 'List of imported Go language element names and types.', - 'process.parent.elf.go_imports_names_entropy': - 'Shannon entropy calculation from the list of Go imports.', - 'process.parent.elf.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'process.parent.elf.go_stripped': - 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', - 'process.parent.elf.header.abi_version': 'Version of the ELF Application Binary Interface (ABI).', - 'process.parent.elf.header.class': 'Header class of the ELF file.', - 'process.parent.elf.header.data': 'Data table of the ELF header.', - 'process.parent.elf.header.entrypoint': 'Header entrypoint of the ELF file.', - 'process.parent.elf.header.object_version': '"0x1" for original ELF files.', - 'process.parent.elf.header.os_abi': 'Application Binary Interface (ABI) of the Linux OS.', - 'process.parent.elf.header.type': 'Header type of the ELF file.', - 'process.parent.elf.header.version': 'Version of the ELF header.', - 'process.parent.elf.import_hash': - 'A hash of the imports in an ELF file. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is an ELF implementation of the Windows PE imphash.', - 'process.parent.elf.imports': 'List of imported element names and types.', - 'process.parent.elf.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'process.parent.elf.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'process.parent.elf.sections': - 'An array containing an object for each section of the ELF file.\nThe keys that should be present in these objects are defined by sub-fields underneath `elf.sections.*`.', - 'process.parent.elf.sections.chi2': 'Chi-square probability distribution of the section.', - 'process.parent.elf.sections.entropy': 'Shannon entropy calculation from the section.', - 'process.parent.elf.sections.flags': 'ELF Section List flags.', - 'process.parent.elf.sections.name': 'ELF Section List name.', - 'process.parent.elf.sections.physical_offset': 'ELF Section List offset.', - 'process.parent.elf.sections.physical_size': 'ELF Section List physical size.', - 'process.parent.elf.sections.type': 'ELF Section List type.', - 'process.parent.elf.sections.var_entropy': - 'Variance for Shannon entropy calculation from the section.', - 'process.parent.elf.sections.virtual_address': 'ELF Section List virtual address.', - 'process.parent.elf.sections.virtual_size': 'ELF Section List virtual size.', - 'process.parent.elf.segments': - 'An array containing an object for each segment of the ELF file.\nThe keys that should be present in these objects are defined by sub-fields underneath `elf.segments.*`.', - 'process.parent.elf.segments.sections': 'ELF object segment sections.', - 'process.parent.elf.segments.type': 'ELF object segment type.', - 'process.parent.elf.shared_libraries': 
'List of shared libraries used by this ELF object.', - 'process.parent.elf.telfhash': 'telfhash symbol hash for ELF file.', - 'process.parent.end': 'The time the process ended.', - 'process.parent.entity_id': - 'Unique identifier for the process.\nThe implementation of this is specified by the data source, but some examples of what could be used here are a process-generated UUID, Sysmon Process GUIDs, or a hash of some uniquely identifying components of a process.\nConstructing a globally unique identifier is a common practice to mitigate PID reuse as well as to identify a specific process over time, across multiple monitored hosts.', - 'process.parent.executable': 'Absolute path to the process executable.', - 'process.parent.exit_code': - 'The exit code of the process, if this is a termination event.\nThe field should be absent if there is no exit code for the event (e.g. process start).', - 'process.parent.group.id': 'Unique identifier for the group on the system/platform.', - 'process.parent.group.name': 'Name of the group.', - 'process.parent.group_leader.entity_id': - 'Unique identifier for the process.\nThe implementation of this is specified by the data source, but some examples of what could be used here are a process-generated UUID, Sysmon Process GUIDs, or a hash of some uniquely identifying components of a process.\nConstructing a globally unique identifier is a common practice to mitigate PID reuse as well as to identify a specific process over time, across multiple monitored hosts.', - 'process.parent.group_leader.pid': 'Process id.', - 'process.parent.group_leader.start': 'The time the process started.', - 'process.parent.hash.md5': 'MD5 hash.', - 'process.parent.hash.sha1': 'SHA1 hash.', - 'process.parent.hash.sha256': 'SHA256 hash.', - 'process.parent.hash.sha384': 'SHA384 hash.', - 'process.parent.hash.sha512': 'SHA512 hash.', - 'process.parent.hash.ssdeep': 'SSDEEP hash.', - 'process.parent.hash.tlsh': 'TLSH hash.', - 'process.parent.interactive': 
- 'Whether the process is connected to an interactive shell.\nProcess interactivity is inferred from the processes file descriptors. If the character device for the controlling tty is the same as stdin and stderr for the process, the process is considered interactive.\nNote: A non-interactive process can belong to an interactive session and is simply one that does not have open file descriptors reading the controlling TTY on FD 0 (stdin) or writing to the controlling TTY on FD 2 (stderr). A backgrounded process is still considered interactive if stdin and stderr are connected to the controlling TTY.', - 'process.parent.macho.go_import_hash': - 'A hash of the Go language imports in a Mach-O file excluding standard library imports. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', - 'process.parent.macho.go_imports': 'List of imported Go language element names and types.', - 'process.parent.macho.go_imports_names_entropy': - 'Shannon entropy calculation from the list of Go imports.', - 'process.parent.macho.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'process.parent.macho.go_stripped': - 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', - 'process.parent.macho.import_hash': - 'A hash of the imports in a Mach-O file. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a synonym for symhash.', - 'process.parent.macho.imports': 'List of imported element names and types.', - 'process.parent.macho.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'process.parent.macho.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'process.parent.macho.sections': - 'An array containing an object for each section of the Mach-O file.\nThe keys that should be present in these objects are defined by sub-fields underneath `macho.sections.*`.', - 'process.parent.macho.sections.entropy': 'Shannon entropy calculation from the section.', - 'process.parent.macho.sections.name': 'Mach-O Section List name.', - 'process.parent.macho.sections.physical_size': 'Mach-O Section List physical size.', - 'process.parent.macho.sections.var_entropy': - 'Variance for Shannon entropy calculation from the section.', - 'process.parent.macho.sections.virtual_size': - 'Mach-O Section List virtual size. This is always the same as `physical_size`.', - 'process.parent.macho.symhash': - 'A hash of the imports in a Mach-O file. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a Mach-O implementation of the Windows PE imphash', - 'process.parent.name': 'Process name.\nSometimes called program name or similar.', - 'process.parent.pe.architecture': 'CPU architecture target for the file.', - 'process.parent.pe.company': 'Internal company name of the file, provided at compile-time.', - 'process.parent.pe.description': 'Internal description of the file, provided at compile-time.', - 'process.parent.pe.file_version': 'Internal version of the file, provided at compile-time.', - 'process.parent.pe.go_import_hash': - 'A hash of the Go language imports in a PE file excluding standard library imports. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', - 'process.parent.pe.go_imports': 'List of imported Go language element names and types.', - 'process.parent.pe.go_imports_names_entropy': - 'Shannon entropy calculation from the list of Go imports.', - 'process.parent.pe.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'process.parent.pe.go_stripped': - 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', - 'process.parent.pe.imphash': - 'A hash of the imports in a PE file. 
An imphash -- or import hash -- can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nLearn more at https://www.fireeye.com/blog/threat-research/2014/01/tracking-malware-import-hashing.html.', - 'process.parent.pe.import_hash': - 'A hash of the imports in a PE file. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a synonym for imphash.', - 'process.parent.pe.imports': 'List of imported element names and types.', - 'process.parent.pe.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'process.parent.pe.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'process.parent.pe.original_file_name': 'Internal name of the file, provided at compile-time.', - 'process.parent.pe.pehash': - 'A hash of the PE header and data from one or more PE sections. 
An pehash can be used to cluster files by transforming structural information about a file into a hash value.\nLearn more at https://www.usenix.org/legacy/events/leet09/tech/full_papers/wicherski/wicherski_html/index.html.', - 'process.parent.pe.product': 'Internal product name of the file, provided at compile-time.', - 'process.parent.pe.sections': - 'An array containing an object for each section of the PE file.\nThe keys that should be present in these objects are defined by sub-fields underneath `pe.sections.*`.', - 'process.parent.pe.sections.entropy': 'Shannon entropy calculation from the section.', - 'process.parent.pe.sections.name': 'PE Section List name.', - 'process.parent.pe.sections.physical_size': 'PE Section List physical size.', - 'process.parent.pe.sections.var_entropy': - 'Variance for Shannon entropy calculation from the section.', - 'process.parent.pe.sections.virtual_size': - 'PE Section List virtual size. This is always the same as `physical_size`.', - 'process.parent.pgid': - 'Deprecated for removal in next major version release. 
This field is superseded by `process.group_leader.pid`.\nIdentifier of the group of processes the process belongs to.', - 'process.parent.pid': 'Process id.', - 'process.parent.real_group.id': 'Unique identifier for the group on the system/platform.', - 'process.parent.real_group.name': 'Name of the group.', - 'process.parent.real_user.id': 'Unique identifier of the user.', - 'process.parent.real_user.name': 'Short name or login of the user.', - 'process.parent.saved_group.id': 'Unique identifier for the group on the system/platform.', - 'process.parent.saved_group.name': 'Name of the group.', - 'process.parent.saved_user.id': 'Unique identifier of the user.', - 'process.parent.saved_user.name': 'Short name or login of the user.', - 'process.parent.start': 'The time the process started.', - 'process.parent.supplemental_groups.id': - 'Unique identifier for the group on the system/platform.', - 'process.parent.supplemental_groups.name': 'Name of the group.', - 'process.parent.thread.id': 'Thread ID.', - 'process.parent.thread.name': 'Thread name.', - 'process.parent.title': - 'Process title.\nThe proctitle, some times the same as process name. Can also be different: for example a browser setting its title to the web page currently opened.', - 'process.parent.tty': - 'Information about the controlling TTY device. If set, the process belongs to an interactive session.', - 'process.parent.tty.char_device.major': - 'The major number identifies the driver associated with the device. The character device\'s major and minor numbers can be algorithmically combined to produce the more familiar terminal identifiers such as "ttyS0" and "pts/0". For more details, please refer to the Linux kernel documentation.', - 'process.parent.tty.char_device.minor': - 'The minor number is used only by the driver specified by the major number; other parts of the kernel don\u2019t use it, and merely pass it along to the driver. 
It is common for a driver to control several devices; the minor number provides a way for the driver to differentiate among them.', - 'process.parent.uptime': 'Seconds the process has been up.', - 'process.parent.user.id': 'Unique identifier of the user.', - 'process.parent.user.name': 'Short name or login of the user.', - 'process.parent.working_directory': 'The working directory of the process.', - 'process.pe.architecture': 'CPU architecture target for the file.', - 'process.pe.company': 'Internal company name of the file, provided at compile-time.', - 'process.pe.description': 'Internal description of the file, provided at compile-time.', - 'process.pe.file_version': 'Internal version of the file, provided at compile-time.', - 'process.pe.go_import_hash': - 'A hash of the Go language imports in a PE file excluding standard library imports. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', - 'process.pe.go_imports': 'List of imported Go language element names and types.', - 'process.pe.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', - 'process.pe.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'process.pe.go_stripped': - 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', - 'process.pe.imphash': - 'A hash of the imports in a PE file. 
An imphash -- or import hash -- can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nLearn more at https://www.fireeye.com/blog/threat-research/2014/01/tracking-malware-import-hashing.html.', - 'process.pe.import_hash': - 'A hash of the imports in a PE file. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a synonym for imphash.', - 'process.pe.imports': 'List of imported element names and types.', - 'process.pe.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'process.pe.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'process.pe.original_file_name': 'Internal name of the file, provided at compile-time.', - 'process.pe.pehash': - 'A hash of the PE header and data from one or more PE sections. An pehash can be used to cluster files by transforming structural information about a file into a hash value.\nLearn more at https://www.usenix.org/legacy/events/leet09/tech/full_papers/wicherski/wicherski_html/index.html.', - 'process.pe.product': 'Internal product name of the file, provided at compile-time.', - 'process.pe.sections': - 'An array containing an object for each section of the PE file.\nThe keys that should be present in these objects are defined by sub-fields underneath `pe.sections.*`.', - 'process.pe.sections.entropy': 'Shannon entropy calculation from the section.', - 'process.pe.sections.name': 'PE Section List name.', - 'process.pe.sections.physical_size': 'PE Section List physical size.', - 'process.pe.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', - 'process.pe.sections.virtual_size': - 'PE Section List virtual size. 
This is always the same as `physical_size`.', - 'process.pgid': - 'Deprecated for removal in next major version release. This field is superseded by `process.group_leader.pid`.\nIdentifier of the group of processes the process belongs to.', - 'process.pid': 'Process id.', - 'process.previous.args': - 'Array of process arguments, starting with the absolute path to the executable.\nMay be filtered to protect sensitive information.', - 'process.previous.args_count': - 'Length of the process.args array.\nThis field can be useful for querying or performing bucket analysis on how many arguments were provided to start a process. More arguments may be an indication of suspicious activity.', - 'process.previous.executable': 'Absolute path to the process executable.', - 'process.real_group.id': 'Unique identifier for the group on the system/platform.', - 'process.real_group.name': 'Name of the group.', - 'process.real_user.id': 'Unique identifier of the user.', - 'process.real_user.name': 'Short name or login of the user.', - 'process.saved_group.id': 'Unique identifier for the group on the system/platform.', - 'process.saved_group.name': 'Name of the group.', - 'process.saved_user.id': 'Unique identifier of the user.', - 'process.saved_user.name': 'Short name or login of the user.', - 'process.session_leader.args': - 'Array of process arguments, starting with the absolute path to the executable.\nMay be filtered to protect sensitive information.', - 'process.session_leader.args_count': - 'Length of the process.args array.\nThis field can be useful for querying or performing bucket analysis on how many arguments were provided to start a process. 
More arguments may be an indication of suspicious activity.', - 'process.session_leader.command_line': - 'Full command line that started the process, including the absolute path to the executable, and all arguments.\nSome arguments may be filtered to protect sensitive information.', - 'process.session_leader.entity_id': - 'Unique identifier for the process.\nThe implementation of this is specified by the data source, but some examples of what could be used here are a process-generated UUID, Sysmon Process GUIDs, or a hash of some uniquely identifying components of a process.\nConstructing a globally unique identifier is a common practice to mitigate PID reuse as well as to identify a specific process over time, across multiple monitored hosts.', - 'process.session_leader.executable': 'Absolute path to the process executable.', - 'process.session_leader.group.id': 'Unique identifier for the group on the system/platform.', - 'process.session_leader.group.name': 'Name of the group.', - 'process.session_leader.interactive': - 'Whether the process is connected to an interactive shell.\nProcess interactivity is inferred from the processes file descriptors. If the character device for the controlling tty is the same as stdin and stderr for the process, the process is considered interactive.\nNote: A non-interactive process can belong to an interactive session and is simply one that does not have open file descriptors reading the controlling TTY on FD 0 (stdin) or writing to the controlling TTY on FD 2 (stderr). 
A backgrounded process is still considered interactive if stdin and stderr are connected to the controlling TTY.', - 'process.session_leader.name': 'Process name.\nSometimes called program name or similar.', - 'process.session_leader.parent.entity_id': - 'Unique identifier for the process.\nThe implementation of this is specified by the data source, but some examples of what could be used here are a process-generated UUID, Sysmon Process GUIDs, or a hash of some uniquely identifying components of a process.\nConstructing a globally unique identifier is a common practice to mitigate PID reuse as well as to identify a specific process over time, across multiple monitored hosts.', - 'process.session_leader.parent.pid': 'Process id.', - 'process.session_leader.parent.session_leader.entity_id': - 'Unique identifier for the process.\nThe implementation of this is specified by the data source, but some examples of what could be used here are a process-generated UUID, Sysmon Process GUIDs, or a hash of some uniquely identifying components of a process.\nConstructing a globally unique identifier is a common practice to mitigate PID reuse as well as to identify a specific process over time, across multiple monitored hosts.', - 'process.session_leader.parent.session_leader.pid': 'Process id.', - 'process.session_leader.parent.session_leader.start': 'The time the process started.', - 'process.session_leader.parent.start': 'The time the process started.', - 'process.session_leader.pid': 'Process id.', - 'process.session_leader.real_group.id': 'Unique identifier for the group on the system/platform.', - 'process.session_leader.real_group.name': 'Name of the group.', - 'process.session_leader.real_user.id': 'Unique identifier of the user.', - 'process.session_leader.real_user.name': 'Short name or login of the user.', - 'process.session_leader.same_as_process': - "This boolean is used to identify if a leader process is the same as the top level process.\nFor example, if 
`process.group_leader.same_as_process = true`, it means the process event in question is the leader of its process group. Details under `process.*` like `pid` would be the same under `process.group_leader.*` The same applies for both `process.session_leader` and `process.entry_leader`.\nThis field exists to the benefit of EQL and other rule engines since it's not possible to compare equality between two fields in a single document. e.g `process.entity_id` = `process.group_leader.entity_id` (top level process is the process group leader) OR `process.entity_id` = `process.entry_leader.entity_id` (top level process is the entry session leader)\nInstead these rules could be written like: `process.group_leader.same_as_process: true` OR `process.entry_leader.same_as_process: true`\nNote: This field is only set on `process.entry_leader`, `process.session_leader` and `process.group_leader`.", - 'process.session_leader.saved_group.id': - 'Unique identifier for the group on the system/platform.', - 'process.session_leader.saved_group.name': 'Name of the group.', - 'process.session_leader.saved_user.id': 'Unique identifier of the user.', - 'process.session_leader.saved_user.name': 'Short name or login of the user.', - 'process.session_leader.start': 'The time the process started.', - 'process.session_leader.supplemental_groups.id': - 'Unique identifier for the group on the system/platform.', - 'process.session_leader.supplemental_groups.name': 'Name of the group.', - 'process.session_leader.tty': - 'Information about the controlling TTY device. If set, the process belongs to an interactive session.', - 'process.session_leader.tty.char_device.major': - 'The major number identifies the driver associated with the device. The character device\'s major and minor numbers can be algorithmically combined to produce the more familiar terminal identifiers such as "ttyS0" and "pts/0". 
For more details, please refer to the Linux kernel documentation.', - 'process.session_leader.tty.char_device.minor': - 'The minor number is used only by the driver specified by the major number; other parts of the kernel don\u2019t use it, and merely pass it along to the driver. It is common for a driver to control several devices; the minor number provides a way for the driver to differentiate among them.', - 'process.session_leader.user.id': 'Unique identifier of the user.', - 'process.session_leader.user.name': 'Short name or login of the user.', - 'process.session_leader.working_directory': 'The working directory of the process.', - 'process.start': 'The time the process started.', - 'process.supplemental_groups.id': 'Unique identifier for the group on the system/platform.', - 'process.supplemental_groups.name': 'Name of the group.', - 'process.thread.id': 'Thread ID.', - 'process.thread.name': 'Thread name.', - 'process.title': - 'Process title.\nThe proctitle, some times the same as process name. Can also be different: for example a browser setting its title to the web page currently opened.', - 'process.tty': - 'Information about the controlling TTY device. If set, the process belongs to an interactive session.', - 'process.tty.char_device.major': - 'The major number identifies the driver associated with the device. The character device\'s major and minor numbers can be algorithmically combined to produce the more familiar terminal identifiers such as "ttyS0" and "pts/0". For more details, please refer to the Linux kernel documentation.', - 'process.tty.char_device.minor': - 'The minor number is used only by the driver specified by the major number; other parts of the kernel don\u2019t use it, and merely pass it along to the driver. It is common for a driver to control several devices; the minor number provides a way for the driver to differentiate among them.', - 'process.tty.columns': - "The number of character columns per line. 
e.g terminal width\nTerminal sizes can change, so this value reflects the maximum value for a given IO event. i.e. where event.action = 'text_output'", - 'process.tty.rows': - "The number of character rows in the terminal. e.g terminal height\nTerminal sizes can change, so this value reflects the maximum value for a given IO event. i.e. where event.action = 'text_output'", - 'process.uptime': 'Seconds the process has been up.', - 'process.user.id': 'Unique identifier of the user.', - 'process.user.name': 'Short name or login of the user.', - 'process.working_directory': 'The working directory of the process.', - 'registry.data.bytes': - 'Original bytes written with base64 encoding.\nFor Windows registry operations, such as SetValueEx and RegQueryValueEx, this corresponds to the data pointed by `lp_data`. This is optional but provides better recoverability and should be populated for REG_BINARY encoded values.', - 'registry.data.strings': - 'Content when writing string types.\nPopulated as an array when writing string data to the registry. For single string registry types (REG_SZ, REG_EXPAND_SZ), this should be an array with one string. For sequences of string with REG_MULTI_SZ, this array will be variable length. For numeric data, such as REG_DWORD and REG_QWORD, this should be populated with the decimal representation (e.g `"1"`).', - 'registry.data.type': 'Standard registry type for encoding contents', - 'registry.hive': 'Abbreviated name for the hive.', - 'registry.key': 'Hive-relative path of keys.', - 'registry.path': 'Full path, including hive, key and value', - 'registry.value': 'Name of the value written.', - 'related.hash': - "All the hashes seen on your event. Populating this field, then using it to search for hashes can help in situations where you're unsure what the hash algorithm is (and therefore which key name to search).", - 'related.hosts': - 'All hostnames or other host identifiers seen on your event. 
Example identifiers include FQDNs, domain names, workstation names, or aliases.', - 'related.ip': 'All of the IPs seen on your event.', - 'related.user': 'All the user names or other user identifiers seen on the event.', - 'rule.author': - 'Name, organization, or pseudonym of the author or authors who created the rule used to generate this event.', - 'rule.category': - 'A categorization value keyword used by the entity using the rule for detection of this event.', - 'rule.description': 'The description of the rule generating the event.', - 'rule.id': - 'A rule ID that is unique within the scope of an agent, observer, or other entity using the rule for detection of this event.', - 'rule.license': - 'Name of the license under which the rule used to generate this event is made available.', - 'rule.name': 'The name of the rule or signature generating the event.', - 'rule.reference': - "Reference URL to additional information about the rule used to generate this event.\nThe URL can point to the vendor's documentation about the rule. If that's not available, it can also be a link to a more general page describing this type of alert.", - 'rule.ruleset': - 'Name of the ruleset, policy, group, or parent category in which the rule used to generate this event is a member.', - 'rule.uuid': - 'A rule ID that is unique within the scope of a set or group of agents, observers, or other entities using the rule for detection of this event.', - 'rule.version': 'The version / revision of the rule being used for analysis.', - 'server.address': - 'Some event server addresses are defined ambiguously. The event will sometimes list an IP, a domain or a unix socket. You should always store the raw address in the `.address` field.\nThen it should be duplicated to `.ip` or `.domain`, depending on which one it is.', - 'server.as.number': - 'Unique number allocated to the autonomous system. 
The autonomous system number (ASN) uniquely identifies each network on the Internet.', - 'server.as.organization.name': 'Organization name.', - 'server.bytes': 'Bytes sent from the server to the client.', - 'server.domain': - 'The domain name of the server system.\nThis value may be a host name, a fully qualified domain name, or another host naming format. The value may derive from the original event or be added from enrichment.', - 'server.geo.city_name': 'City name.', - 'server.geo.continent_code': "Two-letter code representing continent's name.", - 'server.geo.continent_name': 'Name of the continent.', - 'server.geo.country_iso_code': 'Country ISO code.', - 'server.geo.country_name': 'Country name.', - 'server.geo.location': 'Longitude and latitude.', - 'server.geo.name': - 'User-defined description of a location, at the level of granularity they care about.\nCould be the name of their data centers, the floor number, if this describes a local physical entity, city names.\nNot typically used in automated geolocation.', - 'server.geo.postal_code': - 'Postal code associated with the location.\nValues appropriate for this field may also be known as a postcode or ZIP code and will vary widely from country to country.', - 'server.geo.region_iso_code': 'Region ISO code.', - 'server.geo.region_name': 'Region name.', - 'server.geo.timezone': 'The time zone of the location, such as IANA time zone name.', - 'server.ip': 'IP address of the server (IPv4 or IPv6).', - 'server.mac': - 'MAC address of the server.\nThe notation format from RFC 7042 is suggested: Each octet (that is, 8-bit byte) is represented by two [uppercase] hexadecimal digits giving the value of the octet as an unsigned integer. Successive octets are separated by a hyphen.', - 'server.nat.ip': - 'Translated ip of destination based NAT sessions (e.g. 
internet to private DMZ)\nTypically used with load balancers, firewalls, or routers.', - 'server.nat.port': - 'Translated port of destination based NAT sessions (e.g. internet to private DMZ)\nTypically used with load balancers, firewalls, or routers.', - 'server.packets': 'Packets sent from the server to the client.', - 'server.port': 'Port of the server.', - 'server.registered_domain': - 'The highest registered server domain, stripped of the subdomain.\nFor example, the registered domain for "foo.example.com" is "example.com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). Trying to approximate this by simply taking the last two labels will not work well for TLDs such as "co.uk".', - 'server.subdomain': - 'The subdomain portion of a fully qualified domain name includes all of the names except the host name under the registered_domain. In a partially qualified domain, or if the the qualification level of the full name cannot be determined, subdomain contains all of the names below the registered domain.\nFor example the subdomain portion of "www.east.mydomain.co.uk" is "east". If the domain has multiple levels of subdomain, such as "sub2.sub1.example.com", the subdomain field should contain "sub2.sub1", with no trailing period.', - 'server.top_level_domain': - 'The effective top level domain (eTLD), also known as the domain suffix, is the last part of the domain name. For example, the top level domain for example.com is "com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). 
Trying to approximate this by simply taking the last label will not work well for effective TLDs such as "co.uk".', - 'server.user.domain': - 'Name of the directory the user is a member of.\nFor example, an LDAP or Active Directory domain name.', - 'server.user.email': 'User email address.', - 'server.user.full_name': "User's full name, if available.", - 'server.user.group.domain': - 'Name of the directory the group is a member of.\nFor example, an LDAP or Active Directory domain name.', - 'server.user.group.id': 'Unique identifier for the group on the system/platform.', - 'server.user.group.name': 'Name of the group.', - 'server.user.hash': - 'Unique user hash to correlate information for a user in anonymized form.\nUseful if `user.id` or `user.name` contain confidential information and cannot be used.', - 'server.user.id': 'Unique identifier of the user.', - 'server.user.name': 'Short name or login of the user.', - 'server.user.roles': 'Array of user roles at the time of the event.', - 'service.address': - 'Address where data about this service was collected from.\nThis should be a URI, network address (ipv4:port or [ipv6]:port) or a resource path (sockets).', - 'service.environment': - 'Identifies the environment where the service is running.\nIf the same service runs in different environments (production, staging, QA, development, etc.), the environment can identify other instances of the same service. Can also group services and applications from the same environment.', - 'service.ephemeral_id': - 'Ephemeral identifier of this service (if one exists).\nThis id normally changes across restarts, but `service.id` does not.', - 'service.id': - 'Unique identifier of the running service. If the service is comprised of many nodes, the `service.id` should be the same for all nodes.\nThis id should uniquely identify the service. 
This makes it possible to correlate logs and metrics for one specific service, no matter which particular node emitted the event.\nNote that if you need to see the events from one specific host of the service, you should filter on that `host.name` or `host.id` instead.', - 'service.name': - 'Name of the service data is collected from.\nThe name of the service is normally user given. This allows for distributed services that run on multiple hosts to correlate the related instances based on the name.\nIn the case of Elasticsearch the `service.name` could contain the cluster name. For Beats the `service.name` is by default a copy of the `service.type` field if no name is specified.', - 'service.node.name': - "Name of a service node.\nThis allows for two nodes of the same service running on the same host to be differentiated. Therefore, `service.node.name` should typically be unique across nodes of a given service.\nIn the case of Elasticsearch, the `service.node.name` could contain the unique node name within the Elasticsearch cluster. In cases where the service doesn't have the concept of a node name, the host name or container name can be used to distinguish running instances that make up this service. If those do not provide uniqueness (e.g. multiple instances of the service running on the same host) - the node name can be manually set.", - 'service.node.role': - 'Deprecated for removal in next major version release. 
This field will be superseded by `node.roles`.\nRole of a service node.\nThis allows for distinction between different running roles of the same service.\nIn the case of Kibana, the `service.node.role` could be `ui` or `background_tasks`.\nIn the case of Elasticsearch, the `service.node.role` could be `master` or `data`.\nOther services could use this to distinguish between a `web` and `worker` role running as part of the service.', - 'service.node.roles': - 'Roles of a service node.\nThis allows for distinction between different running roles of the same service.\nIn the case of Kibana, the `service.node.role` could be `ui` or `background_tasks` or both.\nIn the case of Elasticsearch, the `service.node.role` could be `master` or `data` or both.\nOther services could use this to distinguish between a `web` and `worker` role running as part of the service.', - 'service.origin.address': - 'Address where data about this service was collected from.\nThis should be a URI, network address (ipv4:port or [ipv6]:port) or a resource path (sockets).', - 'service.origin.environment': - 'Identifies the environment where the service is running.\nIf the same service runs in different environments (production, staging, QA, development, etc.), the environment can identify other instances of the same service. Can also group services and applications from the same environment.', - 'service.origin.ephemeral_id': - 'Ephemeral identifier of this service (if one exists).\nThis id normally changes across restarts, but `service.id` does not.', - 'service.origin.id': - 'Unique identifier of the running service. If the service is comprised of many nodes, the `service.id` should be the same for all nodes.\nThis id should uniquely identify the service. 
This makes it possible to correlate logs and metrics for one specific service, no matter which particular node emitted the event.\nNote that if you need to see the events from one specific host of the service, you should filter on that `host.name` or `host.id` instead.', - 'service.origin.name': - 'Name of the service data is collected from.\nThe name of the service is normally user given. This allows for distributed services that run on multiple hosts to correlate the related instances based on the name.\nIn the case of Elasticsearch the `service.name` could contain the cluster name. For Beats the `service.name` is by default a copy of the `service.type` field if no name is specified.', - 'service.origin.node.name': - "Name of a service node.\nThis allows for two nodes of the same service running on the same host to be differentiated. Therefore, `service.node.name` should typically be unique across nodes of a given service.\nIn the case of Elasticsearch, the `service.node.name` could contain the unique node name within the Elasticsearch cluster. In cases where the service doesn't have the concept of a node name, the host name or container name can be used to distinguish running instances that make up this service. If those do not provide uniqueness (e.g. multiple instances of the service running on the same host) - the node name can be manually set.", - 'service.origin.node.role': - 'Deprecated for removal in next major version release. 
This field will be superseded by `node.roles`.\nRole of a service node.\nThis allows for distinction between different running roles of the same service.\nIn the case of Kibana, the `service.node.role` could be `ui` or `background_tasks`.\nIn the case of Elasticsearch, the `service.node.role` could be `master` or `data`.\nOther services could use this to distinguish between a `web` and `worker` role running as part of the service.', - 'service.origin.node.roles': - 'Roles of a service node.\nThis allows for distinction between different running roles of the same service.\nIn the case of Kibana, the `service.node.role` could be `ui` or `background_tasks` or both.\nIn the case of Elasticsearch, the `service.node.role` could be `master` or `data` or both.\nOther services could use this to distinguish between a `web` and `worker` role running as part of the service.', - 'service.origin.state': 'Current state of the service.', - 'service.origin.type': - 'The type of the service data is collected from.\nThe type can be used to group and correlate logs and metrics from one service type.\nExample: If logs or metrics are collected from Elasticsearch, `service.type` would be `elasticsearch`.', - 'service.origin.version': - 'Version of the service the data was collected from.\nThis allows to look at a data set only for a specific version of a service.', - 'service.state': 'Current state of the service.', - 'service.target.address': - 'Address where data about this service was collected from.\nThis should be a URI, network address (ipv4:port or [ipv6]:port) or a resource path (sockets).', - 'service.target.environment': - 'Identifies the environment where the service is running.\nIf the same service runs in different environments (production, staging, QA, development, etc.), the environment can identify other instances of the same service. 
Can also group services and applications from the same environment.', - 'service.target.ephemeral_id': - 'Ephemeral identifier of this service (if one exists).\nThis id normally changes across restarts, but `service.id` does not.', - 'service.target.id': - 'Unique identifier of the running service. If the service is comprised of many nodes, the `service.id` should be the same for all nodes.\nThis id should uniquely identify the service. This makes it possible to correlate logs and metrics for one specific service, no matter which particular node emitted the event.\nNote that if you need to see the events from one specific host of the service, you should filter on that `host.name` or `host.id` instead.', - 'service.target.name': - 'Name of the service data is collected from.\nThe name of the service is normally user given. This allows for distributed services that run on multiple hosts to correlate the related instances based on the name.\nIn the case of Elasticsearch the `service.name` could contain the cluster name. For Beats the `service.name` is by default a copy of the `service.type` field if no name is specified.', - 'service.target.node.name': - "Name of a service node.\nThis allows for two nodes of the same service running on the same host to be differentiated. Therefore, `service.node.name` should typically be unique across nodes of a given service.\nIn the case of Elasticsearch, the `service.node.name` could contain the unique node name within the Elasticsearch cluster. In cases where the service doesn't have the concept of a node name, the host name or container name can be used to distinguish running instances that make up this service. If those do not provide uniqueness (e.g. multiple instances of the service running on the same host) - the node name can be manually set.", - 'service.target.node.role': - 'Deprecated for removal in next major version release. 
This field will be superseded by `node.roles`.\nRole of a service node.\nThis allows for distinction between different running roles of the same service.\nIn the case of Kibana, the `service.node.role` could be `ui` or `background_tasks`.\nIn the case of Elasticsearch, the `service.node.role` could be `master` or `data`.\nOther services could use this to distinguish between a `web` and `worker` role running as part of the service.', - 'service.target.node.roles': - 'Roles of a service node.\nThis allows for distinction between different running roles of the same service.\nIn the case of Kibana, the `service.node.role` could be `ui` or `background_tasks` or both.\nIn the case of Elasticsearch, the `service.node.role` could be `master` or `data` or both.\nOther services could use this to distinguish between a `web` and `worker` role running as part of the service.', - 'service.target.state': 'Current state of the service.', - 'service.target.type': - 'The type of the service data is collected from.\nThe type can be used to group and correlate logs and metrics from one service type.\nExample: If logs or metrics are collected from Elasticsearch, `service.type` would be `elasticsearch`.', - 'service.target.version': - 'Version of the service the data was collected from.\nThis allows to look at a data set only for a specific version of a service.', - 'service.type': - 'The type of the service data is collected from.\nThe type can be used to group and correlate logs and metrics from one service type.\nExample: If logs or metrics are collected from Elasticsearch, `service.type` would be `elasticsearch`.', - 'service.version': - 'Version of the service the data was collected from.\nThis allows to look at a data set only for a specific version of a service.', - 'source.address': - 'Some event source addresses are defined ambiguously. The event will sometimes list an IP, a domain or a unix socket. 
You should always store the raw address in the `.address` field.\nThen it should be duplicated to `.ip` or `.domain`, depending on which one it is.', - 'source.as.number': - 'Unique number allocated to the autonomous system. The autonomous system number (ASN) uniquely identifies each network on the Internet.', - 'source.as.organization.name': 'Organization name.', - 'source.bytes': 'Bytes sent from the source to the destination.', - 'source.domain': - 'The domain name of the source system.\nThis value may be a host name, a fully qualified domain name, or another host naming format. The value may derive from the original event or be added from enrichment.', - 'source.geo.city_name': 'City name.', - 'source.geo.continent_code': "Two-letter code representing continent's name.", - 'source.geo.continent_name': 'Name of the continent.', - 'source.geo.country_iso_code': 'Country ISO code.', - 'source.geo.country_name': 'Country name.', - 'source.geo.location': 'Longitude and latitude.', - 'source.geo.name': - 'User-defined description of a location, at the level of granularity they care about.\nCould be the name of their data centers, the floor number, if this describes a local physical entity, city names.\nNot typically used in automated geolocation.', - 'source.geo.postal_code': - 'Postal code associated with the location.\nValues appropriate for this field may also be known as a postcode or ZIP code and will vary widely from country to country.', - 'source.geo.region_iso_code': 'Region ISO code.', - 'source.geo.region_name': 'Region name.', - 'source.geo.timezone': 'The time zone of the location, such as IANA time zone name.', - 'source.ip': 'IP address of the source (IPv4 or IPv6).', - 'source.mac': - 'MAC address of the source.\nThe notation format from RFC 7042 is suggested: Each octet (that is, 8-bit byte) is represented by two [uppercase] hexadecimal digits giving the value of the octet as an unsigned integer. 
Successive octets are separated by a hyphen.', - 'source.nat.ip': - 'Translated ip of source based NAT sessions (e.g. internal client to internet)\nTypically connections traversing load balancers, firewalls, or routers.', - 'source.nat.port': - 'Translated port of source based NAT sessions. (e.g. internal client to internet)\nTypically used with load balancers, firewalls, or routers.', - 'source.packets': 'Packets sent from the source to the destination.', - 'source.port': 'Port of the source.', - 'source.registered_domain': - 'The highest registered source domain, stripped of the subdomain.\nFor example, the registered domain for "foo.example.com" is "example.com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). Trying to approximate this by simply taking the last two labels will not work well for TLDs such as "co.uk".', - 'source.subdomain': - 'The subdomain portion of a fully qualified domain name includes all of the names except the host name under the registered_domain. In a partially qualified domain, or if the the qualification level of the full name cannot be determined, subdomain contains all of the names below the registered domain.\nFor example the subdomain portion of "www.east.mydomain.co.uk" is "east". If the domain has multiple levels of subdomain, such as "sub2.sub1.example.com", the subdomain field should contain "sub2.sub1", with no trailing period.', - 'source.top_level_domain': - 'The effective top level domain (eTLD), also known as the domain suffix, is the last part of the domain name. For example, the top level domain for example.com is "com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). 
Trying to approximate this by simply taking the last label will not work well for effective TLDs such as "co.uk".', - 'source.user.domain': - 'Name of the directory the user is a member of.\nFor example, an LDAP or Active Directory domain name.', - 'source.user.email': 'User email address.', - 'source.user.full_name': "User's full name, if available.", - 'source.user.group.domain': - 'Name of the directory the group is a member of.\nFor example, an LDAP or Active Directory domain name.', - 'source.user.group.id': 'Unique identifier for the group on the system/platform.', - 'source.user.group.name': 'Name of the group.', - 'source.user.hash': - 'Unique user hash to correlate information for a user in anonymized form.\nUseful if `user.id` or `user.name` contain confidential information and cannot be used.', - 'source.user.id': 'Unique identifier of the user.', - 'source.user.name': 'Short name or login of the user.', - 'source.user.roles': 'Array of user roles at the time of the event.', - 'span.id': - 'Unique identifier of the span within the scope of its trace.\nA span represents an operation within a transaction, such as a request to another service, or a database query.', - tags: 'List of keywords used to tag each event.', - 'threat.enrichments': - 'A list of associated indicators objects enriching the event, and the context of that association/enrichment.', - 'threat.enrichments.indicator': 'Object containing associated indicators enriching the event.', - 'threat.enrichments.indicator.as.number': - 'Unique number allocated to the autonomous system. The autonomous system number (ASN) uniquely identifies each network on the Internet.', - 'threat.enrichments.indicator.as.organization.name': 'Organization name.', - 'threat.enrichments.indicator.confidence': - 'Identifies the vendor-neutral confidence rating using the None/Low/Medium/High scale defined in Appendix A of the STIX 2.1 framework. 
Vendor-specific confidence scales may be added as custom fields.', - 'threat.enrichments.indicator.description': - 'Describes the type of action conducted by the threat.', - 'threat.enrichments.indicator.email.address': - 'Identifies a threat indicator as an email address (irrespective of direction).', - 'threat.enrichments.indicator.file.accessed': - 'Last time the file was accessed.\nNote that not all filesystems keep track of access time.', - 'threat.enrichments.indicator.file.attributes': - "Array of file attributes.\nAttributes names will vary by platform. Here's a non-exhaustive list of values that are expected in this field: archive, compressed, directory, encrypted, execute, hidden, read, readonly, system, write.", - 'threat.enrichments.indicator.file.code_signature.digest_algorithm': - 'The hashing algorithm used to sign the process.\nThis value can distinguish signatures when a file is signed multiple times by the same signer but with a different digest algorithm.', - 'threat.enrichments.indicator.file.code_signature.exists': - 'Boolean to capture if a signature is present.', - 'threat.enrichments.indicator.file.code_signature.signing_id': - 'The identifier used to sign the process.\nThis is used to identify the application manufactured by a software vendor. The field is relevant to Apple *OS only.', - 'threat.enrichments.indicator.file.code_signature.status': - 'Additional information about the certificate status.\nThis is useful for logging cryptographic errors with the certificate validity or trust status. Leave unpopulated if the validity or trust of the certificate was unchecked.', - 'threat.enrichments.indicator.file.code_signature.subject_name': - 'Subject name of the code signer', - 'threat.enrichments.indicator.file.code_signature.team_id': - 'The team identifier used to sign the process.\nThis is used to identify the team or vendor of a software product. 
The field is relevant to Apple *OS only.', - 'threat.enrichments.indicator.file.code_signature.timestamp': - 'Date and time when the code signature was generated and signed.', - 'threat.enrichments.indicator.file.code_signature.trusted': - 'Stores the trust status of the certificate chain.\nValidating the trust of the certificate chain may be complicated, and this field should only be populated by tools that actively check the status.', - 'threat.enrichments.indicator.file.code_signature.valid': - 'Boolean to capture if the digital signature is verified against the binary content.\nLeave unpopulated if a certificate was unchecked.', - 'threat.enrichments.indicator.file.created': - 'File creation time.\nNote that not all filesystems store the creation time.', - 'threat.enrichments.indicator.file.ctime': - 'Last time the file attributes or metadata changed.\nNote that changes to the file content will update `mtime`. This implies `ctime` will be adjusted at the same time, since `mtime` is an attribute of the file.', - 'threat.enrichments.indicator.file.device': 'Device that is the source of the file.', - 'threat.enrichments.indicator.file.directory': - 'Directory where the file is located. It should include the drive letter, when appropriate.', - 'threat.enrichments.indicator.file.drive_letter': - 'Drive letter where the file is located. This field is only relevant on Windows.\nThe value should be uppercase, and not include the colon.', - 'threat.enrichments.indicator.file.elf.architecture': 'Machine architecture of the ELF file.', - 'threat.enrichments.indicator.file.elf.byte_order': 'Byte sequence of ELF file.', - 'threat.enrichments.indicator.file.elf.cpu_type': 'CPU type of the ELF file.', - 'threat.enrichments.indicator.file.elf.creation_date': - "Extracted when possible from the file's metadata. Indicates when it was built or compiled. 
It can also be faked by malware creators.", - 'threat.enrichments.indicator.file.elf.exports': 'List of exported element names and types.', - 'threat.enrichments.indicator.file.elf.go_import_hash': - 'A hash of the Go language imports in an ELF file excluding standard library imports. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', - 'threat.enrichments.indicator.file.elf.go_imports': - 'List of imported Go language element names and types.', - 'threat.enrichments.indicator.file.elf.go_imports_names_entropy': - 'Shannon entropy calculation from the list of Go imports.', - 'threat.enrichments.indicator.file.elf.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'threat.enrichments.indicator.file.elf.go_stripped': - 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', - 'threat.enrichments.indicator.file.elf.header.abi_version': - 'Version of the ELF Application Binary Interface (ABI).', - 'threat.enrichments.indicator.file.elf.header.class': 'Header class of the ELF file.', - 'threat.enrichments.indicator.file.elf.header.data': 'Data table of the ELF header.', - 'threat.enrichments.indicator.file.elf.header.entrypoint': 'Header entrypoint of the ELF file.', - 'threat.enrichments.indicator.file.elf.header.object_version': '"0x1" for original ELF files.', - 'threat.enrichments.indicator.file.elf.header.os_abi': - 'Application Binary Interface (ABI) of the Linux OS.', - 'threat.enrichments.indicator.file.elf.header.type': 'Header type of the ELF file.', - 'threat.enrichments.indicator.file.elf.header.version': 'Version of the ELF header.', - 
'threat.enrichments.indicator.file.elf.import_hash': - 'A hash of the imports in an ELF file. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is an ELF implementation of the Windows PE imphash.', - 'threat.enrichments.indicator.file.elf.imports': 'List of imported element names and types.', - 'threat.enrichments.indicator.file.elf.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'threat.enrichments.indicator.file.elf.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'threat.enrichments.indicator.file.elf.sections': - 'An array containing an object for each section of the ELF file.\nThe keys that should be present in these objects are defined by sub-fields underneath `elf.sections.*`.', - 'threat.enrichments.indicator.file.elf.sections.chi2': - 'Chi-square probability distribution of the section.', - 'threat.enrichments.indicator.file.elf.sections.entropy': - 'Shannon entropy calculation from the section.', - 'threat.enrichments.indicator.file.elf.sections.flags': 'ELF Section List flags.', - 'threat.enrichments.indicator.file.elf.sections.name': 'ELF Section List name.', - 'threat.enrichments.indicator.file.elf.sections.physical_offset': 'ELF Section List offset.', - 'threat.enrichments.indicator.file.elf.sections.physical_size': 'ELF Section List physical size.', - 'threat.enrichments.indicator.file.elf.sections.type': 'ELF Section List type.', - 'threat.enrichments.indicator.file.elf.sections.var_entropy': - 'Variance for Shannon entropy calculation from the section.', - 'threat.enrichments.indicator.file.elf.sections.virtual_address': - 'ELF Section List virtual address.', - 'threat.enrichments.indicator.file.elf.sections.virtual_size': 'ELF Section List virtual size.', - 
'threat.enrichments.indicator.file.elf.segments': - 'An array containing an object for each segment of the ELF file.\nThe keys that should be present in these objects are defined by sub-fields underneath `elf.segments.*`.', - 'threat.enrichments.indicator.file.elf.segments.sections': 'ELF object segment sections.', - 'threat.enrichments.indicator.file.elf.segments.type': 'ELF object segment type.', - 'threat.enrichments.indicator.file.elf.shared_libraries': - 'List of shared libraries used by this ELF object.', - 'threat.enrichments.indicator.file.elf.telfhash': 'telfhash symbol hash for ELF file.', - 'threat.enrichments.indicator.file.extension': - 'File extension, excluding the leading dot.\nNote that when the file name has multiple extensions (example.tar.gz), only the last one should be captured ("gz", not "tar.gz").', - 'threat.enrichments.indicator.file.fork_name': - 'A fork is additional data associated with a filesystem object.\nOn Linux, a resource fork is used to store additional data with a filesystem object. A file always has at least one fork for the data portion, and additional forks may exist.\nOn NTFS, this is analogous to an Alternate Data Stream (ADS), and the default data stream for a file is just called $DATA. Zone.Identifier is commonly used by Windows to track contents downloaded from the Internet. An ADS is typically of the form: `C:\\path\\to\\filename.extension:some_fork_name`, and `some_fork_name` is the value that should populate `fork_name`. `filename.extension` should populate `file.name`, and `extension` should populate `file.extension`. 
The full path, `file.path`, will include the fork name.', - 'threat.enrichments.indicator.file.gid': 'Primary group ID (GID) of the file.', - 'threat.enrichments.indicator.file.group': 'Primary group name of the file.', - 'threat.enrichments.indicator.file.hash.md5': 'MD5 hash.', - 'threat.enrichments.indicator.file.hash.sha1': 'SHA1 hash.', - 'threat.enrichments.indicator.file.hash.sha256': 'SHA256 hash.', - 'threat.enrichments.indicator.file.hash.sha384': 'SHA384 hash.', - 'threat.enrichments.indicator.file.hash.sha512': 'SHA512 hash.', - 'threat.enrichments.indicator.file.hash.ssdeep': 'SSDEEP hash.', - 'threat.enrichments.indicator.file.hash.tlsh': 'TLSH hash.', - 'threat.enrichments.indicator.file.inode': 'Inode representing the file in the filesystem.', - 'threat.enrichments.indicator.file.mime_type': - 'MIME type should identify the format of the file or stream of bytes using https://www.iana.org/assignments/media-types/media-types.xhtml[IANA official types], where possible. When more than one type is applicable, the most specific type should be used.', - 'threat.enrichments.indicator.file.mode': 'Mode of the file in octal representation.', - 'threat.enrichments.indicator.file.mtime': 'Last time the file content was modified.', - 'threat.enrichments.indicator.file.name': - 'Name of the file including the extension, without the directory.', - 'threat.enrichments.indicator.file.owner': "File owner's username.", - 'threat.enrichments.indicator.file.path': - 'Full path to the file, including the file name. 
It should include the drive letter, when appropriate.', - 'threat.enrichments.indicator.file.pe.architecture': 'CPU architecture target for the file.', - 'threat.enrichments.indicator.file.pe.company': - 'Internal company name of the file, provided at compile-time.', - 'threat.enrichments.indicator.file.pe.description': - 'Internal description of the file, provided at compile-time.', - 'threat.enrichments.indicator.file.pe.file_version': - 'Internal version of the file, provided at compile-time.', - 'threat.enrichments.indicator.file.pe.go_import_hash': - 'A hash of the Go language imports in a PE file excluding standard library imports. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', - 'threat.enrichments.indicator.file.pe.go_imports': - 'List of imported Go language element names and types.', - 'threat.enrichments.indicator.file.pe.go_imports_names_entropy': - 'Shannon entropy calculation from the list of Go imports.', - 'threat.enrichments.indicator.file.pe.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'threat.enrichments.indicator.file.pe.go_stripped': - 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', - 'threat.enrichments.indicator.file.pe.imphash': - 'A hash of the imports in a PE file. 
An imphash -- or import hash -- can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nLearn more at https://www.fireeye.com/blog/threat-research/2014/01/tracking-malware-import-hashing.html.', - 'threat.enrichments.indicator.file.pe.import_hash': - 'A hash of the imports in a PE file. An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a synonym for imphash.', - 'threat.enrichments.indicator.file.pe.imports': 'List of imported element names and types.', - 'threat.enrichments.indicator.file.pe.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'threat.enrichments.indicator.file.pe.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'threat.enrichments.indicator.file.pe.original_file_name': - 'Internal name of the file, provided at compile-time.', - 'threat.enrichments.indicator.file.pe.pehash': - 'A hash of the PE header and data from one or more PE sections. 
An pehash can be used to cluster files by transforming structural information about a file into a hash value.\nLearn more at https://www.usenix.org/legacy/events/leet09/tech/full_papers/wicherski/wicherski_html/index.html.', - 'threat.enrichments.indicator.file.pe.product': - 'Internal product name of the file, provided at compile-time.', - 'threat.enrichments.indicator.file.pe.sections': - 'An array containing an object for each section of the PE file.\nThe keys that should be present in these objects are defined by sub-fields underneath `pe.sections.*`.', - 'threat.enrichments.indicator.file.pe.sections.entropy': - 'Shannon entropy calculation from the section.', - 'threat.enrichments.indicator.file.pe.sections.name': 'PE Section List name.', - 'threat.enrichments.indicator.file.pe.sections.physical_size': 'PE Section List physical size.', - 'threat.enrichments.indicator.file.pe.sections.var_entropy': - 'Variance for Shannon entropy calculation from the section.', - 'threat.enrichments.indicator.file.pe.sections.virtual_size': - 'PE Section List virtual size. This is always the same as `physical_size`.', - 'threat.enrichments.indicator.file.size': - 'File size in bytes.\nOnly relevant when `file.type` is "file".', - 'threat.enrichments.indicator.file.target_path': 'Target path for symlinks.', - 'threat.enrichments.indicator.file.type': 'File type (file, dir, or symlink).', - 'threat.enrichments.indicator.file.uid': - 'The user ID (UID) or security identifier (SID) of the file owner.', - 'threat.enrichments.indicator.file.x509.alternative_names': - 'List of subject alternative names (SAN). 
Name types vary by certificate authority and certificate type but commonly contain IP addresses, DNS names (and wildcards), and email addresses.', - 'threat.enrichments.indicator.file.x509.issuer.common_name': - 'List of common name (CN) of issuing certificate authority.', - 'threat.enrichments.indicator.file.x509.issuer.country': 'List of country \\(C) codes', - 'threat.enrichments.indicator.file.x509.issuer.distinguished_name': - 'Distinguished name (DN) of issuing certificate authority.', - 'threat.enrichments.indicator.file.x509.issuer.locality': 'List of locality names (L)', - 'threat.enrichments.indicator.file.x509.issuer.organization': - 'List of organizations (O) of issuing certificate authority.', - 'threat.enrichments.indicator.file.x509.issuer.organizational_unit': - 'List of organizational units (OU) of issuing certificate authority.', - 'threat.enrichments.indicator.file.x509.issuer.state_or_province': - 'List of state or province names (ST, S, or P)', - 'threat.enrichments.indicator.file.x509.not_after': - 'Time at which the certificate is no longer considered valid.', - 'threat.enrichments.indicator.file.x509.not_before': - 'Time at which the certificate is first considered valid.', - 'threat.enrichments.indicator.file.x509.public_key_algorithm': - 'Algorithm used to generate the public key.', - 'threat.enrichments.indicator.file.x509.public_key_curve': - 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', - 'threat.enrichments.indicator.file.x509.public_key_exponent': - 'Exponent used to derive the public key. This is algorithm specific.', - 'threat.enrichments.indicator.file.x509.public_key_size': - 'The size of the public key space in bits.', - 'threat.enrichments.indicator.file.x509.serial_number': - 'Unique serial number issued by the certificate authority. 
For consistency, if this value is alphanumeric, it should be formatted without colons and uppercase characters.', - 'threat.enrichments.indicator.file.x509.signature_algorithm': - 'Identifier for certificate signature algorithm. We recommend using names found in Go Lang Crypto library. See https://github.com/golang/go/blob/go1.14/src/crypto/x509/x509.go#L337-L353.', - 'threat.enrichments.indicator.file.x509.subject.common_name': - 'List of common names (CN) of subject.', - 'threat.enrichments.indicator.file.x509.subject.country': 'List of country \\(C) code', - 'threat.enrichments.indicator.file.x509.subject.distinguished_name': - 'Distinguished name (DN) of the certificate subject entity.', - 'threat.enrichments.indicator.file.x509.subject.locality': 'List of locality names (L)', - 'threat.enrichments.indicator.file.x509.subject.organization': - 'List of organizations (O) of subject.', - 'threat.enrichments.indicator.file.x509.subject.organizational_unit': - 'List of organizational units (OU) of subject.', - 'threat.enrichments.indicator.file.x509.subject.state_or_province': - 'List of state or province names (ST, S, or P)', - 'threat.enrichments.indicator.file.x509.version_number': 'Version of x509 format.', - 'threat.enrichments.indicator.first_seen': - 'The date and time when intelligence source first reported sighting this indicator.', - 'threat.enrichments.indicator.geo.city_name': 'City name.', - 'threat.enrichments.indicator.geo.continent_code': - "Two-letter code representing continent's name.", - 'threat.enrichments.indicator.geo.continent_name': 'Name of the continent.', - 'threat.enrichments.indicator.geo.country_iso_code': 'Country ISO code.', - 'threat.enrichments.indicator.geo.country_name': 'Country name.', - 'threat.enrichments.indicator.geo.location': 'Longitude and latitude.', - 'threat.enrichments.indicator.geo.name': - 'User-defined description of a location, at the level of granularity they care about.\nCould be the name of their data 
centers, the floor number, if this describes a local physical entity, city names.\nNot typically used in automated geolocation.', - 'threat.enrichments.indicator.geo.postal_code': - 'Postal code associated with the location.\nValues appropriate for this field may also be known as a postcode or ZIP code and will vary widely from country to country.', - 'threat.enrichments.indicator.geo.region_iso_code': 'Region ISO code.', - 'threat.enrichments.indicator.geo.region_name': 'Region name.', - 'threat.enrichments.indicator.geo.timezone': - 'The time zone of the location, such as IANA time zone name.', - 'threat.enrichments.indicator.ip': - 'Identifies a threat indicator as an IP address (irrespective of direction).', - 'threat.enrichments.indicator.last_seen': - 'The date and time when intelligence source last reported sighting this indicator.', - 'threat.enrichments.indicator.marking.tlp': 'Traffic Light Protocol sharing markings.', - 'threat.enrichments.indicator.marking.tlp_version': 'Traffic Light Protocol version.', - 'threat.enrichments.indicator.modified_at': - 'The date and time when intelligence source last modified information for this indicator.', - 'threat.enrichments.indicator.name': 'The display name indicator in an UI friendly format', - 'threat.enrichments.indicator.port': - 'Identifies a threat indicator as a port number (irrespective of direction).', - 'threat.enrichments.indicator.provider': "The name of the indicator's provider.", - 'threat.enrichments.indicator.reference': - 'Reference URL linking to additional information about this indicator.', - 'threat.enrichments.indicator.registry.data.bytes': - 'Original bytes written with base64 encoding.\nFor Windows registry operations, such as SetValueEx and RegQueryValueEx, this corresponds to the data pointed by `lp_data`. 
This is optional but provides better recoverability and should be populated for REG_BINARY encoded values.', - 'threat.enrichments.indicator.registry.data.strings': - 'Content when writing string types.\nPopulated as an array when writing string data to the registry. For single string registry types (REG_SZ, REG_EXPAND_SZ), this should be an array with one string. For sequences of string with REG_MULTI_SZ, this array will be variable length. For numeric data, such as REG_DWORD and REG_QWORD, this should be populated with the decimal representation (e.g `"1"`).', - 'threat.enrichments.indicator.registry.data.type': 'Standard registry type for encoding contents', - 'threat.enrichments.indicator.registry.hive': 'Abbreviated name for the hive.', - 'threat.enrichments.indicator.registry.key': 'Hive-relative path of keys.', - 'threat.enrichments.indicator.registry.path': 'Full path, including hive, key and value', - 'threat.enrichments.indicator.registry.value': 'Name of the value written.', - 'threat.enrichments.indicator.scanner_stats': - 'Count of AV/EDR vendors that successfully detected malicious file or URL.', - 'threat.enrichments.indicator.sightings': - 'Number of times this indicator was observed conducting threat activity.', - 'threat.enrichments.indicator.type': - 'Type of indicator as represented by Cyber Observable in STIX 2.0.', - 'threat.enrichments.indicator.url.domain': - 'Domain of the url, such as "www.elastic.co".\nIn some cases a URL may refer to an IP and/or port directly, without a domain name. 
In this case, the IP address would go to the `domain` field.\nIf the URL contains a literal IPv6 address enclosed by `[` and `]` (IETF RFC 2732), the `[` and `]` characters should also be captured in the `domain` field.', - 'threat.enrichments.indicator.url.extension': - 'The field contains the file extension from the original request url, excluding the leading dot.\nThe file extension is only set if it exists, as not every url has a file extension.\nThe leading period must not be included. For example, the value must be "png", not ".png".\nNote that when the file name has multiple extensions (example.tar.gz), only the last one should be captured ("gz", not "tar.gz").', - 'threat.enrichments.indicator.url.fragment': - 'Portion of the url after the `#`, such as "top".\nThe `#` is not part of the fragment.', - 'threat.enrichments.indicator.url.full': - 'If full URLs are important to your use case, they should be stored in `url.full`, whether this field is reconstructed or present in the event source.', - 'threat.enrichments.indicator.url.original': - 'Unmodified original url as seen in the event source.\nNote that in network monitoring, the observed URL may be a full URL, whereas in access logs, the URL is often just represented as a path.\nThis field is meant to represent the URL as it was observed, complete or not.', - 'threat.enrichments.indicator.url.password': 'Password of the request.', - 'threat.enrichments.indicator.url.path': 'Path of the request, such as "/search".', - 'threat.enrichments.indicator.url.port': 'Port of the request, such as 443.', - 'threat.enrichments.indicator.url.query': - 'The query field describes the query string of the request, such as "q=elasticsearch".\nThe `?` is excluded from the query string. If a URL contains no `?`, there is no query field. If there is a `?` but no query, the query field exists with an empty string. 
The `exists` query can be used to differentiate between the two cases.', - 'threat.enrichments.indicator.url.registered_domain': - 'The highest registered url domain, stripped of the subdomain.\nFor example, the registered domain for "foo.example.com" is "example.com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). Trying to approximate this by simply taking the last two labels will not work well for TLDs such as "co.uk".', - 'threat.enrichments.indicator.url.scheme': - 'Scheme of the request, such as "https".\nNote: The `:` is not part of the scheme.', - 'threat.enrichments.indicator.url.subdomain': - 'The subdomain portion of a fully qualified domain name includes all of the names except the host name under the registered_domain. In a partially qualified domain, or if the the qualification level of the full name cannot be determined, subdomain contains all of the names below the registered domain.\nFor example the subdomain portion of "www.east.mydomain.co.uk" is "east". If the domain has multiple levels of subdomain, such as "sub2.sub1.example.com", the subdomain field should contain "sub2.sub1", with no trailing period.', - 'threat.enrichments.indicator.url.top_level_domain': - 'The effective top level domain (eTLD), also known as the domain suffix, is the last part of the domain name. For example, the top level domain for example.com is "com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). Trying to approximate this by simply taking the last label will not work well for effective TLDs such as "co.uk".', - 'threat.enrichments.indicator.url.username': 'Username of the request.', - 'threat.enrichments.indicator.x509.alternative_names': - 'List of subject alternative names (SAN). 
Name types vary by certificate authority and certificate type but commonly contain IP addresses, DNS names (and wildcards), and email addresses.', - 'threat.enrichments.indicator.x509.issuer.common_name': - 'List of common name (CN) of issuing certificate authority.', - 'threat.enrichments.indicator.x509.issuer.country': 'List of country \\(C) codes', - 'threat.enrichments.indicator.x509.issuer.distinguished_name': - 'Distinguished name (DN) of issuing certificate authority.', - 'threat.enrichments.indicator.x509.issuer.locality': 'List of locality names (L)', - 'threat.enrichments.indicator.x509.issuer.organization': - 'List of organizations (O) of issuing certificate authority.', - 'threat.enrichments.indicator.x509.issuer.organizational_unit': - 'List of organizational units (OU) of issuing certificate authority.', - 'threat.enrichments.indicator.x509.issuer.state_or_province': - 'List of state or province names (ST, S, or P)', - 'threat.enrichments.indicator.x509.not_after': - 'Time at which the certificate is no longer considered valid.', - 'threat.enrichments.indicator.x509.not_before': - 'Time at which the certificate is first considered valid.', - 'threat.enrichments.indicator.x509.public_key_algorithm': - 'Algorithm used to generate the public key.', - 'threat.enrichments.indicator.x509.public_key_curve': - 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', - 'threat.enrichments.indicator.x509.public_key_exponent': - 'Exponent used to derive the public key. This is algorithm specific.', - 'threat.enrichments.indicator.x509.public_key_size': 'The size of the public key space in bits.', - 'threat.enrichments.indicator.x509.serial_number': - 'Unique serial number issued by the certificate authority. For consistency, if this value is alphanumeric, it should be formatted without colons and uppercase characters.', - 'threat.enrichments.indicator.x509.signature_algorithm': - 'Identifier for certificate signature algorithm. 
We recommend using names found in Go Lang Crypto library. See https://github.com/golang/go/blob/go1.14/src/crypto/x509/x509.go#L337-L353.', - 'threat.enrichments.indicator.x509.subject.common_name': 'List of common names (CN) of subject.', - 'threat.enrichments.indicator.x509.subject.country': 'List of country \\(C) code', - 'threat.enrichments.indicator.x509.subject.distinguished_name': - 'Distinguished name (DN) of the certificate subject entity.', - 'threat.enrichments.indicator.x509.subject.locality': 'List of locality names (L)', - 'threat.enrichments.indicator.x509.subject.organization': 'List of organizations (O) of subject.', - 'threat.enrichments.indicator.x509.subject.organizational_unit': - 'List of organizational units (OU) of subject.', - 'threat.enrichments.indicator.x509.subject.state_or_province': - 'List of state or province names (ST, S, or P)', - 'threat.enrichments.indicator.x509.version_number': 'Version of x509 format.', - 'threat.enrichments.matched.atomic': - 'Identifies the atomic indicator value that matched a local environment endpoint or network event.', - 'threat.enrichments.matched.field': - 'Identifies the field of the atomic indicator that matched a local environment endpoint or network event.', - 'threat.enrichments.matched.id': - 'Identifies the _id of the indicator document enriching the event.', - 'threat.enrichments.matched.index': - 'Identifies the _index of the indicator document enriching the event.', - 'threat.enrichments.matched.occurred': 'Indicates when the indicator match was generated', - 'threat.enrichments.matched.type': - 'Identifies the type of match that caused the event to be enriched with the given indicator', - 'threat.feed.dashboard_id': - 'The saved object ID of the dashboard belonging to the threat feed for displaying dashboard links to threat feeds in Kibana.', - 'threat.feed.description': 'Description of the threat feed in a UI friendly format.', - 'threat.feed.name': 'The name of the threat feed in 
UI friendly format.', - 'threat.feed.reference': 'Reference information for the threat feed in a UI friendly format.', - 'threat.framework': - 'Name of the threat framework used to further categorize and classify the tactic and technique of the reported threat. Framework classification can be provided by detecting systems, evaluated at ingest time, or retrospectively tagged to events.', - 'threat.group.alias': - 'The alias(es) of the group for a set of related intrusion activity that are tracked by a common name in the security community.\nWhile not required, you can use a MITRE ATT&CK\u00ae group alias(es).', - 'threat.group.id': - 'The id of the group for a set of related intrusion activity that are tracked by a common name in the security community.\nWhile not required, you can use a MITRE ATT&CK\u00ae group id.', - 'threat.group.name': - 'The name of the group for a set of related intrusion activity that are tracked by a common name in the security community.\nWhile not required, you can use a MITRE ATT&CK\u00ae group name.', - 'threat.group.reference': - 'The reference URL of the group for a set of related intrusion activity that are tracked by a common name in the security community.\nWhile not required, you can use a MITRE ATT&CK\u00ae group reference URL.', - 'threat.indicator.as.number': - 'Unique number allocated to the autonomous system. The autonomous system number (ASN) uniquely identifies each network on the Internet.', - 'threat.indicator.as.organization.name': 'Organization name.', - 'threat.indicator.confidence': - 'Identifies the vendor-neutral confidence rating using the None/Low/Medium/High scale defined in Appendix A of the STIX 2.1 framework. 
Vendor-specific confidence scales may be added as custom fields.', - 'threat.indicator.description': 'Describes the type of action conducted by the threat.', - 'threat.indicator.email.address': - 'Identifies a threat indicator as an email address (irrespective of direction).', - 'threat.indicator.file.accessed': - 'Last time the file was accessed.\nNote that not all filesystems keep track of access time.', - 'threat.indicator.file.attributes': - "Array of file attributes.\nAttributes names will vary by platform. Here's a non-exhaustive list of values that are expected in this field: archive, compressed, directory, encrypted, execute, hidden, read, readonly, system, write.", - 'threat.indicator.file.code_signature.digest_algorithm': - 'The hashing algorithm used to sign the process.\nThis value can distinguish signatures when a file is signed multiple times by the same signer but with a different digest algorithm.', - 'threat.indicator.file.code_signature.exists': 'Boolean to capture if a signature is present.', - 'threat.indicator.file.code_signature.signing_id': - 'The identifier used to sign the process.\nThis is used to identify the application manufactured by a software vendor. The field is relevant to Apple *OS only.', - 'threat.indicator.file.code_signature.status': - 'Additional information about the certificate status.\nThis is useful for logging cryptographic errors with the certificate validity or trust status. Leave unpopulated if the validity or trust of the certificate was unchecked.', - 'threat.indicator.file.code_signature.subject_name': 'Subject name of the code signer', - 'threat.indicator.file.code_signature.team_id': - 'The team identifier used to sign the process.\nThis is used to identify the team or vendor of a software product. 
The field is relevant to Apple *OS only.', - 'threat.indicator.file.code_signature.timestamp': - 'Date and time when the code signature was generated and signed.', - 'threat.indicator.file.code_signature.trusted': - 'Stores the trust status of the certificate chain.\nValidating the trust of the certificate chain may be complicated, and this field should only be populated by tools that actively check the status.', - 'threat.indicator.file.code_signature.valid': - 'Boolean to capture if the digital signature is verified against the binary content.\nLeave unpopulated if a certificate was unchecked.', - 'threat.indicator.file.created': - 'File creation time.\nNote that not all filesystems store the creation time.', - 'threat.indicator.file.ctime': - 'Last time the file attributes or metadata changed.\nNote that changes to the file content will update `mtime`. This implies `ctime` will be adjusted at the same time, since `mtime` is an attribute of the file.', - 'threat.indicator.file.device': 'Device that is the source of the file.', - 'threat.indicator.file.directory': - 'Directory where the file is located. It should include the drive letter, when appropriate.', - 'threat.indicator.file.drive_letter': - 'Drive letter where the file is located. This field is only relevant on Windows.\nThe value should be uppercase, and not include the colon.', - 'threat.indicator.file.elf.architecture': 'Machine architecture of the ELF file.', - 'threat.indicator.file.elf.byte_order': 'Byte sequence of ELF file.', - 'threat.indicator.file.elf.cpu_type': 'CPU type of the ELF file.', - 'threat.indicator.file.elf.creation_date': - "Extracted when possible from the file's metadata. Indicates when it was built or compiled. It can also be faked by malware creators.", - 'threat.indicator.file.elf.exports': 'List of exported element names and types.', - 'threat.indicator.file.elf.go_import_hash': - 'A hash of the Go language imports in an ELF file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', - 'threat.indicator.file.elf.go_imports': 'List of imported Go language element names and types.', - 'threat.indicator.file.elf.go_imports_names_entropy': - 'Shannon entropy calculation from the list of Go imports.', - 'threat.indicator.file.elf.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'threat.indicator.file.elf.go_stripped': - 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', - 'threat.indicator.file.elf.header.abi_version': - 'Version of the ELF Application Binary Interface (ABI).', - 'threat.indicator.file.elf.header.class': 'Header class of the ELF file.', - 'threat.indicator.file.elf.header.data': 'Data table of the ELF header.', - 'threat.indicator.file.elf.header.entrypoint': 'Header entrypoint of the ELF file.', - 'threat.indicator.file.elf.header.object_version': '"0x1" for original ELF files.', - 'threat.indicator.file.elf.header.os_abi': 'Application Binary Interface (ABI) of the Linux OS.', - 'threat.indicator.file.elf.header.type': 'Header type of the ELF file.', - 'threat.indicator.file.elf.header.version': 'Version of the ELF header.', - 'threat.indicator.file.elf.import_hash': - 'A hash of the imports in an ELF file. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is an ELF implementation of the Windows PE imphash.', - 'threat.indicator.file.elf.imports': 'List of imported element names and types.', - 'threat.indicator.file.elf.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'threat.indicator.file.elf.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'threat.indicator.file.elf.sections': - 'An array containing an object for each section of the ELF file.\nThe keys that should be present in these objects are defined by sub-fields underneath `elf.sections.*`.', - 'threat.indicator.file.elf.sections.chi2': 'Chi-square probability distribution of the section.', - 'threat.indicator.file.elf.sections.entropy': 'Shannon entropy calculation from the section.', - 'threat.indicator.file.elf.sections.flags': 'ELF Section List flags.', - 'threat.indicator.file.elf.sections.name': 'ELF Section List name.', - 'threat.indicator.file.elf.sections.physical_offset': 'ELF Section List offset.', - 'threat.indicator.file.elf.sections.physical_size': 'ELF Section List physical size.', - 'threat.indicator.file.elf.sections.type': 'ELF Section List type.', - 'threat.indicator.file.elf.sections.var_entropy': - 'Variance for Shannon entropy calculation from the section.', - 'threat.indicator.file.elf.sections.virtual_address': 'ELF Section List virtual address.', - 'threat.indicator.file.elf.sections.virtual_size': 'ELF Section List virtual size.', - 'threat.indicator.file.elf.segments': - 'An array containing an object for each segment of the ELF file.\nThe keys that should be present in these objects are defined by sub-fields underneath `elf.segments.*`.', - 'threat.indicator.file.elf.segments.sections': 'ELF object segment 
sections.', - 'threat.indicator.file.elf.segments.type': 'ELF object segment type.', - 'threat.indicator.file.elf.shared_libraries': 'List of shared libraries used by this ELF object.', - 'threat.indicator.file.elf.telfhash': 'telfhash symbol hash for ELF file.', - 'threat.indicator.file.extension': - 'File extension, excluding the leading dot.\nNote that when the file name has multiple extensions (example.tar.gz), only the last one should be captured ("gz", not "tar.gz").', - 'threat.indicator.file.fork_name': - 'A fork is additional data associated with a filesystem object.\nOn Linux, a resource fork is used to store additional data with a filesystem object. A file always has at least one fork for the data portion, and additional forks may exist.\nOn NTFS, this is analogous to an Alternate Data Stream (ADS), and the default data stream for a file is just called $DATA. Zone.Identifier is commonly used by Windows to track contents downloaded from the Internet. An ADS is typically of the form: `C:\\path\\to\\filename.extension:some_fork_name`, and `some_fork_name` is the value that should populate `fork_name`. `filename.extension` should populate `file.name`, and `extension` should populate `file.extension`. 
The full path, `file.path`, will include the fork name.', - 'threat.indicator.file.gid': 'Primary group ID (GID) of the file.', - 'threat.indicator.file.group': 'Primary group name of the file.', - 'threat.indicator.file.hash.md5': 'MD5 hash.', - 'threat.indicator.file.hash.sha1': 'SHA1 hash.', - 'threat.indicator.file.hash.sha256': 'SHA256 hash.', - 'threat.indicator.file.hash.sha384': 'SHA384 hash.', - 'threat.indicator.file.hash.sha512': 'SHA512 hash.', - 'threat.indicator.file.hash.ssdeep': 'SSDEEP hash.', - 'threat.indicator.file.hash.tlsh': 'TLSH hash.', - 'threat.indicator.file.inode': 'Inode representing the file in the filesystem.', - 'threat.indicator.file.mime_type': - 'MIME type should identify the format of the file or stream of bytes using https://www.iana.org/assignments/media-types/media-types.xhtml[IANA official types], where possible. When more than one type is applicable, the most specific type should be used.', - 'threat.indicator.file.mode': 'Mode of the file in octal representation.', - 'threat.indicator.file.mtime': 'Last time the file content was modified.', - 'threat.indicator.file.name': 'Name of the file including the extension, without the directory.', - 'threat.indicator.file.owner': "File owner's username.", - 'threat.indicator.file.path': - 'Full path to the file, including the file name. It should include the drive letter, when appropriate.', - 'threat.indicator.file.pe.architecture': 'CPU architecture target for the file.', - 'threat.indicator.file.pe.company': - 'Internal company name of the file, provided at compile-time.', - 'threat.indicator.file.pe.description': - 'Internal description of the file, provided at compile-time.', - 'threat.indicator.file.pe.file_version': - 'Internal version of the file, provided at compile-time.', - 'threat.indicator.file.pe.go_import_hash': - 'A hash of the Go language imports in a PE file excluding standard library imports. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThe algorithm used to calculate the Go symbol hash and a reference implementation are available [here](https://github.com/elastic/toutoumomoma).', - 'threat.indicator.file.pe.go_imports': 'List of imported Go language element names and types.', - 'threat.indicator.file.pe.go_imports_names_entropy': - 'Shannon entropy calculation from the list of Go imports.', - 'threat.indicator.file.pe.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'threat.indicator.file.pe.go_stripped': - 'Set to true if the file is a Go executable that has had its symbols stripped or obfuscated and false if an unobfuscated Go executable.', - 'threat.indicator.file.pe.imphash': - 'A hash of the imports in a PE file. An imphash -- or import hash -- can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nLearn more at https://www.fireeye.com/blog/threat-research/2014/01/tracking-malware-import-hashing.html.', - 'threat.indicator.file.pe.import_hash': - 'A hash of the imports in a PE file. 
An import hash can be used to fingerprint binaries even after recompilation or other code-level transformations have occurred, which would change more traditional hash values.\nThis is a synonym for imphash.', - 'threat.indicator.file.pe.imports': 'List of imported element names and types.', - 'threat.indicator.file.pe.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'threat.indicator.file.pe.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'threat.indicator.file.pe.original_file_name': - 'Internal name of the file, provided at compile-time.', - 'threat.indicator.file.pe.pehash': - 'A hash of the PE header and data from one or more PE sections. An pehash can be used to cluster files by transforming structural information about a file into a hash value.\nLearn more at https://www.usenix.org/legacy/events/leet09/tech/full_papers/wicherski/wicherski_html/index.html.', - 'threat.indicator.file.pe.product': - 'Internal product name of the file, provided at compile-time.', - 'threat.indicator.file.pe.sections': - 'An array containing an object for each section of the PE file.\nThe keys that should be present in these objects are defined by sub-fields underneath `pe.sections.*`.', - 'threat.indicator.file.pe.sections.entropy': 'Shannon entropy calculation from the section.', - 'threat.indicator.file.pe.sections.name': 'PE Section List name.', - 'threat.indicator.file.pe.sections.physical_size': 'PE Section List physical size.', - 'threat.indicator.file.pe.sections.var_entropy': - 'Variance for Shannon entropy calculation from the section.', - 'threat.indicator.file.pe.sections.virtual_size': - 'PE Section List virtual size. 
This is always the same as `physical_size`.', - 'threat.indicator.file.size': 'File size in bytes.\nOnly relevant when `file.type` is "file".', - 'threat.indicator.file.target_path': 'Target path for symlinks.', - 'threat.indicator.file.type': 'File type (file, dir, or symlink).', - 'threat.indicator.file.uid': 'The user ID (UID) or security identifier (SID) of the file owner.', - 'threat.indicator.file.x509.alternative_names': - 'List of subject alternative names (SAN). Name types vary by certificate authority and certificate type but commonly contain IP addresses, DNS names (and wildcards), and email addresses.', - 'threat.indicator.file.x509.issuer.common_name': - 'List of common name (CN) of issuing certificate authority.', - 'threat.indicator.file.x509.issuer.country': 'List of country \\(C) codes', - 'threat.indicator.file.x509.issuer.distinguished_name': - 'Distinguished name (DN) of issuing certificate authority.', - 'threat.indicator.file.x509.issuer.locality': 'List of locality names (L)', - 'threat.indicator.file.x509.issuer.organization': - 'List of organizations (O) of issuing certificate authority.', - 'threat.indicator.file.x509.issuer.organizational_unit': - 'List of organizational units (OU) of issuing certificate authority.', - 'threat.indicator.file.x509.issuer.state_or_province': - 'List of state or province names (ST, S, or P)', - 'threat.indicator.file.x509.not_after': - 'Time at which the certificate is no longer considered valid.', - 'threat.indicator.file.x509.not_before': - 'Time at which the certificate is first considered valid.', - 'threat.indicator.file.x509.public_key_algorithm': 'Algorithm used to generate the public key.', - 'threat.indicator.file.x509.public_key_curve': - 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', - 'threat.indicator.file.x509.public_key_exponent': - 'Exponent used to derive the public key. 
This is algorithm specific.', - 'threat.indicator.file.x509.public_key_size': 'The size of the public key space in bits.', - 'threat.indicator.file.x509.serial_number': - 'Unique serial number issued by the certificate authority. For consistency, if this value is alphanumeric, it should be formatted without colons and uppercase characters.', - 'threat.indicator.file.x509.signature_algorithm': - 'Identifier for certificate signature algorithm. We recommend using names found in Go Lang Crypto library. See https://github.com/golang/go/blob/go1.14/src/crypto/x509/x509.go#L337-L353.', - 'threat.indicator.file.x509.subject.common_name': 'List of common names (CN) of subject.', - 'threat.indicator.file.x509.subject.country': 'List of country \\(C) code', - 'threat.indicator.file.x509.subject.distinguished_name': - 'Distinguished name (DN) of the certificate subject entity.', - 'threat.indicator.file.x509.subject.locality': 'List of locality names (L)', - 'threat.indicator.file.x509.subject.organization': 'List of organizations (O) of subject.', - 'threat.indicator.file.x509.subject.organizational_unit': - 'List of organizational units (OU) of subject.', - 'threat.indicator.file.x509.subject.state_or_province': - 'List of state or province names (ST, S, or P)', - 'threat.indicator.file.x509.version_number': 'Version of x509 format.', - 'threat.indicator.first_seen': - 'The date and time when intelligence source first reported sighting this indicator.', - 'threat.indicator.geo.city_name': 'City name.', - 'threat.indicator.geo.continent_code': "Two-letter code representing continent's name.", - 'threat.indicator.geo.continent_name': 'Name of the continent.', - 'threat.indicator.geo.country_iso_code': 'Country ISO code.', - 'threat.indicator.geo.country_name': 'Country name.', - 'threat.indicator.geo.location': 'Longitude and latitude.', - 'threat.indicator.geo.name': - 'User-defined description of a location, at the level of granularity they care about.\nCould be the 
name of their data centers, the floor number, if this describes a local physical entity, city names.\nNot typically used in automated geolocation.', - 'threat.indicator.geo.postal_code': - 'Postal code associated with the location.\nValues appropriate for this field may also be known as a postcode or ZIP code and will vary widely from country to country.', - 'threat.indicator.geo.region_iso_code': 'Region ISO code.', - 'threat.indicator.geo.region_name': 'Region name.', - 'threat.indicator.geo.timezone': 'The time zone of the location, such as IANA time zone name.', - 'threat.indicator.ip': - 'Identifies a threat indicator as an IP address (irrespective of direction).', - 'threat.indicator.last_seen': - 'The date and time when intelligence source last reported sighting this indicator.', - 'threat.indicator.marking.tlp': 'Traffic Light Protocol sharing markings.', - 'threat.indicator.marking.tlp_version': 'Traffic Light Protocol version.', - 'threat.indicator.modified_at': - 'The date and time when intelligence source last modified information for this indicator.', - 'threat.indicator.name': 'The display name indicator in an UI friendly format', - 'threat.indicator.port': - 'Identifies a threat indicator as a port number (irrespective of direction).', - 'threat.indicator.provider': "The name of the indicator's provider.", - 'threat.indicator.reference': - 'Reference URL linking to additional information about this indicator.', - 'threat.indicator.registry.data.bytes': - 'Original bytes written with base64 encoding.\nFor Windows registry operations, such as SetValueEx and RegQueryValueEx, this corresponds to the data pointed by `lp_data`. This is optional but provides better recoverability and should be populated for REG_BINARY encoded values.', - 'threat.indicator.registry.data.strings': - 'Content when writing string types.\nPopulated as an array when writing string data to the registry. 
For single string registry types (REG_SZ, REG_EXPAND_SZ), this should be an array with one string. For sequences of string with REG_MULTI_SZ, this array will be variable length. For numeric data, such as REG_DWORD and REG_QWORD, this should be populated with the decimal representation (e.g `"1"`).', - 'threat.indicator.registry.data.type': 'Standard registry type for encoding contents', - 'threat.indicator.registry.hive': 'Abbreviated name for the hive.', - 'threat.indicator.registry.key': 'Hive-relative path of keys.', - 'threat.indicator.registry.path': 'Full path, including hive, key and value', - 'threat.indicator.registry.value': 'Name of the value written.', - 'threat.indicator.scanner_stats': - 'Count of AV/EDR vendors that successfully detected malicious file or URL.', - 'threat.indicator.sightings': - 'Number of times this indicator was observed conducting threat activity.', - 'threat.indicator.type': 'Type of indicator as represented by Cyber Observable in STIX 2.0.', - 'threat.indicator.url.domain': - 'Domain of the url, such as "www.elastic.co".\nIn some cases a URL may refer to an IP and/or port directly, without a domain name. In this case, the IP address would go to the `domain` field.\nIf the URL contains a literal IPv6 address enclosed by `[` and `]` (IETF RFC 2732), the `[` and `]` characters should also be captured in the `domain` field.', - 'threat.indicator.url.extension': - 'The field contains the file extension from the original request url, excluding the leading dot.\nThe file extension is only set if it exists, as not every url has a file extension.\nThe leading period must not be included. 
For example, the value must be "png", not ".png".\nNote that when the file name has multiple extensions (example.tar.gz), only the last one should be captured ("gz", not "tar.gz").', - 'threat.indicator.url.fragment': - 'Portion of the url after the `#`, such as "top".\nThe `#` is not part of the fragment.', - 'threat.indicator.url.full': - 'If full URLs are important to your use case, they should be stored in `url.full`, whether this field is reconstructed or present in the event source.', - 'threat.indicator.url.original': - 'Unmodified original url as seen in the event source.\nNote that in network monitoring, the observed URL may be a full URL, whereas in access logs, the URL is often just represented as a path.\nThis field is meant to represent the URL as it was observed, complete or not.', - 'threat.indicator.url.password': 'Password of the request.', - 'threat.indicator.url.path': 'Path of the request, such as "/search".', - 'threat.indicator.url.port': 'Port of the request, such as 443.', - 'threat.indicator.url.query': - 'The query field describes the query string of the request, such as "q=elasticsearch".\nThe `?` is excluded from the query string. If a URL contains no `?`, there is no query field. If there is a `?` but no query, the query field exists with an empty string. The `exists` query can be used to differentiate between the two cases.', - 'threat.indicator.url.registered_domain': - 'The highest registered url domain, stripped of the subdomain.\nFor example, the registered domain for "foo.example.com" is "example.com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). 
Trying to approximate this by simply taking the last two labels will not work well for TLDs such as "co.uk".', - 'threat.indicator.url.scheme': - 'Scheme of the request, such as "https".\nNote: The `:` is not part of the scheme.', - 'threat.indicator.url.subdomain': - 'The subdomain portion of a fully qualified domain name includes all of the names except the host name under the registered_domain. In a partially qualified domain, or if the the qualification level of the full name cannot be determined, subdomain contains all of the names below the registered domain.\nFor example the subdomain portion of "www.east.mydomain.co.uk" is "east". If the domain has multiple levels of subdomain, such as "sub2.sub1.example.com", the subdomain field should contain "sub2.sub1", with no trailing period.', - 'threat.indicator.url.top_level_domain': - 'The effective top level domain (eTLD), also known as the domain suffix, is the last part of the domain name. For example, the top level domain for example.com is "com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). Trying to approximate this by simply taking the last label will not work well for effective TLDs such as "co.uk".', - 'threat.indicator.url.username': 'Username of the request.', - 'threat.indicator.x509.alternative_names': - 'List of subject alternative names (SAN). 
Name types vary by certificate authority and certificate type but commonly contain IP addresses, DNS names (and wildcards), and email addresses.', - 'threat.indicator.x509.issuer.common_name': - 'List of common name (CN) of issuing certificate authority.', - 'threat.indicator.x509.issuer.country': 'List of country \\(C) codes', - 'threat.indicator.x509.issuer.distinguished_name': - 'Distinguished name (DN) of issuing certificate authority.', - 'threat.indicator.x509.issuer.locality': 'List of locality names (L)', - 'threat.indicator.x509.issuer.organization': - 'List of organizations (O) of issuing certificate authority.', - 'threat.indicator.x509.issuer.organizational_unit': - 'List of organizational units (OU) of issuing certificate authority.', - 'threat.indicator.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', - 'threat.indicator.x509.not_after': 'Time at which the certificate is no longer considered valid.', - 'threat.indicator.x509.not_before': 'Time at which the certificate is first considered valid.', - 'threat.indicator.x509.public_key_algorithm': 'Algorithm used to generate the public key.', - 'threat.indicator.x509.public_key_curve': - 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', - 'threat.indicator.x509.public_key_exponent': - 'Exponent used to derive the public key. This is algorithm specific.', - 'threat.indicator.x509.public_key_size': 'The size of the public key space in bits.', - 'threat.indicator.x509.serial_number': - 'Unique serial number issued by the certificate authority. For consistency, if this value is alphanumeric, it should be formatted without colons and uppercase characters.', - 'threat.indicator.x509.signature_algorithm': - 'Identifier for certificate signature algorithm. We recommend using names found in Go Lang Crypto library. 
See https://github.com/golang/go/blob/go1.14/src/crypto/x509/x509.go#L337-L353.', - 'threat.indicator.x509.subject.common_name': 'List of common names (CN) of subject.', - 'threat.indicator.x509.subject.country': 'List of country \\(C) code', - 'threat.indicator.x509.subject.distinguished_name': - 'Distinguished name (DN) of the certificate subject entity.', - 'threat.indicator.x509.subject.locality': 'List of locality names (L)', - 'threat.indicator.x509.subject.organization': 'List of organizations (O) of subject.', - 'threat.indicator.x509.subject.organizational_unit': - 'List of organizational units (OU) of subject.', - 'threat.indicator.x509.subject.state_or_province': - 'List of state or province names (ST, S, or P)', - 'threat.indicator.x509.version_number': 'Version of x509 format.', - 'threat.software.alias': - 'The alias(es) of the software for a set of related intrusion activity that are tracked by a common name in the security community.\nWhile not required, you can use a MITRE ATT&CK\u00ae associated software description.', - 'threat.software.id': - 'The id of the software used by this threat to conduct behavior commonly modeled using MITRE ATT&CK\u00ae.\nWhile not required, you can use a MITRE ATT&CK\u00ae software id.', - 'threat.software.name': - 'The name of the software used by this threat to conduct behavior commonly modeled using MITRE ATT&CK\u00ae.\nWhile not required, you can use a MITRE ATT&CK\u00ae software name.', - 'threat.software.platforms': - 'The platforms of the software used by this threat to conduct behavior commonly modeled using MITRE ATT&CK\u00ae.\nWhile not required, you can use MITRE ATT&CK\u00ae software platform values.', - 'threat.software.reference': - 'The reference URL of the software used by this threat to conduct behavior commonly modeled using MITRE ATT&CK\u00ae.\nWhile not required, you can use a MITRE ATT&CK\u00ae software reference URL.', - 'threat.software.type': - 'The type of software used by this threat 
to conduct behavior commonly modeled using MITRE ATT&CK\u00ae.\nWhile not required, you can use a MITRE ATT&CK\u00ae software type.', - 'threat.tactic.id': - 'The id of tactic used by this threat. You can use a MITRE ATT&CK\u00ae tactic, for example. (ex. https://attack.mitre.org/tactics/TA0002/ )', - 'threat.tactic.name': - 'Name of the type of tactic used by this threat. You can use a MITRE ATT&CK\u00ae tactic, for example. (ex. https://attack.mitre.org/tactics/TA0002/)', - 'threat.tactic.reference': - 'The reference url of tactic used by this threat. You can use a MITRE ATT&CK\u00ae tactic, for example. (ex. https://attack.mitre.org/tactics/TA0002/ )', - 'threat.technique.id': - 'The id of technique used by this threat. You can use a MITRE ATT&CK\u00ae technique, for example. (ex. https://attack.mitre.org/techniques/T1059/)', - 'threat.technique.name': - 'The name of technique used by this threat. You can use a MITRE ATT&CK\u00ae technique, for example. (ex. https://attack.mitre.org/techniques/T1059/)', - 'threat.technique.reference': - 'The reference url of technique used by this threat. You can use a MITRE ATT&CK\u00ae technique, for example. (ex. https://attack.mitre.org/techniques/T1059/)', - 'threat.technique.subtechnique.id': - 'The full id of subtechnique used by this threat. You can use a MITRE ATT&CK\u00ae subtechnique, for example. (ex. https://attack.mitre.org/techniques/T1059/001/)', - 'threat.technique.subtechnique.name': - 'The name of subtechnique used by this threat. You can use a MITRE ATT&CK\u00ae subtechnique, for example. (ex. https://attack.mitre.org/techniques/T1059/001/)', - 'threat.technique.subtechnique.reference': - 'The reference url of subtechnique used by this threat. You can use a MITRE ATT&CK\u00ae subtechnique, for example. (ex. 
https://attack.mitre.org/techniques/T1059/001/)', - 'tls.cipher': 'String indicating the cipher used during the current connection.', - 'tls.client.certificate': - 'PEM-encoded stand-alone certificate offered by the client. This is usually mutually-exclusive of `client.certificate_chain` since this value also exists in that list.', - 'tls.client.certificate_chain': - 'Array of PEM-encoded certificates that make up the certificate chain offered by the client. This is usually mutually-exclusive of `client.certificate` since that value should be the first certificate in the chain.', - 'tls.client.hash.md5': - 'Certificate fingerprint using the MD5 digest of DER-encoded version of certificate offered by the client. For consistency with other hash values, this value should be formatted as an uppercase hash.', - 'tls.client.hash.sha1': - 'Certificate fingerprint using the SHA1 digest of DER-encoded version of certificate offered by the client. For consistency with other hash values, this value should be formatted as an uppercase hash.', - 'tls.client.hash.sha256': - 'Certificate fingerprint using the SHA256 digest of DER-encoded version of certificate offered by the client. For consistency with other hash values, this value should be formatted as an uppercase hash.', - 'tls.client.issuer': - 'Distinguished name of subject of the issuer of the x.509 certificate presented by the client.', - 'tls.client.ja3': - 'A hash that identifies clients based on how they perform an SSL/TLS handshake.', - 'tls.client.not_after': - 'Date/Time indicating when client certificate is no longer considered valid.', - 'tls.client.not_before': - 'Date/Time indicating when client certificate is first considered valid.', - 'tls.client.server_name': - 'Also called an SNI, this tells the server which hostname to which the client is attempting to connect to. 
When this value is available, it should get copied to `destination.domain`.', - 'tls.client.subject': - 'Distinguished name of subject of the x.509 certificate presented by the client.', - 'tls.client.supported_ciphers': 'Array of ciphers offered by the client during the client hello.', - 'tls.client.x509.alternative_names': - 'List of subject alternative names (SAN). Name types vary by certificate authority and certificate type but commonly contain IP addresses, DNS names (and wildcards), and email addresses.', - 'tls.client.x509.issuer.common_name': - 'List of common name (CN) of issuing certificate authority.', - 'tls.client.x509.issuer.country': 'List of country \\(C) codes', - 'tls.client.x509.issuer.distinguished_name': - 'Distinguished name (DN) of issuing certificate authority.', - 'tls.client.x509.issuer.locality': 'List of locality names (L)', - 'tls.client.x509.issuer.organization': - 'List of organizations (O) of issuing certificate authority.', - 'tls.client.x509.issuer.organizational_unit': - 'List of organizational units (OU) of issuing certificate authority.', - 'tls.client.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', - 'tls.client.x509.not_after': 'Time at which the certificate is no longer considered valid.', - 'tls.client.x509.not_before': 'Time at which the certificate is first considered valid.', - 'tls.client.x509.public_key_algorithm': 'Algorithm used to generate the public key.', - 'tls.client.x509.public_key_curve': - 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', - 'tls.client.x509.public_key_exponent': - 'Exponent used to derive the public key. This is algorithm specific.', - 'tls.client.x509.public_key_size': 'The size of the public key space in bits.', - 'tls.client.x509.serial_number': - 'Unique serial number issued by the certificate authority. 
For consistency, if this value is alphanumeric, it should be formatted without colons and uppercase characters.', - 'tls.client.x509.signature_algorithm': - 'Identifier for certificate signature algorithm. We recommend using names found in Go Lang Crypto library. See https://github.com/golang/go/blob/go1.14/src/crypto/x509/x509.go#L337-L353.', - 'tls.client.x509.subject.common_name': 'List of common names (CN) of subject.', - 'tls.client.x509.subject.country': 'List of country \\(C) code', - 'tls.client.x509.subject.distinguished_name': - 'Distinguished name (DN) of the certificate subject entity.', - 'tls.client.x509.subject.locality': 'List of locality names (L)', - 'tls.client.x509.subject.organization': 'List of organizations (O) of subject.', - 'tls.client.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', - 'tls.client.x509.subject.state_or_province': 'List of state or province names (ST, S, or P)', - 'tls.client.x509.version_number': 'Version of x509 format.', - 'tls.curve': 'String indicating the curve used for the given cipher, when applicable.', - 'tls.established': - 'Boolean flag indicating if the TLS negotiation was successful and transitioned to an encrypted tunnel.', - 'tls.next_protocol': - 'String indicating the protocol being tunneled. Per the values in the IANA registry (https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids), this string should be lower case.', - 'tls.resumed': - 'Boolean flag indicating if this TLS connection was resumed from an existing TLS negotiation.', - 'tls.server.certificate': - 'PEM-encoded stand-alone certificate offered by the server. This is usually mutually-exclusive of `server.certificate_chain` since this value also exists in that list.', - 'tls.server.certificate_chain': - 'Array of PEM-encoded certificates that make up the certificate chain offered by the server. 
This is usually mutually-exclusive of `server.certificate` since that value should be the first certificate in the chain.', - 'tls.server.hash.md5': - 'Certificate fingerprint using the MD5 digest of DER-encoded version of certificate offered by the server. For consistency with other hash values, this value should be formatted as an uppercase hash.', - 'tls.server.hash.sha1': - 'Certificate fingerprint using the SHA1 digest of DER-encoded version of certificate offered by the server. For consistency with other hash values, this value should be formatted as an uppercase hash.', - 'tls.server.hash.sha256': - 'Certificate fingerprint using the SHA256 digest of DER-encoded version of certificate offered by the server. For consistency with other hash values, this value should be formatted as an uppercase hash.', - 'tls.server.issuer': 'Subject of the issuer of the x.509 certificate presented by the server.', - 'tls.server.ja3s': - 'A hash that identifies servers based on how they perform an SSL/TLS handshake.', - 'tls.server.not_after': - 'Timestamp indicating when server certificate is no longer considered valid.', - 'tls.server.not_before': - 'Timestamp indicating when server certificate is first considered valid.', - 'tls.server.subject': 'Subject of the x.509 certificate presented by the server.', - 'tls.server.x509.alternative_names': - 'List of subject alternative names (SAN). 
Name types vary by certificate authority and certificate type but commonly contain IP addresses, DNS names (and wildcards), and email addresses.', - 'tls.server.x509.issuer.common_name': - 'List of common name (CN) of issuing certificate authority.', - 'tls.server.x509.issuer.country': 'List of country \\(C) codes', - 'tls.server.x509.issuer.distinguished_name': - 'Distinguished name (DN) of issuing certificate authority.', - 'tls.server.x509.issuer.locality': 'List of locality names (L)', - 'tls.server.x509.issuer.organization': - 'List of organizations (O) of issuing certificate authority.', - 'tls.server.x509.issuer.organizational_unit': - 'List of organizational units (OU) of issuing certificate authority.', - 'tls.server.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', - 'tls.server.x509.not_after': 'Time at which the certificate is no longer considered valid.', - 'tls.server.x509.not_before': 'Time at which the certificate is first considered valid.', - 'tls.server.x509.public_key_algorithm': 'Algorithm used to generate the public key.', - 'tls.server.x509.public_key_curve': - 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', - 'tls.server.x509.public_key_exponent': - 'Exponent used to derive the public key. This is algorithm specific.', - 'tls.server.x509.public_key_size': 'The size of the public key space in bits.', - 'tls.server.x509.serial_number': - 'Unique serial number issued by the certificate authority. For consistency, if this value is alphanumeric, it should be formatted without colons and uppercase characters.', - 'tls.server.x509.signature_algorithm': - 'Identifier for certificate signature algorithm. We recommend using names found in Go Lang Crypto library. 
See https://github.com/golang/go/blob/go1.14/src/crypto/x509/x509.go#L337-L353.', - 'tls.server.x509.subject.common_name': 'List of common names (CN) of subject.', - 'tls.server.x509.subject.country': 'List of country \\(C) code', - 'tls.server.x509.subject.distinguished_name': - 'Distinguished name (DN) of the certificate subject entity.', - 'tls.server.x509.subject.locality': 'List of locality names (L)', - 'tls.server.x509.subject.organization': 'List of organizations (O) of subject.', - 'tls.server.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', - 'tls.server.x509.subject.state_or_province': 'List of state or province names (ST, S, or P)', - 'tls.server.x509.version_number': 'Version of x509 format.', - 'tls.version': 'Numeric part of the version parsed from the original string.', - 'tls.version_protocol': 'Normalized lowercase protocol name parsed from original string.', - 'trace.id': - 'Unique identifier of the trace.\nA trace groups multiple events like transactions that belong together. For example, a user request handled by multiple inter-connected services.', - 'transaction.id': - 'Unique identifier of the transaction within the scope of its trace.\nA transaction is the highest level of work measured within a service, such as a request to a server.', - 'url.domain': - 'Domain of the url, such as "www.elastic.co".\nIn some cases a URL may refer to an IP and/or port directly, without a domain name. In this case, the IP address would go to the `domain` field.\nIf the URL contains a literal IPv6 address enclosed by `[` and `]` (IETF RFC 2732), the `[` and `]` characters should also be captured in the `domain` field.', - 'url.extension': - 'The field contains the file extension from the original request url, excluding the leading dot.\nThe file extension is only set if it exists, as not every url has a file extension.\nThe leading period must not be included. 
For example, the value must be "png", not ".png".\nNote that when the file name has multiple extensions (example.tar.gz), only the last one should be captured ("gz", not "tar.gz").', - 'url.fragment': - 'Portion of the url after the `#`, such as "top".\nThe `#` is not part of the fragment.', - 'url.full': - 'If full URLs are important to your use case, they should be stored in `url.full`, whether this field is reconstructed or present in the event source.', - 'url.original': - 'Unmodified original url as seen in the event source.\nNote that in network monitoring, the observed URL may be a full URL, whereas in access logs, the URL is often just represented as a path.\nThis field is meant to represent the URL as it was observed, complete or not.', - 'url.password': 'Password of the request.', - 'url.path': 'Path of the request, such as "/search".', - 'url.port': 'Port of the request, such as 443.', - 'url.query': - 'The query field describes the query string of the request, such as "q=elasticsearch".\nThe `?` is excluded from the query string. If a URL contains no `?`, there is no query field. If there is a `?` but no query, the query field exists with an empty string. The `exists` query can be used to differentiate between the two cases.', - 'url.registered_domain': - 'The highest registered url domain, stripped of the subdomain.\nFor example, the registered domain for "foo.example.com" is "example.com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). Trying to approximate this by simply taking the last two labels will not work well for TLDs such as "co.uk".', - 'url.scheme': 'Scheme of the request, such as "https".\nNote: The `:` is not part of the scheme.', - 'url.subdomain': - 'The subdomain portion of a fully qualified domain name includes all of the names except the host name under the registered_domain. 
In a partially qualified domain, or if the the qualification level of the full name cannot be determined, subdomain contains all of the names below the registered domain.\nFor example the subdomain portion of "www.east.mydomain.co.uk" is "east". If the domain has multiple levels of subdomain, such as "sub2.sub1.example.com", the subdomain field should contain "sub2.sub1", with no trailing period.', - 'url.top_level_domain': - 'The effective top level domain (eTLD), also known as the domain suffix, is the last part of the domain name. For example, the top level domain for example.com is "com".\nThis value can be determined precisely with a list like the public suffix list (http://publicsuffix.org). Trying to approximate this by simply taking the last label will not work well for effective TLDs such as "co.uk".', - 'url.username': 'Username of the request.', - 'user.changes.domain': - 'Name of the directory the user is a member of.\nFor example, an LDAP or Active Directory domain name.', - 'user.changes.email': 'User email address.', - 'user.changes.full_name': "User's full name, if available.", - 'user.changes.group.domain': - 'Name of the directory the group is a member of.\nFor example, an LDAP or Active Directory domain name.', - 'user.changes.group.id': 'Unique identifier for the group on the system/platform.', - 'user.changes.group.name': 'Name of the group.', - 'user.changes.hash': - 'Unique user hash to correlate information for a user in anonymized form.\nUseful if `user.id` or `user.name` contain confidential information and cannot be used.', - 'user.changes.id': 'Unique identifier of the user.', - 'user.changes.name': 'Short name or login of the user.', - 'user.changes.roles': 'Array of user roles at the time of the event.', - 'user.domain': - 'Name of the directory the user is a member of.\nFor example, an LDAP or Active Directory domain name.', - 'user.effective.domain': - 'Name of the directory the user is a member of.\nFor example, an LDAP or Active 
Directory domain name.', - 'user.effective.email': 'User email address.', - 'user.effective.full_name': "User's full name, if available.", - 'user.effective.group.domain': - 'Name of the directory the group is a member of.\nFor example, an LDAP or Active Directory domain name.', - 'user.effective.group.id': 'Unique identifier for the group on the system/platform.', - 'user.effective.group.name': 'Name of the group.', - 'user.effective.hash': - 'Unique user hash to correlate information for a user in anonymized form.\nUseful if `user.id` or `user.name` contain confidential information and cannot be used.', - 'user.effective.id': 'Unique identifier of the user.', - 'user.effective.name': 'Short name or login of the user.', - 'user.effective.roles': 'Array of user roles at the time of the event.', - 'user.email': 'User email address.', - 'user.full_name': "User's full name, if available.", - 'user.group.domain': - 'Name of the directory the group is a member of.\nFor example, an LDAP or Active Directory domain name.', - 'user.group.id': 'Unique identifier for the group on the system/platform.', - 'user.group.name': 'Name of the group.', - 'user.hash': - 'Unique user hash to correlate information for a user in anonymized form.\nUseful if `user.id` or `user.name` contain confidential information and cannot be used.', - 'user.id': 'Unique identifier of the user.', - 'user.name': 'Short name or login of the user.', - 'user.risk.calculated_level': - 'A risk classification level calculated by an internal system as part of entity analytics and entity risk scoring.', - 'user.risk.calculated_score': - 'A risk classification score calculated by an internal system as part of entity analytics and entity risk scoring.', - 'user.risk.calculated_score_norm': - 'A risk classification score calculated by an internal system as part of entity analytics and entity risk scoring, and normalized to a range of 0 to 100.', - 'user.risk.static_level': - 'A risk classification level obtained 
from outside the system, such as from some external Threat Intelligence Platform.', - 'user.risk.static_score': - 'A risk classification score obtained from outside the system, such as from some external Threat Intelligence Platform.', - 'user.risk.static_score_norm': - 'A risk classification score obtained from outside the system, such as from some external Threat Intelligence Platform, and normalized to a range of 0 to 100.', - 'user.roles': 'Array of user roles at the time of the event.', - 'user.target.domain': - 'Name of the directory the user is a member of.\nFor example, an LDAP or Active Directory domain name.', - 'user.target.email': 'User email address.', - 'user.target.full_name': "User's full name, if available.", - 'user.target.group.domain': - 'Name of the directory the group is a member of.\nFor example, an LDAP or Active Directory domain name.', - 'user.target.group.id': 'Unique identifier for the group on the system/platform.', - 'user.target.group.name': 'Name of the group.', - 'user.target.hash': - 'Unique user hash to correlate information for a user in anonymized form.\nUseful if `user.id` or `user.name` contain confidential information and cannot be used.', - 'user.target.id': 'Unique identifier of the user.', - 'user.target.name': 'Short name or login of the user.', - 'user.target.roles': 'Array of user roles at the time of the event.', - 'user_agent.device.name': 'Name of the device.', - 'user_agent.name': 'Name of the user agent.', - 'user_agent.original': 'Unparsed user_agent string.', - 'user_agent.os.family': 'OS family (such as redhat, debian, freebsd, windows).', - 'user_agent.os.full': 'Operating system name, including the version or code name.', - 'user_agent.os.kernel': 'Operating system kernel version as a raw string.', - 'user_agent.os.name': 'Operating system name, without the version.', - 'user_agent.os.platform': 'Operating system platform (such centos, ubuntu, windows).', - 'user_agent.os.type': - "Use the `os.type` field to 
categorize the operating system into one of the broad commercial families.\nIf the OS you're dealing with is not listed as an expected value, the field should not be populated. Please let us know by opening an issue with ECS, to propose its addition.", - 'user_agent.os.version': 'Operating system version as a raw string.', - 'user_agent.version': 'Version of the user agent.', - 'vulnerability.category': - 'The type of system or architecture that the vulnerability affects. These may be platform-specific (for example, Debian or SUSE) or general (for example, Database or Firewall). For example (https://qualysguard.qualys.com/qwebhelp/fo_portal/knowledgebase/vulnerability_categories.htm[Qualys vulnerability categories])\nThis field must be an array.', - 'vulnerability.classification': - 'The classification of the vulnerability scoring system. For example (https://www.first.org/cvss/)', - 'vulnerability.description': - 'The description of the vulnerability that provides additional context of the vulnerability. For example (https://cve.mitre.org/about/faqs.html#cve_entry_descriptions_created[Common Vulnerabilities and Exposure CVE description])', - 'vulnerability.enumeration': - 'The type of identifier used for this vulnerability. For example (https://cve.mitre.org/about/)', - 'vulnerability.id': - 'The identification (ID) is the number portion of a vulnerability entry. It includes a unique identification number for the vulnerability. 
For example (https://cve.mitre.org/about/faqs.html#what_is_cve_id)[Common Vulnerabilities and Exposure CVE ID]', - 'vulnerability.reference': - 'A resource that provides additional information, context, and mitigations for the identified vulnerability.', - 'vulnerability.report_id': 'The report or scan identification number.', - 'vulnerability.scanner.vendor': 'The name of the vulnerability scanner vendor.', - 'vulnerability.score.base': - 'Scores can range from 0.0 to 10.0, with 10.0 being the most severe.\nBase scores cover an assessment for exploitability metrics (attack vector, complexity, privileges, and user interaction), impact metrics (confidentiality, integrity, and availability), and scope. For example (https://www.first.org/cvss/specification-document)', - 'vulnerability.score.environmental': - 'Scores can range from 0.0 to 10.0, with 10.0 being the most severe.\nEnvironmental scores cover an assessment for any modified Base metrics, confidentiality, integrity, and availability requirements. For example (https://www.first.org/cvss/specification-document)', - 'vulnerability.score.temporal': - 'Scores can range from 0.0 to 10.0, with 10.0 being the most severe.\nTemporal scores cover an assessment for code maturity, remediation level, and confidence. For example (https://www.first.org/cvss/specification-document)', - 'vulnerability.score.version': - 'The National Vulnerability Database (NVD) provides qualitative severity rankings of "Low", "Medium", and "High" for CVSS v2.0 base score ranges in addition to the severity ratings for CVSS v3.0 as they are defined in the CVSS v3.0 specification.\nCVSS is owned and managed by FIRST.Org, Inc. (FIRST), a US-based non-profit organization, whose mission is to help computer security incident response teams across the world. For example (https://nvd.nist.gov/vuln-metrics/cvss)', - 'vulnerability.severity': - 'The severity of the vulnerability can help with metrics and internal prioritization regarding remediation. 
For example (https://nvd.nist.gov/vuln-metrics/cvss)', -}; diff --git a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/BuildIntegrationPage.tsx b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/BuildIntegrationPage.tsx deleted file mode 100644 index 8b6a082146606..0000000000000 --- a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/BuildIntegrationPage.tsx +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -import React from 'react'; -import { EuiPageTemplate } from '@elastic/eui'; -import { BuildIntegrationButtons } from '../../components/BuildIntegration/BuildIntegrationButtons'; - -export const BuildIntegration = () => { - return ( - - - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/CategorizationPage.tsx b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/CategorizationPage.tsx deleted file mode 100644 index e8a5c0b3c8adc..0000000000000 --- a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/CategorizationPage.tsx +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import React from 'react'; -import { EuiPageTemplate, EuiSpacer } from '@elastic/eui'; -import { useGlobalStore } from '../../stores/useGlobalStore'; - -import { EmptyPrompt } from '../../components/EmptyPrompt/EmptyPrompt'; -import { CategorizationButtons } from '../../components/Categorization/CategorizationButtons'; -import { PipelineResults } from '../../components/IntegrationResults/PipelineResults'; -import { ECS_GRAPH_PATH } from '../../../common'; - -export const CategorizationPage = () => { - const ingestPipeline = useGlobalStore((state) => state.ingestPipeline); - - if (Object.keys(ingestPipeline).length <= 0) { - return ( - - ); - } - return ( - - - - - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/EcsMapperPage.tsx b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/EcsMapperPage.tsx deleted file mode 100644 index 686f4067f5d62..0000000000000 --- a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/EcsMapperPage.tsx +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import React from 'react'; -import { EuiPageTemplate, EuiSpacer } from '@elastic/eui'; -import { useGlobalStore } from '@Stores/useGlobalStore'; -import { EcsFormStats } from '../../components/Ecs/EcsFormStats'; -import { EcsButtons } from '../../components/Ecs/EcsButtons'; -import { EcsForm } from '../../components/Ecs/EcsForm'; -import { EcsTable } from '../../components/Ecs/EcsTable'; - -export const EcsMapperPage = () => { - const ecsMappingTableState = useGlobalStore((state) => state.ecsMappingTableState); - return ( - - {ecsMappingTableState.length <= 0 && } - {ecsMappingTableState.length >= 1 && ( - <> - - - - - - - )} - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/RelatedPage.tsx b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/RelatedPage.tsx deleted file mode 100644 index dc468e2135135..0000000000000 --- a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/RelatedPage.tsx +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import React from 'react'; -import { EuiPageTemplate, EuiSpacer } from '@elastic/eui'; -import { useGlobalStore } from '@Stores/useGlobalStore'; - -import { EmptyPrompt } from '../../components/EmptyPrompt/EmptyPrompt'; -import { RelatedButtons } from '../../components/Related/RelatedButtons'; -import { PipelineResults } from '../../components/IntegrationResults/PipelineResults'; -import { RoutePaths } from '../../constants/routePaths'; - -export const RelatedPage = () => { - const ingestPipeline = useGlobalStore((state) => state.ingestPipeline); - - if (Object.keys(ingestPipeline).length <= 0) { - return ( - - ); - } - return ( - - - - - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/ViewResultsPage.tsx b/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/ViewResultsPage.tsx deleted file mode 100644 index 55c6f258598fe..0000000000000 --- a/x-pack/plugins/integration_assistant/public/pages/IntegrationBuilder/ViewResultsPage.tsx +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import React from 'react'; -import { EuiPageTemplate, EuiSpacer } from '@elastic/eui'; -import { useGlobalStore } from '@Stores/useGlobalStore'; -import { DocResults } from '../../components/IntegrationResults/DocsResults'; -import { PipelineResults } from '../../components/IntegrationResults/PipelineResults'; -import { EmptyPrompt } from '../../components/EmptyPrompt/EmptyPrompt'; -import { ViewResultsButtons } from '../../components/ViewResults/ViewResultsButtons'; -import { RoutePaths } from '../../constants/routePaths'; - -export const ViewResultsPage = () => { - const ingestPipeline = useGlobalStore((state) => state.ingestPipeline); - const docs = useGlobalStore((state) => state.docs); - - if (Object.keys(ingestPipeline).length <= 0) { - return ( - - ); - } - return ( - - - - - - - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/pages/Main/MainPage.tsx b/x-pack/plugins/integration_assistant/public/pages/Main/MainPage.tsx deleted file mode 100644 index fb7ab85aa0f19..0000000000000 --- a/x-pack/plugins/integration_assistant/public/pages/Main/MainPage.tsx +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import React from 'react'; -import { EuiPageSection } from '@elastic/eui'; - -export const MainPage = () => { - return ( - -

Mainpage Test

-
- ); -}; diff --git a/x-pack/plugins/integration_assistant/public/services.ts b/x-pack/plugins/integration_assistant/public/services.ts index ca8b120cb86f1..4347c66c51536 100644 --- a/x-pack/plugins/integration_assistant/public/services.ts +++ b/x-pack/plugins/integration_assistant/public/services.ts @@ -7,7 +7,15 @@ import type { CoreStart } from '@kbn/core/public'; import type { IHttpFetchError } from '@kbn/core-http-browser'; -import type { EcsMappingApiRequest, EcsMappingApiResponse } from '../common'; +import type { + EcsMappingApiRequest, + EcsMappingApiResponse, + CategorizationApiRequest, + CategorizationApiResponse, + RelatedApiRequest, + RelatedApiResponse, + BuildIntegrationApiRequest, +} from '../common'; import { ECS_GRAPH_PATH, CATEGORIZATION_GRAPH_PATH, @@ -17,40 +25,45 @@ import { export interface Services { runEcsGraph: (req: EcsMappingApiRequest) => Promise; - runCategorizationGraph: () => Promise; - runRelatedGraph: () => Promise; - runIntegrationBuilder: () => Promise; + runCategorizationGraph: ( + req: CategorizationApiRequest + ) => Promise; + runRelatedGraph: (req: RelatedApiRequest) => Promise; + runIntegrationBuilder: (req: BuildIntegrationApiRequest) => Promise; } export function getServices(core: CoreStart): Services { return { - runEcsGraph: async (req: EcsMappingApiRequest) => { + runEcsGraph: async (req: EcsMappingApiRequest): Promise => { try { - const response = await core.http.post(ECS_GRAPH_PATH, {}); + const response = await core.http.post(ECS_GRAPH_PATH, { + body: JSON.stringify({ ...req }), + }); + console.log(response); return response; } catch (e) { return e; } }, - runCategorizationGraph: async () => { + runCategorizationGraph: async (req: CategorizationApiRequest) => { try { - const response = await core.http.fetch<{}>(CATEGORIZATION_GRAPH_PATH); + const response = await core.http.post(CATEGORIZATION_GRAPH_PATH, {}); return response; } catch (e) { return e; } }, - runRelatedGraph: async () => { + runRelatedGraph: async (req: 
RelatedApiRequest) => { try { - const response = await core.http.fetch<{}>(RELATED_GRAPH_PATH); + const response = await core.http.post(RELATED_GRAPH_PATH, {}); return response; } catch (e) { return e; } }, - runIntegrationBuilder: async () => { + runIntegrationBuilder: async (req: BuildIntegrationApiRequest) => { try { - const response = await core.http.fetch<{}>(INTEGRATION_BUILDER_PATH); + const response = await core.http.post(INTEGRATION_BUILDER_PATH, {}); return response; } catch (e) { return e; diff --git a/x-pack/plugins/integration_assistant/public/utils/samples.tsx b/x-pack/plugins/integration_assistant/public/utils/samples.tsx deleted file mode 100644 index f56270da84af7..0000000000000 --- a/x-pack/plugins/integration_assistant/public/utils/samples.tsx +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import { htmlIdGenerator } from '@elastic/eui'; -import { ECSFIELDS } from '../constants/ecsFields'; -import { EcsMappingTableItem } from '../types'; - -function isEmptyValue(value): boolean { - return value === null || value === ''; -} - -function getExampleValueByPath(obj: object, path: string): object { - return path.split('.').reduce((acc, part) => acc && acc[part], obj); -} - -export function mergeDeeply(objects: object[], base?: object): object { - const result: object = base ?? 
{}; - - const merge = (target: object, source: object): object => { - Object.keys(source).forEach((key) => { - const targetValue = target[key]; - const sourceValue = source[key]; - - if (typeof sourceValue === 'object' && sourceValue !== null && !Array.isArray(sourceValue)) { - if (typeof targetValue !== 'object' || targetValue === null || isEmptyValue(targetValue)) { - target[key] = merge({}, sourceValue); - } else { - target[key] = merge(targetValue, sourceValue); - } - } else if ( - !Object.prototype.hasOwnProperty.call(target, key) || - (isEmptyValue(targetValue) && !isEmptyValue(sourceValue)) - ) { - target[key] = sourceValue; - } - }); - - return target; - }; - - objects.forEach((obj) => { - merge(result, obj); - }); - - return result; -} - -export function traverseAndMatchFields( - mapping: object, - mergedObject: object, - packageName: string, - dataStreamName: string, - path: string[] = [] -): EcsMappingTableItem[] { - const makeId = htmlIdGenerator(); - let matches: EcsMappingTableItem[] = []; - - Object.entries(mapping).forEach(([key, value]) => { - if (typeof value === 'object' && value !== null) { - matches = matches.concat( - traverseAndMatchFields(value, mergedObject, packageName, dataStreamName, path.concat(key)) - ); - } else { - const matchKey = value; - const isECS = ECSFIELDS.hasOwnProperty(matchKey); // eslint-disable-line no-prototype-builtins - const fullPath = path.concat(key).join('.'); - const exampleValue = getExampleValueByPath(mergedObject, fullPath); - const destinationField = isECS ? matchKey : `${packageName}.${dataStreamName}.${fullPath}`; - - matches.push({ - sourceField: fullPath, - destinationField, - isEcs: isECS, - description: isECS ? 
ECSFIELDS[matchKey] : '', - id: makeId(), - exampleValue, - }); - } - }); - - return matches; -} diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts index 1e37046f6c594..cb5f2e34d6076 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts @@ -9,1946 +9,6 @@ interface EcsFields { [key: string]: any; } -export const ECS_FULL: EcsFields = { - '@timestamp': 'Date/time when the event originated.', - 'agent.build.original': 'Extended build information for the agent.', - 'agent.ephemeral_id': 'Ephemeral identifier of this agent.', - 'agent.id': 'Unique identifier of this agent.', - 'agent.name': 'Custom name of the agent.', - 'agent.type': 'Type of the agent.', - 'agent.version': 'Version of the agent.', - 'client.address': 'Client network address.', - 'client.as.number': 'Unique number allocated to the autonomous system.', - 'client.as.organization.name': 'Organization name.', - 'client.bytes': 'Bytes sent from the client to the server.', - 'client.domain': 'The domain name of the client.', - 'client.geo.city_name': 'City name.', - 'client.geo.continent_code': 'Continent code.', - 'client.geo.continent_name': 'Name of the continent.', - 'client.geo.country_iso_code': 'Country ISO code.', - 'client.geo.country_name': 'Country name.', - 'client.geo.location': 'Longitude and latitude.', - 'client.geo.name': 'User-defined description of a location.', - 'client.geo.postal_code': 'Postal code.', - 'client.geo.region_iso_code': 'Region ISO code.', - 'client.geo.region_name': 'Region name.', - 'client.geo.timezone': 'Time zone.', - 'client.ip': 'IP address of the client.', - 'client.mac': 'MAC address of the client.', - 'client.nat.ip': 'Client NAT ip address', - 'client.nat.port': 'Client NAT port', - 'client.packets': 'Packets sent from the client to the server.', - 'client.port': 
'Port of the client.', - 'client.registered_domain': 'The highest registered client domain, stripped of the subdomain.', - 'client.subdomain': 'The subdomain of the domain.', - 'client.top_level_domain': 'The effective top level domain (com, org, net, co.uk).', - 'client.user.domain': 'Name of the directory the user is a member of.', - 'client.user.email': 'User email address.', - 'client.user.full_name': 'Users full name, if available.', - 'client.user.group.domain': 'Name of the directory the group is a member of.', - 'client.user.group.id': 'Unique identifier for the group on the system/platform.', - 'client.user.group.name': 'Name of the group.', - 'client.user.hash': 'Unique user hash to correlate information for a user in anonymized form.', - 'client.user.id': 'Unique identifier of the user.', - 'client.user.name': 'Short name or login of the user.', - 'client.user.roles': 'Array of user roles at the time of the event.', - 'cloud.account.id': 'The cloud account or organization id.', - 'cloud.account.name': 'The cloud account name.', - 'cloud.availability_zone': - 'Availability zone in which this host, resource, or service is located.', - 'cloud.instance.id': 'Instance ID of the host machine.', - 'cloud.instance.name': 'Instance name of the host machine.', - 'cloud.machine.type': 'Machine type of the host machine.', - 'cloud.origin.account.id': 'The cloud account or organization id.', - 'cloud.origin.account.name': 'The cloud account name.', - 'cloud.origin.availability_zone': - 'Availability zone in which this host, resource, or service is located.', - 'cloud.origin.instance.id': 'Instance ID of the host machine.', - 'cloud.origin.instance.name': 'Instance name of the host machine.', - 'cloud.origin.machine.type': 'Machine type of the host machine.', - 'cloud.origin.project.id': 'The cloud project id.', - 'cloud.origin.project.name': 'The cloud project name.', - 'cloud.origin.provider': 'Name of the cloud provider.', - 'cloud.origin.region': 'Region in which 
this host, resource, or service is located.', - 'cloud.origin.service.name': 'The cloud service name.', - 'cloud.project.id': 'The cloud project id.', - 'cloud.project.name': 'The cloud project name.', - 'cloud.provider': 'Name of the cloud provider.', - 'cloud.region': 'Region in which this host, resource, or service is located.', - 'cloud.service.name': 'The cloud service name.', - 'cloud.target.account.id': 'The cloud account or organization id.', - 'cloud.target.account.name': 'The cloud account name.', - 'cloud.target.availability_zone': - 'Availability zone in which this host, resource, or service is located.', - 'cloud.target.instance.id': 'Instance ID of the host machine.', - 'cloud.target.instance.name': 'Instance name of the host machine.', - 'cloud.target.machine.type': 'Machine type of the host machine.', - 'cloud.target.project.id': 'The cloud project id.', - 'cloud.target.project.name': 'The cloud project name.', - 'cloud.target.provider': 'Name of the cloud provider.', - 'cloud.target.region': 'Region in which this host, resource, or service is located.', - 'cloud.target.service.name': 'The cloud service name.', - 'container.cpu.usage': 'Percent CPU used, between 0 and 1.', - 'container.disk.read.bytes': 'The number of bytes read by all disks.', - 'container.disk.write.bytes': 'The number of bytes written on all disks.', - 'container.id': 'Unique container id.', - 'container.image.hash.all': 'An array of digests of the image the container was built on.', - 'container.image.name': 'Name of the image the container was built on.', - 'container.image.tag': 'Container image tags.', - 'container.labels': 'Image labels.', - 'container.memory.usage': 'Percent memory used, between 0 and 1.', - 'container.name': 'Container name.', - 'container.network.egress.bytes': 'The number of bytes sent on all network interfaces.', - 'container.network.ingress.bytes': 'The number of bytes received on all network interfaces.', - 'container.runtime': 'Runtime managing this 
container.', - 'container.security_context.privileged': - 'Indicates whether the container is running in privileged mode.', - 'data_stream.dataset': - 'The field can contain anything that makes sense to signify the source of the data.', - 'data_stream.namespace': - 'A user defined namespace. Namespaces are useful to allow grouping of data.', - 'data_stream.type': 'An overarching type for the data stream.', - 'destination.address': 'Destination network address.', - 'destination.as.number': 'Unique number allocated to the autonomous system.', - 'destination.as.organization.name': 'Organization name.', - 'destination.bytes': 'Bytes sent from the destination to the source.', - 'destination.domain': 'The domain name of the destination.', - 'destination.geo.city_name': 'City name.', - 'destination.geo.continent_code': 'Continent code.', - 'destination.geo.continent_name': 'Name of the continent.', - 'destination.geo.country_iso_code': 'Country ISO code.', - 'destination.geo.country_name': 'Country name.', - 'destination.geo.location': 'Longitude and latitude.', - 'destination.geo.name': 'User-defined description of a location.', - 'destination.geo.postal_code': 'Postal code.', - 'destination.geo.region_iso_code': 'Region ISO code.', - 'destination.geo.region_name': 'Region name.', - 'destination.geo.timezone': 'Time zone.', - 'destination.ip': 'IP address of the destination.', - 'destination.mac': 'MAC address of the destination.', - 'destination.nat.ip': 'Destination NAT ip', - 'destination.nat.port': 'Destination NAT Port', - 'destination.packets': 'Packets sent from the destination to the source.', - 'destination.port': 'Port of the destination.', - 'destination.registered_domain': - 'The highest registered destination domain, stripped of the subdomain.', - 'destination.subdomain': 'The subdomain of the domain.', - 'destination.top_level_domain': 'The effective top level domain (com, org, net, co.uk).', - 'destination.user.domain': 'Name of the directory the user is a 
member of.', - 'destination.user.email': 'User email address.', - 'destination.user.full_name': 'Users full name, if available.', - 'destination.user.group.domain': 'Name of the directory the group is a member of.', - 'destination.user.group.id': 'Unique identifier for the group on the system/platform.', - 'destination.user.group.name': 'Name of the group.', - 'destination.user.hash': - 'Unique user hash to correlate information for a user in anonymized form.', - 'destination.user.id': 'Unique identifier of the user.', - 'destination.user.name': 'Short name or login of the user.', - 'destination.user.roles': 'Array of user roles at the time of the event.', - 'device.id': 'The unique identifier of a device.', - 'device.manufacturer': 'The vendor name of the device manufacturer.', - 'device.model.identifier': 'The machine readable identifier of the device model.', - 'device.model.name': 'The human readable marketing name of the device model.', - 'dll.code_signature.digest_algorithm': 'Hashing algorithm used to sign the process.', - 'dll.code_signature.exists': 'Boolean to capture if a signature is present.', - 'dll.code_signature.signing_id': 'The identifier used to sign the process.', - 'dll.code_signature.status': 'Additional information about the certificate status.', - 'dll.code_signature.subject_name': 'Subject name of the code signer', - 'dll.code_signature.team_id': 'The team identifier used to sign the process.', - 'dll.code_signature.timestamp': 'When the signature was generated and signed.', - 'dll.code_signature.trusted': 'Stores the trust status of the certificate chain.', - 'dll.code_signature.valid': - 'Boolean to capture if the digital signature is verified against the binary content.', - 'dll.hash.md5': 'MD5 hash.', - 'dll.hash.sha1': 'SHA1 hash.', - 'dll.hash.sha256': 'SHA256 hash.', - 'dll.hash.sha384': 'SHA384 hash.', - 'dll.hash.sha512': 'SHA512 hash.', - 'dll.hash.ssdeep': 'SSDEEP hash.', - 'dll.hash.tlsh': 'TLSH hash.', - 'dll.name': 'Name of 
the library.', - 'dll.path': 'Full file path of the library.', - 'dll.pe.architecture': 'CPU architecture target for the file.', - 'dll.pe.company': 'Internal company name of the file, provided at compile-time.', - 'dll.pe.description': 'Internal description of the file, provided at compile-time.', - 'dll.pe.file_version': 'Process name.', - 'dll.pe.go_import_hash': 'A hash of the Go language imports in a PE file.', - 'dll.pe.go_imports': 'List of imported Go language element names and types.', - 'dll.pe.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', - 'dll.pe.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'dll.pe.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', - 'dll.pe.imphash': 'A hash of the imports in a PE file.', - 'dll.pe.import_hash': 'A hash of the imports in a PE file.', - 'dll.pe.imports': 'List of imported element names and types.', - 'dll.pe.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'dll.pe.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'dll.pe.original_file_name': 'Internal name of the file, provided at compile-time.', - 'dll.pe.pehash': 'A hash of the PE header and data from one or more PE sections.', - 'dll.pe.product': 'Internal product name of the file, provided at compile-time.', - 'dll.pe.sections': 'Section information of the PE file.', - 'dll.pe.sections.entropy': 'Shannon entropy calculation from the section.', - 'dll.pe.sections.name': 'PE Section List name.', - 'dll.pe.sections.physical_size': 'PE Section List physical size.', - 'dll.pe.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', - 'dll.pe.sections.virtual_size': - 'PE Section List virtual size. 
This is always the same as `physical_size`.', - 'dns.answers': 'Array of DNS answers.', - 'dns.answers.class': 'The class of DNS data contained in this resource record.', - 'dns.answers.data': 'The data describing the resource.', - 'dns.answers.name': 'The domain name to which this resource record pertains.', - 'dns.answers.ttl': - 'The time interval in seconds that this resource record may be cached before it should be discarded.', - 'dns.answers.type': 'The type of data contained in this resource record.', - 'dns.header_flags': 'Array of DNS header flags.', - 'dns.id': - 'The DNS packet identifier assigned by the program that generated the query. The identifier is copied to the response.', - 'dns.op_code': 'The DNS operation code that specifies the kind of query in the message.', - 'dns.question.class': 'The class of records being queried.', - 'dns.question.name': 'The name being queried.', - 'dns.question.registered_domain': 'The highest registered domain, stripped of the subdomain.', - 'dns.question.subdomain': 'The subdomain of the domain.', - 'dns.question.top_level_domain': 'The effective top level domain (com, org, net, co.uk).', - 'dns.question.type': 'The type of record being queried.', - 'dns.resolved_ip': 'Array containing all IPs seen in answers.data', - 'dns.response_code': 'The DNS response code.', - 'dns.type': 'The type of DNS event captured, query or answer.', - 'ecs.version': 'ECS version this event conforms to.', - 'email.attachments': 'List of objects describing the attachments.', - 'email.attachments.file.extension': 'Attachment file extension.', - 'email.attachments.file.hash.md5': 'MD5 hash.', - 'email.attachments.file.hash.sha1': 'SHA1 hash.', - 'email.attachments.file.hash.sha256': 'SHA256 hash.', - 'email.attachments.file.hash.sha384': 'SHA384 hash.', - 'email.attachments.file.hash.sha512': 'SHA512 hash.', - 'email.attachments.file.hash.ssdeep': 'SSDEEP hash.', - 'email.attachments.file.hash.tlsh': 'TLSH hash.', - 
'email.attachments.file.mime_type': 'MIME type of the attachment file.', - 'email.attachments.file.name': 'Name of the attachment file.', - 'email.attachments.file.size': 'Attachment file size.', - 'email.bcc.address': 'Email address of BCC recipient', - 'email.cc.address': 'Email address of CC recipient', - 'email.content_type': 'MIME type of the email message.', - 'email.delivery_timestamp': 'Date and time when message was delivered.', - 'email.direction': 'Direction of the message.', - 'email.from.address': 'The senders email address.', - 'email.local_id': 'Unique identifier given by the source.', - 'email.message_id': 'Value from the Message-ID header.', - 'email.origination_timestamp': 'Date and time the email was composed.', - 'email.reply_to.address': 'Address replies should be delivered to.', - 'email.sender.address': 'Address of the message sender.', - 'email.subject': 'The subject of the email message.', - 'email.to.address': 'Email address of recipient', - 'email.x_mailer': 'Application that drafted email.', - 'error.code': 'Error code describing the error.', - 'error.id': 'Unique identifier for the error.', - 'error.message': 'Error message.', - 'error.stack_trace': 'The stack trace of this error in plain text.', - 'error.type': 'The type of the error, for example the class name of the exception.', - 'event.action': 'The action captured by the event.', - 'event.agent_id_status': 'Validation status of the events agent.id field.', - 'event.category': 'Event category. 
The second categorization field in the hierarchy.', - 'event.code': 'Identification code for this event.', - 'event.created': 'Time when the event was first read by an agent or by your pipeline.', - 'event.dataset': 'Name of the dataset.', - 'event.duration': 'Duration of the event in nanoseconds.', - 'event.end': - '`event.end` contains the date when the event ended or when the activity was last observed.', - 'event.hash': - 'Hash (perhaps logstash fingerprint) of raw field to be able to demonstrate log integrity.', - 'event.id': 'Unique ID to describe the event.', - 'event.ingested': 'Timestamp when an event arrived in the central data store.', - 'event.kind': 'The kind of the event. The highest categorization field in the hierarchy.', - 'event.module': 'Name of the module this data is coming from.', - 'event.original': 'Raw text message of entire event.', - 'event.outcome': - 'The outcome of the event. The lowest level categorization field in the hierarchy.', - 'event.provider': 'Source of the event.', - 'event.reason': 'Reason why this event happened, according to the source', - 'event.reference': 'Event reference URL', - 'event.risk_score': - 'Risk score or priority of the event (e.g. security solutions). Use your systems original value here.', - 'event.risk_score_norm': 'Normalized risk score or priority of the event (0-100).', - 'event.sequence': 'Sequence number of the event.', - 'event.severity': 'Numeric severity of the event.', - 'event.start': - '`event.start` contains the date when the event started or when the activity was first observed.', - 'event.timezone': 'Event time zone.', - 'event.type': 'Event type. 
The third categorization field in the hierarchy.', - 'event.url': 'Event investigation URL', - 'faas.coldstart': 'Boolean value indicating a cold start of a function.', - 'faas.execution': 'The execution ID of the current function execution.', - 'faas.id': 'The unique identifier of a serverless function.', - 'faas.name': 'The name of a serverless function.', - 'faas.trigger.request_id': 'The ID of the trigger request , message, event, etc.', - 'faas.trigger.type': 'The trigger for the function execution.', - 'faas.version': 'The version of a serverless function.', - 'file.accessed': 'Last time the file was accessed.', - 'file.attributes': 'Array of file attributes.', - 'file.code_signature.digest_algorithm': 'Hashing algorithm used to sign the process.', - 'file.code_signature.exists': 'Boolean to capture if a signature is present.', - 'file.code_signature.signing_id': 'The identifier used to sign the process.', - 'file.code_signature.status': 'Additional information about the certificate status.', - 'file.code_signature.subject_name': 'Subject name of the code signer', - 'file.code_signature.team_id': 'The team identifier used to sign the process.', - 'file.code_signature.timestamp': 'When the signature was generated and signed.', - 'file.code_signature.trusted': 'Stores the trust status of the certificate chain.', - 'file.code_signature.valid': - 'Boolean to capture if the digital signature is verified against the binary content.', - 'file.created': 'File creation time.', - 'file.ctime': 'Last time the file attributes or metadata changed.', - 'file.device': 'Device that is the source of the file.', - 'file.directory': 'Directory where the file is located.', - 'file.drive_letter': 'Drive letter where the file is located.', - 'file.elf.architecture': 'Machine architecture of the ELF file.', - 'file.elf.byte_order': 'Byte sequence of ELF file.', - 'file.elf.cpu_type': 'CPU type of the ELF file.', - 'file.elf.creation_date': 'Build or compile date.', - 
'file.elf.exports': 'List of exported element names and types.', - 'file.elf.go_import_hash': 'A hash of the Go language imports in an ELF file.', - 'file.elf.go_imports': 'List of imported Go language element names and types.', - 'file.elf.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', - 'file.elf.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'file.elf.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', - 'file.elf.header.abi_version': 'Version of the ELF Application Binary Interface (ABI).', - 'file.elf.header.class': 'Header class of the ELF file.', - 'file.elf.header.data': 'Data table of the ELF header.', - 'file.elf.header.entrypoint': 'Header entrypoint of the ELF file.', - 'file.elf.header.object_version': '"0x1" for original ELF files.', - 'file.elf.header.os_abi': 'Application Binary Interface (ABI) of the Linux OS.', - 'file.elf.header.type': 'Header type of the ELF file.', - 'file.elf.header.version': 'Version of the ELF header.', - 'file.elf.import_hash': 'A hash of the imports in an ELF file.', - 'file.elf.imports': 'List of imported element names and types.', - 'file.elf.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'file.elf.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'file.elf.sections': 'Section information of the ELF file.', - 'file.elf.sections.chi2': 'Chi-square probability distribution of the section.', - 'file.elf.sections.entropy': 'Shannon entropy calculation from the section.', - 'file.elf.sections.flags': 'ELF Section List flags.', - 'file.elf.sections.name': 'ELF Section List name.', - 'file.elf.sections.physical_offset': 'ELF Section List offset.', - 'file.elf.sections.physical_size': 'ELF Section List physical size.', - 'file.elf.sections.type': 'ELF Section List 
type.', - 'file.elf.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', - 'file.elf.sections.virtual_address': 'ELF Section List virtual address.', - 'file.elf.sections.virtual_size': 'ELF Section List virtual size.', - 'file.elf.segments': 'ELF object segment list.', - 'file.elf.segments.sections': 'ELF object segment sections.', - 'file.elf.segments.type': 'ELF object segment type.', - 'file.elf.shared_libraries': 'List of shared libraries used by this ELF object.', - 'file.elf.telfhash': 'telfhash hash for ELF file.', - 'file.extension': 'File extension, excluding the leading dot.', - 'file.fork_name': 'A fork is additional data associated with a filesystem object.', - 'file.gid': 'Primary group ID (GID) of the file.', - 'file.group': 'Primary group name of the file.', - 'file.hash.md5': 'MD5 hash.', - 'file.hash.sha1': 'SHA1 hash.', - 'file.hash.sha256': 'SHA256 hash.', - 'file.hash.sha384': 'SHA384 hash.', - 'file.hash.sha512': 'SHA512 hash.', - 'file.hash.ssdeep': 'SSDEEP hash.', - 'file.hash.tlsh': 'TLSH hash.', - 'file.inode': 'Inode representing the file in the filesystem.', - 'file.macho.go_import_hash': 'A hash of the Go language imports in a Mach-O file.', - 'file.macho.go_imports': 'List of imported Go language element names and types.', - 'file.macho.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', - 'file.macho.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'file.macho.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', - 'file.macho.import_hash': 'A hash of the imports in a Mach-O file.', - 'file.macho.imports': 'List of imported element names and types.', - 'file.macho.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'file.macho.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element 
names and types.', - 'file.macho.sections': 'Section information of the Mach-O file.', - 'file.macho.sections.entropy': 'Shannon entropy calculation from the section.', - 'file.macho.sections.name': 'Mach-O Section List name.', - 'file.macho.sections.physical_size': 'Mach-O Section List physical size.', - 'file.macho.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', - 'file.macho.sections.virtual_size': - 'Mach-O Section List virtual size. This is always the same as `physical_size`.', - 'file.macho.symhash': 'A hash of the imports in a Mach-O file.', - 'file.mime_type': 'Media type of file, document, or arrangement of bytes.', - 'file.mode': 'Mode of the file in octal representation.', - 'file.mtime': 'Last time the file content was modified.', - 'file.name': 'Name of the file including the extension, without the directory.', - 'file.owner': 'File owners username.', - 'file.path': 'Full path to the file, including the file name.', - 'file.pe.architecture': 'CPU architecture target for the file.', - 'file.pe.company': 'Internal company name of the file, provided at compile-time.', - 'file.pe.description': 'Internal description of the file, provided at compile-time.', - 'file.pe.file_version': 'Process name.', - 'file.pe.go_import_hash': 'A hash of the Go language imports in a PE file.', - 'file.pe.go_imports': 'List of imported Go language element names and types.', - 'file.pe.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', - 'file.pe.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'file.pe.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', - 'file.pe.imphash': 'A hash of the imports in a PE file.', - 'file.pe.import_hash': 'A hash of the imports in a PE file.', - 'file.pe.imports': 'List of imported element names and types.', - 'file.pe.imports_names_entropy': - 'Shannon entropy calculation from the list of 
imported element names and types.', - 'file.pe.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'file.pe.original_file_name': 'Internal name of the file, provided at compile-time.', - 'file.pe.pehash': 'A hash of the PE header and data from one or more PE sections.', - 'file.pe.product': 'Internal product name of the file, provided at compile-time.', - 'file.pe.sections': 'Section information of the PE file.', - 'file.pe.sections.entropy': 'Shannon entropy calculation from the section.', - 'file.pe.sections.name': 'PE Section List name.', - 'file.pe.sections.physical_size': 'PE Section List physical size.', - 'file.pe.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', - 'file.pe.sections.virtual_size': - 'PE Section List virtual size. This is always the same as `physical_size`.', - 'file.size': 'File size in bytes.', - 'file.target_path': 'Target path for symlinks.', - 'file.type': 'File type (file, dir, or symlink).', - 'file.uid': 'The user ID (UID) or security identifier (SID) of the file owner.', - 'file.x509.alternative_names': 'List of subject alternative names (SAN).', - 'file.x509.issuer.common_name': 'List of common name (CN) of issuing certificate authority.', - 'file.x509.issuer.country': 'List of country (C) codes', - 'file.x509.issuer.distinguished_name': - 'Distinguished name (DN) of issuing certificate authority.', - 'file.x509.issuer.locality': 'List of locality names (L)', - 'file.x509.issuer.organization': 'List of organizations (O) of issuing certificate authority.', - 'file.x509.issuer.organizational_unit': - 'List of organizational units (OU) of issuing certificate authority.', - 'file.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', - 'file.x509.not_after': 'Time at which the certificate is no longer considered valid.', - 'file.x509.not_before': 'Time at which the certificate is first considered 
valid.', - 'file.x509.public_key_algorithm': 'Algorithm used to generate the public key.', - 'file.x509.public_key_curve': - 'The curve used by the elliptic curve public key algorithm. This is algorithm specific', - 'file.x509.public_key_exponent': - 'Exponent used to derive the public key. This is algorithm specific', - 'file.x509.public_key_size': 'The size of the public key space in bits.', - 'file.x509.serial_number': 'Unique serial number issued by the certificate authority.', - 'file.x509.signature_algorithm': 'Identifier for certificate signature algorithm.', - 'file.x509.subject.common_name': 'List of common names (CN) of subject.', - 'file.x509.subject.country': 'List of country (C) code', - 'file.x509.subject.distinguished_name': - 'Distinguished name (DN) of the certificate subject entity', - 'file.x509.subject.locality': 'List of locality names (L)', - 'file.x509.subject.organization': 'List of organizations (O) of subject.', - 'file.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', - 'file.x509.subject.state_or_province': 'List of state or province names (ST, S, or P)', - 'file.x509.version_number': 'Version of x509 format.', - 'group.domain': 'Name of the directory the group is a member of.', - 'group.id': 'Unique identifier for the group on the system/platform.', - 'group.name': 'Name of the group.', - 'host.architecture': 'Operating system architecture.', - 'host.boot.id': 'Linux boot uuid taken from /proc/sys/kernel/random/boot_id', - 'host.cpu.usage': 'Percent CPU used, between 0 and 1.', - 'host.disk.read.bytes': 'The number of bytes read by all disks.', - 'host.disk.write.bytes': 'The number of bytes written on all disks.', - 'host.domain': 'Name of the directory the group is a member of.', - 'host.geo.city_name': 'City name.', - 'host.geo.continent_code': 'Continent code.', - 'host.geo.continent_name': 'Name of the continent.', - 'host.geo.country_iso_code': 'Country ISO code.', - 'host.geo.country_name': 
'Country name.', - 'host.geo.location': 'Longitude and latitude.', - 'host.geo.name': 'User-defined description of a location.', - 'host.geo.postal_code': 'Postal code.', - 'host.geo.region_iso_code': 'Region ISO code.', - 'host.geo.region_name': 'Region name.', - 'host.geo.timezone': 'Time zone.', - 'host.hostname': 'Hostname of the host.', - 'host.id': 'Unique host id.', - 'host.ip': 'Host ip addresses.', - 'host.mac': 'Host MAC addresses.', - 'host.name': 'Name of the host.', - 'host.network.egress.bytes': 'The number of bytes sent on all network interfaces.', - 'host.network.egress.packets': 'The number of packets sent on all network interfaces.', - 'host.network.ingress.bytes': 'The number of bytes received on all network interfaces.', - 'host.network.ingress.packets': 'The number of packets received on all network interfaces.', - 'host.os.family': 'OS family (such as redhat, debian, freebsd, windows).', - 'host.os.full': 'Operating system name, including the version or code name.', - 'host.os.kernel': 'Operating system kernel version as a raw string.', - 'host.os.name': 'Operating system name, without the version.', - 'host.os.platform': 'Operating system platform (such centos, ubuntu, windows).', - 'host.os.type': - 'Which commercial OS family (one of: linux, macos, unix, windows, ios or android).', - 'host.os.version': 'Operating system version as a raw string.', - 'host.pid_ns_ino': 'Pid namespace inode', - 'host.risk.calculated_level': - 'A risk classification level calculated by an internal system as part of entity analytics and entity risk scoring', - 'host.risk.calculated_score': - 'A risk classification score calculated by an internal system as part of entity analytics and entity risk scoring', - 'host.risk.calculated_score_norm': 'A normalized risk score calculated by an internal system', - 'host.risk.static_level': - 'A risk classification level obtained from outside the system, such as from some external Threat Intelligence Platform', - 
'host.risk.static_score': - 'A risk classification score obtained from outside the system, such as from some external Threat Intelligence Platform', - 'host.risk.static_score_norm': 'A normalized risk score calculated by an external system.', - 'host.type': 'Type of host.', - 'host.uptime': 'Seconds the host has been up.', - 'http.request.body.bytes': 'Size in bytes of the request body.', - 'http.request.body.content': 'The full HTTP request body.', - 'http.request.bytes': 'Total size in bytes of the request (body and headers).', - 'http.request.id': 'HTTP request ID.', - 'http.request.method': 'HTTP request method.', - 'http.request.mime_type': 'Mime type of the body of the request.', - 'http.request.referrer': 'Referrer for this HTTP request.', - 'http.response.body.bytes': 'Size in bytes of the response body.', - 'http.response.body.content': 'The full HTTP response body.', - 'http.response.bytes': 'Total size in bytes of the response (body and headers).', - 'http.response.mime_type': 'Mime type of the body of the response.', - 'http.response.status_code': 'HTTP response status code.', - 'http.version': 'HTTP version.', - labels: 'Custom key/value pairs.', - 'log.file.path': 'Full path to the log file this event came from.', - 'log.level': 'Log level of the log event.', - 'log.logger': 'Name of the logger.', - 'log.origin.file.line': 'The line number of the file which originated the log event.', - 'log.origin.file.name': 'The code file which originated the log event.', - 'log.origin.function': 'The function which originated the log event.', - 'log.syslog': 'Syslog metadata', - 'log.syslog.appname': 'The device or application that originated the Syslog message.', - 'log.syslog.facility.code': 'Syslog numeric facility of the event.', - 'log.syslog.facility.name': 'Syslog text-based facility of the event.', - 'log.syslog.hostname': 'The host that originated the Syslog message.', - 'log.syslog.msgid': 'An identifier for the type of Syslog message.', - 
'log.syslog.priority': 'Syslog priority of the event.', - 'log.syslog.procid': 'The process name or ID that originated the Syslog message.', - 'log.syslog.severity.code': 'Syslog numeric severity of the event.', - 'log.syslog.severity.name': 'Syslog text-based severity of the event.', - 'log.syslog.structured_data': 'Structured data expressed in RFC 5424 messages.', - 'log.syslog.version': 'Syslog protocol version.', - message: 'Log message optimized for viewing in a log viewer.', - 'network.application': 'Application level protocol name.', - 'network.bytes': 'Total bytes transferred in both directions.', - 'network.community_id': 'A hash of source and destination IPs and ports.', - 'network.direction': 'Direction of the network traffic.', - 'network.forwarded_ip': 'Host IP address when the source IP address is the proxy.', - 'network.iana_number': 'IANA Protocol Number.', - 'network.inner': 'Inner VLAN tag information', - 'network.inner.vlan.id': 'VLAN ID as reported by the observer.', - 'network.inner.vlan.name': 'Optional VLAN name as reported by the observer.', - 'network.name': 'Name given by operators to sections of their network.', - 'network.packets': 'Total packets transferred in both directions.', - 'network.protocol': 'Application protocol name.', - 'network.transport': 'Protocol Name corresponding to the field `iana_number`.', - 'network.type': 'In the OSI Model this would be the Network Layer. 
ipv4, ipv6, ipsec, pim, etc', - 'network.vlan.id': 'VLAN ID as reported by the observer.', - 'network.vlan.name': 'Optional VLAN name as reported by the observer.', - 'observer.egress': 'Object field for egress information', - 'observer.egress.interface.alias': 'Interface alias', - 'observer.egress.interface.id': 'Interface ID', - 'observer.egress.interface.name': 'Interface name', - 'observer.egress.vlan.id': 'VLAN ID as reported by the observer.', - 'observer.egress.vlan.name': 'Optional VLAN name as reported by the observer.', - 'observer.egress.zone': 'Observer Egress zone', - 'observer.geo.city_name': 'City name.', - 'observer.geo.continent_code': 'Continent code.', - 'observer.geo.continent_name': 'Name of the continent.', - 'observer.geo.country_iso_code': 'Country ISO code.', - 'observer.geo.country_name': 'Country name.', - 'observer.geo.location': 'Longitude and latitude.', - 'observer.geo.name': 'User-defined description of a location.', - 'observer.geo.postal_code': 'Postal code.', - 'observer.geo.region_iso_code': 'Region ISO code.', - 'observer.geo.region_name': 'Region name.', - 'observer.geo.timezone': 'Time zone.', - 'observer.hostname': 'Hostname of the observer.', - 'observer.ingress': 'Object field for ingress information', - 'observer.ingress.interface.alias': 'Interface alias', - 'observer.ingress.interface.id': 'Interface ID', - 'observer.ingress.interface.name': 'Interface name', - 'observer.ingress.vlan.id': 'VLAN ID as reported by the observer.', - 'observer.ingress.vlan.name': 'Optional VLAN name as reported by the observer.', - 'observer.ingress.zone': 'Observer ingress zone', - 'observer.ip': 'IP addresses of the observer.', - 'observer.mac': 'MAC addresses of the observer.', - 'observer.name': 'Custom name of the observer.', - 'observer.os.family': 'OS family (such as redhat, debian, freebsd, windows).', - 'observer.os.full': 'Operating system name, including the version or code name.', - 'observer.os.kernel': 'Operating system kernel 
version as a raw string.', - 'observer.os.name': 'Operating system name, without the version.', - 'observer.os.platform': 'Operating system platform (such centos, ubuntu, windows).', - 'observer.os.type': - 'Which commercial OS family (one of: linux, macos, unix, windows, ios or android).', - 'observer.os.version': 'Operating system version as a raw string.', - 'observer.product': 'The product name of the observer.', - 'observer.serial_number': 'Observer serial number.', - 'observer.type': 'The type of the observer the data is coming from.', - 'observer.vendor': 'Vendor name of the observer.', - 'observer.version': 'Observer version.', - 'orchestrator.api_version': 'API version being used to carry out the action', - 'orchestrator.cluster.id': 'Unique ID of the cluster.', - 'orchestrator.cluster.name': 'Name of the cluster.', - 'orchestrator.cluster.url': 'URL of the API used to manage the cluster.', - 'orchestrator.cluster.version': 'The version of the cluster.', - 'orchestrator.namespace': 'Namespace in which the action is taking place.', - 'orchestrator.organization': - 'Organization affected by the event (for multi-tenant orchestrator setups).', - 'orchestrator.resource.annotation': 'The list of annotations added to the resource.', - 'orchestrator.resource.id': 'Unique ID of the resource being acted upon.', - 'orchestrator.resource.ip': - 'IP address assigned to the resource associated with the event being observed.', - 'orchestrator.resource.label': 'The list of labels added to the resource.', - 'orchestrator.resource.name': 'Name of the resource being acted upon.', - 'orchestrator.resource.parent.type': - 'Type or kind of the parent resource associated with the event being observed.', - 'orchestrator.resource.type': 'Type of resource being acted upon.', - 'orchestrator.type': 'Orchestrator cluster type (e.g. 
kubernetes, nomad or cloudfoundry).', - 'organization.id': 'Unique identifier for the organization.', - 'organization.name': 'Organization name.', - 'package.architecture': 'Package architecture.', - 'package.build_version': 'Build version information', - 'package.checksum': 'Checksum of the installed package for verification.', - 'package.description': 'Description of the package.', - 'package.install_scope': 'Indicating how the package was installed, e.g. user-local, global.', - 'package.installed': 'Time when package was installed.', - 'package.license': 'Package license', - 'package.name': 'Package name', - 'package.path': 'Path where the package is installed.', - 'package.reference': 'Package home page or reference URL', - 'package.size': 'Package size in bytes.', - 'package.type': 'Package type', - 'package.version': 'Package version', - 'process.args': 'Array of process arguments.', - 'process.args_count': 'Length of the process.args array.', - 'process.code_signature.digest_algorithm': 'Hashing algorithm used to sign the process.', - 'process.code_signature.exists': 'Boolean to capture if a signature is present.', - 'process.code_signature.signing_id': 'The identifier used to sign the process.', - 'process.code_signature.status': 'Additional information about the certificate status.', - 'process.code_signature.subject_name': 'Subject name of the code signer', - 'process.code_signature.team_id': 'The team identifier used to sign the process.', - 'process.code_signature.timestamp': 'When the signature was generated and signed.', - 'process.code_signature.trusted': 'Stores the trust status of the certificate chain.', - 'process.code_signature.valid': - 'Boolean to capture if the digital signature is verified against the binary content.', - 'process.command_line': 'Full command line that started the process.', - 'process.elf.architecture': 'Machine architecture of the ELF file.', - 'process.elf.byte_order': 'Byte sequence of ELF file.', - 
'process.elf.cpu_type': 'CPU type of the ELF file.', - 'process.elf.creation_date': 'Build or compile date.', - 'process.elf.exports': 'List of exported element names and types.', - 'process.elf.go_import_hash': 'A hash of the Go language imports in an ELF file.', - 'process.elf.go_imports': 'List of imported Go language element names and types.', - 'process.elf.go_imports_names_entropy': - 'Shannon entropy calculation from the list of Go imports.', - 'process.elf.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'process.elf.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', - 'process.elf.header.abi_version': 'Version of the ELF Application Binary Interface (ABI).', - 'process.elf.header.class': 'Header class of the ELF file.', - 'process.elf.header.data': 'Data table of the ELF header.', - 'process.elf.header.entrypoint': 'Header entrypoint of the ELF file.', - 'process.elf.header.object_version': '"0x1" for original ELF files.', - 'process.elf.header.os_abi': 'Application Binary Interface (ABI) of the Linux OS.', - 'process.elf.header.type': 'Header type of the ELF file.', - 'process.elf.header.version': 'Version of the ELF header.', - 'process.elf.import_hash': 'A hash of the imports in an ELF file.', - 'process.elf.imports': 'List of imported element names and types.', - 'process.elf.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'process.elf.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'process.elf.sections': 'Section information of the ELF file.', - 'process.elf.sections.chi2': 'Chi-square probability distribution of the section.', - 'process.elf.sections.entropy': 'Shannon entropy calculation from the section.', - 'process.elf.sections.flags': 'ELF Section List flags.', - 'process.elf.sections.name': 'ELF Section List name.', - 
'process.elf.sections.physical_offset': 'ELF Section List offset.', - 'process.elf.sections.physical_size': 'ELF Section List physical size.', - 'process.elf.sections.type': 'ELF Section List type.', - 'process.elf.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', - 'process.elf.sections.virtual_address': 'ELF Section List virtual address.', - 'process.elf.sections.virtual_size': 'ELF Section List virtual size.', - 'process.elf.segments': 'ELF object segment list.', - 'process.elf.segments.sections': 'ELF object segment sections.', - 'process.elf.segments.type': 'ELF object segment type.', - 'process.elf.shared_libraries': 'List of shared libraries used by this ELF object.', - 'process.elf.telfhash': 'telfhash hash for ELF file.', - 'process.end': 'The time the process ended.', - 'process.entity_id': 'Unique identifier for the process.', - 'process.entry_leader.args': 'Array of process arguments.', - 'process.entry_leader.args_count': 'Length of the process.args array.', - 'process.entry_leader.attested_groups.name': 'Name of the group.', - 'process.entry_leader.attested_user.id': 'Unique identifier of the user.', - 'process.entry_leader.attested_user.name': 'Short name or login of the user.', - 'process.entry_leader.command_line': 'Full command line that started the process.', - 'process.entry_leader.entity_id': 'Unique identifier for the process.', - 'process.entry_leader.entry_meta.source.ip': 'IP address of the source.', - 'process.entry_leader.entry_meta.type': 'The entry type for the entry session leader.', - 'process.entry_leader.executable': 'Absolute path to the process executable.', - 'process.entry_leader.group.id': 'Unique identifier for the group on the system/platform.', - 'process.entry_leader.group.name': 'Name of the group.', - 'process.entry_leader.interactive': 'Whether the process is connected to an interactive shell.', - 'process.entry_leader.name': 'Process name.', - 'process.entry_leader.parent.entity_id': 
'Unique identifier for the process.', - 'process.entry_leader.parent.pid': 'Process id.', - 'process.entry_leader.parent.session_leader.entity_id': 'Unique identifier for the process.', - 'process.entry_leader.parent.session_leader.pid': 'Process id.', - 'process.entry_leader.parent.session_leader.start': 'The time the process started.', - 'process.entry_leader.parent.session_leader.vpid': 'Virtual process id.', - 'process.entry_leader.parent.start': 'The time the process started.', - 'process.entry_leader.parent.vpid': 'Virtual process id.', - 'process.entry_leader.pid': 'Process id.', - 'process.entry_leader.real_group.id': 'Unique identifier for the group on the system/platform.', - 'process.entry_leader.real_group.name': 'Name of the group.', - 'process.entry_leader.real_user.id': 'Unique identifier of the user.', - 'process.entry_leader.real_user.name': 'Short name or login of the user.', - 'process.entry_leader.same_as_process': - 'This boolean is used to identify if a leader process is the same as the top level process.', - 'process.entry_leader.saved_group.id': 'Unique identifier for the group on the system/platform.', - 'process.entry_leader.saved_group.name': 'Name of the group.', - 'process.entry_leader.saved_user.id': 'Unique identifier of the user.', - 'process.entry_leader.saved_user.name': 'Short name or login of the user.', - 'process.entry_leader.start': 'The time the process started.', - 'process.entry_leader.supplemental_groups.id': - 'Unique identifier for the group on the system/platform.', - 'process.entry_leader.supplemental_groups.name': 'Name of the group.', - 'process.entry_leader.tty': 'Information about the controlling TTY device.', - 'process.entry_leader.tty.char_device.major': 'The TTY character devices major number.', - 'process.entry_leader.tty.char_device.minor': 'The TTY character devices minor number.', - 'process.entry_leader.user.id': 'Unique identifier of the user.', - 'process.entry_leader.user.name': 'Short name or login of 
the user.', - 'process.entry_leader.vpid': 'Virtual process id.', - 'process.entry_leader.working_directory': 'The working directory of the process.', - 'process.env_vars': 'Array of environment variable bindings.', - 'process.executable': 'Absolute path to the process executable.', - 'process.exit_code': 'The exit code of the process.', - 'process.group_leader.args': 'Array of process arguments.', - 'process.group_leader.args_count': 'Length of the process.args array.', - 'process.group_leader.command_line': 'Full command line that started the process.', - 'process.group_leader.entity_id': 'Unique identifier for the process.', - 'process.group_leader.executable': 'Absolute path to the process executable.', - 'process.group_leader.group.id': 'Unique identifier for the group on the system/platform.', - 'process.group_leader.group.name': 'Name of the group.', - 'process.group_leader.interactive': 'Whether the process is connected to an interactive shell.', - 'process.group_leader.name': 'Process name.', - 'process.group_leader.pid': 'Process id.', - 'process.group_leader.real_group.id': 'Unique identifier for the group on the system/platform.', - 'process.group_leader.real_group.name': 'Name of the group.', - 'process.group_leader.real_user.id': 'Unique identifier of the user.', - 'process.group_leader.real_user.name': 'Short name or login of the user.', - 'process.group_leader.same_as_process': - 'This boolean is used to identify if a leader process is the same as the top level process.', - 'process.group_leader.saved_group.id': 'Unique identifier for the group on the system/platform.', - 'process.group_leader.saved_group.name': 'Name of the group.', - 'process.group_leader.saved_user.id': 'Unique identifier of the user.', - 'process.group_leader.saved_user.name': 'Short name or login of the user.', - 'process.group_leader.start': 'The time the process started.', - 'process.group_leader.supplemental_groups.id': - 'Unique identifier for the group on the 
system/platform.', - 'process.group_leader.supplemental_groups.name': 'Name of the group.', - 'process.group_leader.tty': 'Information about the controlling TTY device.', - 'process.group_leader.tty.char_device.major': 'The TTY character devices major number.', - 'process.group_leader.tty.char_device.minor': 'The TTY character devices minor number.', - 'process.group_leader.user.id': 'Unique identifier of the user.', - 'process.group_leader.user.name': 'Short name or login of the user.', - 'process.group_leader.vpid': 'Virtual process id.', - 'process.group_leader.working_directory': 'The working directory of the process.', - 'process.hash.md5': 'MD5 hash.', - 'process.hash.sha1': 'SHA1 hash.', - 'process.hash.sha256': 'SHA256 hash.', - 'process.hash.sha384': 'SHA384 hash.', - 'process.hash.sha512': 'SHA512 hash.', - 'process.hash.ssdeep': 'SSDEEP hash.', - 'process.hash.tlsh': 'TLSH hash.', - 'process.interactive': 'Whether the process is connected to an interactive shell.', - 'process.io': 'A chunk of input or output (IO) from a single process.', - 'process.io.bytes_skipped': - 'An array of byte offsets and lengths denoting where IO data has been skipped.', - 'process.io.bytes_skipped.length': 'The length of bytes skipped.', - 'process.io.bytes_skipped.offset': - 'The byte offset into this events io.text (or io.bytes in the future) where length bytes were skipped.', - 'process.io.max_bytes_per_process_exceeded': - 'If true, the process producing the output has exceeded the max_kilobytes_per_process configuration setting.', - 'process.io.text': 'A chunk of output or input sanitized to UTF-8.', - 'process.io.total_bytes_captured': 'The total number of bytes captured in this event.', - 'process.io.total_bytes_skipped': - 'The total number of bytes that were not captured due to implementation restrictions such as buffer size limits.', - 'process.io.type': 'The type of object on which the IO action (read or write) was taken.', - 'process.macho.go_import_hash': 'A hash 
of the Go language imports in a Mach-O file.', - 'process.macho.go_imports': 'List of imported Go language element names and types.', - 'process.macho.go_imports_names_entropy': - 'Shannon entropy calculation from the list of Go imports.', - 'process.macho.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'process.macho.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', - 'process.macho.import_hash': 'A hash of the imports in a Mach-O file.', - 'process.macho.imports': 'List of imported element names and types.', - 'process.macho.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'process.macho.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'process.macho.sections': 'Section information of the Mach-O file.', - 'process.macho.sections.entropy': 'Shannon entropy calculation from the section.', - 'process.macho.sections.name': 'Mach-O Section List name.', - 'process.macho.sections.physical_size': 'Mach-O Section List physical size.', - 'process.macho.sections.var_entropy': - 'Variance for Shannon entropy calculation from the section.', - 'process.macho.sections.virtual_size': - 'Mach-O Section List virtual size. 
This is always the same as `physical_size`.', - 'process.macho.symhash': 'A hash of the imports in a Mach-O file.', - 'process.name': 'Process name.', - 'process.parent.args': 'Array of process arguments.', - 'process.parent.args_count': 'Length of the process.args array.', - 'process.parent.code_signature.digest_algorithm': 'Hashing algorithm used to sign the process.', - 'process.parent.code_signature.exists': 'Boolean to capture if a signature is present.', - 'process.parent.code_signature.signing_id': 'The identifier used to sign the process.', - 'process.parent.code_signature.status': 'Additional information about the certificate status.', - 'process.parent.code_signature.subject_name': 'Subject name of the code signer', - 'process.parent.code_signature.team_id': 'The team identifier used to sign the process.', - 'process.parent.code_signature.timestamp': 'When the signature was generated and signed.', - 'process.parent.code_signature.trusted': 'Stores the trust status of the certificate chain.', - 'process.parent.code_signature.valid': - 'Boolean to capture if the digital signature is verified against the binary content.', - 'process.parent.command_line': 'Full command line that started the process.', - 'process.parent.elf.architecture': 'Machine architecture of the ELF file.', - 'process.parent.elf.byte_order': 'Byte sequence of ELF file.', - 'process.parent.elf.cpu_type': 'CPU type of the ELF file.', - 'process.parent.elf.creation_date': 'Build or compile date.', - 'process.parent.elf.exports': 'List of exported element names and types.', - 'process.parent.elf.go_import_hash': 'A hash of the Go language imports in an ELF file.', - 'process.parent.elf.go_imports': 'List of imported Go language element names and types.', - 'process.parent.elf.go_imports_names_entropy': - 'Shannon entropy calculation from the list of Go imports.', - 'process.parent.elf.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 
'process.parent.elf.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', - 'process.parent.elf.header.abi_version': 'Version of the ELF Application Binary Interface (ABI).', - 'process.parent.elf.header.class': 'Header class of the ELF file.', - 'process.parent.elf.header.data': 'Data table of the ELF header.', - 'process.parent.elf.header.entrypoint': 'Header entrypoint of the ELF file.', - 'process.parent.elf.header.object_version': '"0x1" for original ELF files.', - 'process.parent.elf.header.os_abi': 'Application Binary Interface (ABI) of the Linux OS.', - 'process.parent.elf.header.type': 'Header type of the ELF file.', - 'process.parent.elf.header.version': 'Version of the ELF header.', - 'process.parent.elf.import_hash': 'A hash of the imports in an ELF file.', - 'process.parent.elf.imports': 'List of imported element names and types.', - 'process.parent.elf.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'process.parent.elf.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'process.parent.elf.sections': 'Section information of the ELF file.', - 'process.parent.elf.sections.chi2': 'Chi-square probability distribution of the section.', - 'process.parent.elf.sections.entropy': 'Shannon entropy calculation from the section.', - 'process.parent.elf.sections.flags': 'ELF Section List flags.', - 'process.parent.elf.sections.name': 'ELF Section List name.', - 'process.parent.elf.sections.physical_offset': 'ELF Section List offset.', - 'process.parent.elf.sections.physical_size': 'ELF Section List physical size.', - 'process.parent.elf.sections.type': 'ELF Section List type.', - 'process.parent.elf.sections.var_entropy': - 'Variance for Shannon entropy calculation from the section.', - 'process.parent.elf.sections.virtual_address': 'ELF Section List virtual address.', - 
'process.parent.elf.sections.virtual_size': 'ELF Section List virtual size.', - 'process.parent.elf.segments': 'ELF object segment list.', - 'process.parent.elf.segments.sections': 'ELF object segment sections.', - 'process.parent.elf.segments.type': 'ELF object segment type.', - 'process.parent.elf.shared_libraries': 'List of shared libraries used by this ELF object.', - 'process.parent.elf.telfhash': 'telfhash hash for ELF file.', - 'process.parent.end': 'The time the process ended.', - 'process.parent.entity_id': 'Unique identifier for the process.', - 'process.parent.executable': 'Absolute path to the process executable.', - 'process.parent.exit_code': 'The exit code of the process.', - 'process.parent.group.id': 'Unique identifier for the group on the system/platform.', - 'process.parent.group.name': 'Name of the group.', - 'process.parent.group_leader.entity_id': 'Unique identifier for the process.', - 'process.parent.group_leader.pid': 'Process id.', - 'process.parent.group_leader.start': 'The time the process started.', - 'process.parent.group_leader.vpid': 'Virtual process id.', - 'process.parent.hash.md5': 'MD5 hash.', - 'process.parent.hash.sha1': 'SHA1 hash.', - 'process.parent.hash.sha256': 'SHA256 hash.', - 'process.parent.hash.sha384': 'SHA384 hash.', - 'process.parent.hash.sha512': 'SHA512 hash.', - 'process.parent.hash.ssdeep': 'SSDEEP hash.', - 'process.parent.hash.tlsh': 'TLSH hash.', - 'process.parent.interactive': 'Whether the process is connected to an interactive shell.', - 'process.parent.macho.go_import_hash': 'A hash of the Go language imports in a Mach-O file.', - 'process.parent.macho.go_imports': 'List of imported Go language element names and types.', - 'process.parent.macho.go_imports_names_entropy': - 'Shannon entropy calculation from the list of Go imports.', - 'process.parent.macho.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'process.parent.macho.go_stripped': 'Whether 
the file is a stripped or obfuscated Go executable.', - 'process.parent.macho.import_hash': 'A hash of the imports in a Mach-O file.', - 'process.parent.macho.imports': 'List of imported element names and types.', - 'process.parent.macho.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'process.parent.macho.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'process.parent.macho.sections': 'Section information of the Mach-O file.', - 'process.parent.macho.sections.entropy': 'Shannon entropy calculation from the section.', - 'process.parent.macho.sections.name': 'Mach-O Section List name.', - 'process.parent.macho.sections.physical_size': 'Mach-O Section List physical size.', - 'process.parent.macho.sections.var_entropy': - 'Variance for Shannon entropy calculation from the section.', - 'process.parent.macho.sections.virtual_size': - 'Mach-O Section List virtual size. 
This is always the same as `physical_size`.', - 'process.parent.macho.symhash': 'A hash of the imports in a Mach-O file.', - 'process.parent.name': 'Process name.', - 'process.parent.pe.architecture': 'CPU architecture target for the file.', - 'process.parent.pe.company': 'Internal company name of the file, provided at compile-time.', - 'process.parent.pe.description': 'Internal description of the file, provided at compile-time.', - 'process.parent.pe.file_version': 'Process name.', - 'process.parent.pe.go_import_hash': 'A hash of the Go language imports in a PE file.', - 'process.parent.pe.go_imports': 'List of imported Go language element names and types.', - 'process.parent.pe.go_imports_names_entropy': - 'Shannon entropy calculation from the list of Go imports.', - 'process.parent.pe.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'process.parent.pe.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', - 'process.parent.pe.imphash': 'A hash of the imports in a PE file.', - 'process.parent.pe.import_hash': 'A hash of the imports in a PE file.', - 'process.parent.pe.imports': 'List of imported element names and types.', - 'process.parent.pe.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'process.parent.pe.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'process.parent.pe.original_file_name': 'Internal name of the file, provided at compile-time.', - 'process.parent.pe.pehash': 'A hash of the PE header and data from one or more PE sections.', - 'process.parent.pe.product': 'Internal product name of the file, provided at compile-time.', - 'process.parent.pe.sections': 'Section information of the PE file.', - 'process.parent.pe.sections.entropy': 'Shannon entropy calculation from the section.', - 'process.parent.pe.sections.name': 'PE Section 
List name.', - 'process.parent.pe.sections.physical_size': 'PE Section List physical size.', - 'process.parent.pe.sections.var_entropy': - 'Variance for Shannon entropy calculation from the section.', - 'process.parent.pe.sections.virtual_size': - 'PE Section List virtual size. This is always the same as `physical_size`.', - 'process.parent.pgid': 'Deprecated identifier of the group of processes the process belongs to.', - 'process.parent.pid': 'Process id.', - 'process.parent.real_group.id': 'Unique identifier for the group on the system/platform.', - 'process.parent.real_group.name': 'Name of the group.', - 'process.parent.real_user.id': 'Unique identifier of the user.', - 'process.parent.real_user.name': 'Short name or login of the user.', - 'process.parent.saved_group.id': 'Unique identifier for the group on the system/platform.', - 'process.parent.saved_group.name': 'Name of the group.', - 'process.parent.saved_user.id': 'Unique identifier of the user.', - 'process.parent.saved_user.name': 'Short name or login of the user.', - 'process.parent.start': 'The time the process started.', - 'process.parent.supplemental_groups.id': - 'Unique identifier for the group on the system/platform.', - 'process.parent.supplemental_groups.name': 'Name of the group.', - 'process.parent.thread.capabilities.effective': - 'Array of capabilities used for permission checks.', - 'process.parent.thread.capabilities.permitted': 'Array of capabilities a thread could assume.', - 'process.parent.thread.id': 'Thread ID.', - 'process.parent.thread.name': 'Thread name.', - 'process.parent.title': 'Process title.', - 'process.parent.tty': 'Information about the controlling TTY device.', - 'process.parent.tty.char_device.major': 'The TTY character devices major number.', - 'process.parent.tty.char_device.minor': 'The TTY character devices minor number.', - 'process.parent.uptime': 'Seconds the process has been up.', - 'process.parent.user.id': 'Unique identifier of the user.', - 
'process.parent.user.name': 'Short name or login of the user.', - 'process.parent.vpid': 'Virtual process id.', - 'process.parent.working_directory': 'The working directory of the process.', - 'process.pe.architecture': 'CPU architecture target for the file.', - 'process.pe.company': 'Internal company name of the file, provided at compile-time.', - 'process.pe.description': 'Internal description of the file, provided at compile-time.', - 'process.pe.file_version': 'Process name.', - 'process.pe.go_import_hash': 'A hash of the Go language imports in a PE file.', - 'process.pe.go_imports': 'List of imported Go language element names and types.', - 'process.pe.go_imports_names_entropy': 'Shannon entropy calculation from the list of Go imports.', - 'process.pe.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'process.pe.go_stripped': 'Whether the file is a stripped or obfuscated Go executable.', - 'process.pe.imphash': 'A hash of the imports in a PE file.', - 'process.pe.import_hash': 'A hash of the imports in a PE file.', - 'process.pe.imports': 'List of imported element names and types.', - 'process.pe.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'process.pe.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'process.pe.original_file_name': 'Internal name of the file, provided at compile-time.', - 'process.pe.pehash': 'A hash of the PE header and data from one or more PE sections.', - 'process.pe.product': 'Internal product name of the file, provided at compile-time.', - 'process.pe.sections': 'Section information of the PE file.', - 'process.pe.sections.entropy': 'Shannon entropy calculation from the section.', - 'process.pe.sections.name': 'PE Section List name.', - 'process.pe.sections.physical_size': 'PE Section List physical size.', - 
'process.pe.sections.var_entropy': 'Variance for Shannon entropy calculation from the section.', - 'process.pe.sections.virtual_size': - 'PE Section List virtual size. This is always the same as `physical_size`.', - 'process.pgid': 'Deprecated identifier of the group of processes the process belongs to.', - 'process.pid': 'Process id.', - 'process.previous.args': 'Array of process arguments.', - 'process.previous.args_count': 'Length of the process.args array.', - 'process.previous.executable': 'Absolute path to the process executable.', - 'process.real_group.id': 'Unique identifier for the group on the system/platform.', - 'process.real_group.name': 'Name of the group.', - 'process.real_user.id': 'Unique identifier of the user.', - 'process.real_user.name': 'Short name or login of the user.', - 'process.saved_group.id': 'Unique identifier for the group on the system/platform.', - 'process.saved_group.name': 'Name of the group.', - 'process.saved_user.id': 'Unique identifier of the user.', - 'process.saved_user.name': 'Short name or login of the user.', - 'process.session_leader.args': 'Array of process arguments.', - 'process.session_leader.args_count': 'Length of the process.args array.', - 'process.session_leader.command_line': 'Full command line that started the process.', - 'process.session_leader.entity_id': 'Unique identifier for the process.', - 'process.session_leader.executable': 'Absolute path to the process executable.', - 'process.session_leader.group.id': 'Unique identifier for the group on the system/platform.', - 'process.session_leader.group.name': 'Name of the group.', - 'process.session_leader.interactive': 'Whether the process is connected to an interactive shell.', - 'process.session_leader.name': 'Process name.', - 'process.session_leader.parent.entity_id': 'Unique identifier for the process.', - 'process.session_leader.parent.pid': 'Process id.', - 'process.session_leader.parent.session_leader.entity_id': 'Unique identifier for the process.', 
- 'process.session_leader.parent.session_leader.pid': 'Process id.', - 'process.session_leader.parent.session_leader.start': 'The time the process started.', - 'process.session_leader.parent.session_leader.vpid': 'Virtual process id.', - 'process.session_leader.parent.start': 'The time the process started.', - 'process.session_leader.parent.vpid': 'Virtual process id.', - 'process.session_leader.pid': 'Process id.', - 'process.session_leader.real_group.id': 'Unique identifier for the group on the system/platform.', - 'process.session_leader.real_group.name': 'Name of the group.', - 'process.session_leader.real_user.id': 'Unique identifier of the user.', - 'process.session_leader.real_user.name': 'Short name or login of the user.', - 'process.session_leader.same_as_process': - 'This boolean is used to identify if a leader process is the same as the top level process.', - 'process.session_leader.saved_group.id': - 'Unique identifier for the group on the system/platform.', - 'process.session_leader.saved_group.name': 'Name of the group.', - 'process.session_leader.saved_user.id': 'Unique identifier of the user.', - 'process.session_leader.saved_user.name': 'Short name or login of the user.', - 'process.session_leader.start': 'The time the process started.', - 'process.session_leader.supplemental_groups.id': - 'Unique identifier for the group on the system/platform.', - 'process.session_leader.supplemental_groups.name': 'Name of the group.', - 'process.session_leader.tty': 'Information about the controlling TTY device.', - 'process.session_leader.tty.char_device.major': 'The TTY character devices major number.', - 'process.session_leader.tty.char_device.minor': 'The TTY character devices minor number.', - 'process.session_leader.user.id': 'Unique identifier of the user.', - 'process.session_leader.user.name': 'Short name or login of the user.', - 'process.session_leader.vpid': 'Virtual process id.', - 'process.session_leader.working_directory': 'The working directory 
of the process.', - 'process.start': 'The time the process started.', - 'process.supplemental_groups.id': 'Unique identifier for the group on the system/platform.', - 'process.supplemental_groups.name': 'Name of the group.', - 'process.thread.capabilities.effective': 'Array of capabilities used for permission checks.', - 'process.thread.capabilities.permitted': 'Array of capabilities a thread could assume.', - 'process.thread.id': 'Thread ID.', - 'process.thread.name': 'Thread name.', - 'process.title': 'Process title.', - 'process.tty': 'Information about the controlling TTY device.', - 'process.tty.char_device.major': 'The TTY character devices major number.', - 'process.tty.char_device.minor': 'The TTY character devices minor number.', - 'process.tty.columns': 'The number of character columns per line. e.g terminal width', - 'process.tty.rows': 'The number of character rows in the terminal. e.g terminal height', - 'process.uptime': 'Seconds the process has been up.', - 'process.user.id': 'Unique identifier of the user.', - 'process.user.name': 'Short name or login of the user.', - 'process.vpid': 'Virtual process id.', - 'process.working_directory': 'The working directory of the process.', - 'registry.data.bytes': 'Original bytes written with base64 encoding.', - 'registry.data.strings': 'List of strings representing what was written to the registry.', - 'registry.data.type': 'Standard registry type for encoding contents', - 'registry.hive': 'Abbreviated name for the hive.', - 'registry.key': 'Hive-relative path of keys.', - 'registry.path': 'Full path, including hive, key and value', - 'registry.value': 'Name of the value written.', - 'related.hash': 'All the hashes seen on your event.', - 'related.hosts': 'All the host identifiers seen on your event.', - 'related.ip': 'All of the IPs seen on your event.', - 'related.user': 'All the user names or other user identifiers seen on the event.', - 'rule.author': 'Rule author', - 'rule.category': 'Rule category', - 
'rule.description': 'Rule description', - 'rule.id': 'Rule ID', - 'rule.license': 'Rule license', - 'rule.name': 'Rule name', - 'rule.reference': 'Rule reference URL', - 'rule.ruleset': 'Rule ruleset', - 'rule.uuid': 'Rule UUID', - 'rule.version': 'Rule version', - 'server.address': 'Server network address.', - 'server.as.number': 'Unique number allocated to the autonomous system.', - 'server.as.organization.name': 'Organization name.', - 'server.bytes': 'Bytes sent from the server to the client.', - 'server.domain': 'The domain name of the server.', - 'server.geo.city_name': 'City name.', - 'server.geo.continent_code': 'Continent code.', - 'server.geo.continent_name': 'Name of the continent.', - 'server.geo.country_iso_code': 'Country ISO code.', - 'server.geo.country_name': 'Country name.', - 'server.geo.location': 'Longitude and latitude.', - 'server.geo.name': 'User-defined description of a location.', - 'server.geo.postal_code': 'Postal code.', - 'server.geo.region_iso_code': 'Region ISO code.', - 'server.geo.region_name': 'Region name.', - 'server.geo.timezone': 'Time zone.', - 'server.ip': 'IP address of the server.', - 'server.mac': 'MAC address of the server.', - 'server.nat.ip': 'Server NAT ip', - 'server.nat.port': 'Server NAT port', - 'server.packets': 'Packets sent from the server to the client.', - 'server.port': 'Port of the server.', - 'server.registered_domain': 'The highest registered server domain, stripped of the subdomain.', - 'server.subdomain': 'The subdomain of the domain.', - 'server.top_level_domain': 'The effective top level domain (com, org, net, co.uk).', - 'server.user.domain': 'Name of the directory the user is a member of.', - 'server.user.email': 'User email address.', - 'server.user.full_name': 'Users full name, if available.', - 'server.user.group.domain': 'Name of the directory the group is a member of.', - 'server.user.group.id': 'Unique identifier for the group on the system/platform.', - 'server.user.group.name': 'Name of the 
group.', - 'server.user.hash': 'Unique user hash to correlate information for a user in anonymized form.', - 'server.user.id': 'Unique identifier of the user.', - 'server.user.name': 'Short name or login of the user.', - 'server.user.roles': 'Array of user roles at the time of the event.', - 'service.address': 'Address of this service.', - 'service.environment': 'Environment of the service.', - 'service.ephemeral_id': 'Ephemeral identifier of this service.', - 'service.id': 'Unique identifier of the running service.', - 'service.name': 'Name of the service.', - 'service.node.name': 'Name of the service node.', - 'service.node.role': 'Deprecated role (singular) of the service node.', - 'service.node.roles': 'Roles of the service node.', - 'service.origin.address': 'Address of this service.', - 'service.origin.environment': 'Environment of the service.', - 'service.origin.ephemeral_id': 'Ephemeral identifier of this service.', - 'service.origin.id': 'Unique identifier of the running service.', - 'service.origin.name': 'Name of the service.', - 'service.origin.node.name': 'Name of the service node.', - 'service.origin.node.role': 'Deprecated role (singular) of the service node.', - 'service.origin.node.roles': 'Roles of the service node.', - 'service.origin.state': 'Current state of the service.', - 'service.origin.type': 'The type of the service.', - 'service.origin.version': 'Version of the service.', - 'service.state': 'Current state of the service.', - 'service.target.address': 'Address of this service.', - 'service.target.environment': 'Environment of the service.', - 'service.target.ephemeral_id': 'Ephemeral identifier of this service.', - 'service.target.id': 'Unique identifier of the running service.', - 'service.target.name': 'Name of the service.', - 'service.target.node.name': 'Name of the service node.', - 'service.target.node.role': 'Deprecated role (singular) of the service node.', - 'service.target.node.roles': 'Roles of the service node.', - 
'service.target.state': 'Current state of the service.', - 'service.target.type': 'The type of the service.', - 'service.target.version': 'Version of the service.', - 'service.type': 'The type of the service.', - 'service.version': 'Version of the service.', - 'source.address': 'Source network address.', - 'source.as.number': 'Unique number allocated to the autonomous system.', - 'source.as.organization.name': 'Organization name.', - 'source.bytes': 'Bytes sent from the source to the destination.', - 'source.domain': 'The domain name of the source.', - 'source.geo.city_name': 'City name.', - 'source.geo.continent_code': 'Continent code.', - 'source.geo.continent_name': 'Name of the continent.', - 'source.geo.country_iso_code': 'Country ISO code.', - 'source.geo.country_name': 'Country name.', - 'source.geo.location': 'Longitude and latitude.', - 'source.geo.name': 'User-defined description of a location.', - 'source.geo.postal_code': 'Postal code.', - 'source.geo.region_iso_code': 'Region ISO code.', - 'source.geo.region_name': 'Region name.', - 'source.geo.timezone': 'Time zone.', - 'source.ip': 'IP address of the source.', - 'source.mac': 'MAC address of the source.', - 'source.nat.ip': 'Source NAT ip', - 'source.nat.port': 'Source NAT port', - 'source.packets': 'Packets sent from the source to the destination.', - 'source.port': 'Port of the source.', - 'source.registered_domain': 'The highest registered source domain, stripped of the subdomain.', - 'source.subdomain': 'The subdomain of the domain.', - 'source.top_level_domain': 'The effective top level domain (com, org, net, co.uk).', - 'source.user.domain': 'Name of the directory the user is a member of.', - 'source.user.email': 'User email address.', - 'source.user.full_name': 'Users full name, if available.', - 'source.user.group.domain': 'Name of the directory the group is a member of.', - 'source.user.group.id': 'Unique identifier for the group on the system/platform.', - 'source.user.group.name': 'Name of 
the group.', - 'source.user.hash': 'Unique user hash to correlate information for a user in anonymized form.', - 'source.user.id': 'Unique identifier of the user.', - 'source.user.name': 'Short name or login of the user.', - 'source.user.roles': 'Array of user roles at the time of the event.', - 'span.id': 'Unique identifier of the span within the scope of its trace.', - tags: 'List of keywords used to tag each event.', - 'threat.enrichments': 'List of objects containing indicators enriching the event.', - 'threat.enrichments.indicator': 'Object containing indicators enriching the event.', - 'threat.enrichments.indicator.as.number': 'Unique number allocated to the autonomous system.', - 'threat.enrichments.indicator.as.organization.name': 'Organization name.', - 'threat.enrichments.indicator.confidence': 'Indicator confidence rating', - 'threat.enrichments.indicator.description': 'Indicator description', - 'threat.enrichments.indicator.email.address': 'Indicator email address', - 'threat.enrichments.indicator.file.accessed': 'Last time the file was accessed.', - 'threat.enrichments.indicator.file.attributes': 'Array of file attributes.', - 'threat.enrichments.indicator.file.code_signature.digest_algorithm': - 'Hashing algorithm used to sign the process.', - 'threat.enrichments.indicator.file.code_signature.exists': - 'Boolean to capture if a signature is present.', - 'threat.enrichments.indicator.file.code_signature.signing_id': - 'The identifier used to sign the process.', - 'threat.enrichments.indicator.file.code_signature.status': - 'Additional information about the certificate status.', - 'threat.enrichments.indicator.file.code_signature.subject_name': - 'Subject name of the code signer', - 'threat.enrichments.indicator.file.code_signature.team_id': - 'The team identifier used to sign the process.', - 'threat.enrichments.indicator.file.code_signature.timestamp': - 'When the signature was generated and signed.', - 
'threat.enrichments.indicator.file.code_signature.trusted': - 'Stores the trust status of the certificate chain.', - 'threat.enrichments.indicator.file.code_signature.valid': - 'Boolean to capture if the digital signature is verified against the binary content.', - 'threat.enrichments.indicator.file.created': 'File creation time.', - 'threat.enrichments.indicator.file.ctime': 'Last time the file attributes or metadata changed.', - 'threat.enrichments.indicator.file.device': 'Device that is the source of the file.', - 'threat.enrichments.indicator.file.directory': 'Directory where the file is located.', - 'threat.enrichments.indicator.file.drive_letter': 'Drive letter where the file is located.', - 'threat.enrichments.indicator.file.elf.architecture': 'Machine architecture of the ELF file.', - 'threat.enrichments.indicator.file.elf.byte_order': 'Byte sequence of ELF file.', - 'threat.enrichments.indicator.file.elf.cpu_type': 'CPU type of the ELF file.', - 'threat.enrichments.indicator.file.elf.creation_date': 'Build or compile date.', - 'threat.enrichments.indicator.file.elf.exports': 'List of exported element names and types.', - 'threat.enrichments.indicator.file.elf.go_import_hash': - 'A hash of the Go language imports in an ELF file.', - 'threat.enrichments.indicator.file.elf.go_imports': - 'List of imported Go language element names and types.', - 'threat.enrichments.indicator.file.elf.go_imports_names_entropy': - 'Shannon entropy calculation from the list of Go imports.', - 'threat.enrichments.indicator.file.elf.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'threat.enrichments.indicator.file.elf.go_stripped': - 'Whether the file is a stripped or obfuscated Go executable.', - 'threat.enrichments.indicator.file.elf.header.abi_version': - 'Version of the ELF Application Binary Interface (ABI).', - 'threat.enrichments.indicator.file.elf.header.class': 'Header class of the ELF file.', - 
'threat.enrichments.indicator.file.elf.header.data': 'Data table of the ELF header.', - 'threat.enrichments.indicator.file.elf.header.entrypoint': 'Header entrypoint of the ELF file.', - 'threat.enrichments.indicator.file.elf.header.object_version': '"0x1" for original ELF files.', - 'threat.enrichments.indicator.file.elf.header.os_abi': - 'Application Binary Interface (ABI) of the Linux OS.', - 'threat.enrichments.indicator.file.elf.header.type': 'Header type of the ELF file.', - 'threat.enrichments.indicator.file.elf.header.version': 'Version of the ELF header.', - 'threat.enrichments.indicator.file.elf.import_hash': 'A hash of the imports in an ELF file.', - 'threat.enrichments.indicator.file.elf.imports': 'List of imported element names and types.', - 'threat.enrichments.indicator.file.elf.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'threat.enrichments.indicator.file.elf.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'threat.enrichments.indicator.file.elf.sections': 'Section information of the ELF file.', - 'threat.enrichments.indicator.file.elf.sections.chi2': - 'Chi-square probability distribution of the section.', - 'threat.enrichments.indicator.file.elf.sections.entropy': - 'Shannon entropy calculation from the section.', - 'threat.enrichments.indicator.file.elf.sections.flags': 'ELF Section List flags.', - 'threat.enrichments.indicator.file.elf.sections.name': 'ELF Section List name.', - 'threat.enrichments.indicator.file.elf.sections.physical_offset': 'ELF Section List offset.', - 'threat.enrichments.indicator.file.elf.sections.physical_size': 'ELF Section List physical size.', - 'threat.enrichments.indicator.file.elf.sections.type': 'ELF Section List type.', - 'threat.enrichments.indicator.file.elf.sections.var_entropy': - 'Variance for Shannon entropy calculation from the section.', - 
'threat.enrichments.indicator.file.elf.sections.virtual_address': - 'ELF Section List virtual address.', - 'threat.enrichments.indicator.file.elf.sections.virtual_size': 'ELF Section List virtual size.', - 'threat.enrichments.indicator.file.elf.segments': 'ELF object segment list.', - 'threat.enrichments.indicator.file.elf.segments.sections': 'ELF object segment sections.', - 'threat.enrichments.indicator.file.elf.segments.type': 'ELF object segment type.', - 'threat.enrichments.indicator.file.elf.shared_libraries': - 'List of shared libraries used by this ELF object.', - 'threat.enrichments.indicator.file.elf.telfhash': 'telfhash hash for ELF file.', - 'threat.enrichments.indicator.file.extension': 'File extension, excluding the leading dot.', - 'threat.enrichments.indicator.file.fork_name': - 'A fork is additional data associated with a filesystem object.', - 'threat.enrichments.indicator.file.gid': 'Primary group ID (GID) of the file.', - 'threat.enrichments.indicator.file.group': 'Primary group name of the file.', - 'threat.enrichments.indicator.file.hash.md5': 'MD5 hash.', - 'threat.enrichments.indicator.file.hash.sha1': 'SHA1 hash.', - 'threat.enrichments.indicator.file.hash.sha256': 'SHA256 hash.', - 'threat.enrichments.indicator.file.hash.sha384': 'SHA384 hash.', - 'threat.enrichments.indicator.file.hash.sha512': 'SHA512 hash.', - 'threat.enrichments.indicator.file.hash.ssdeep': 'SSDEEP hash.', - 'threat.enrichments.indicator.file.hash.tlsh': 'TLSH hash.', - 'threat.enrichments.indicator.file.inode': 'Inode representing the file in the filesystem.', - 'threat.enrichments.indicator.file.mime_type': - 'Media type of file, document, or arrangement of bytes.', - 'threat.enrichments.indicator.file.mode': 'Mode of the file in octal representation.', - 'threat.enrichments.indicator.file.mtime': 'Last time the file content was modified.', - 'threat.enrichments.indicator.file.name': - 'Name of the file including the extension, without the directory.', - 
'threat.enrichments.indicator.file.owner': 'File owners username.', - 'threat.enrichments.indicator.file.path': 'Full path to the file, including the file name.', - 'threat.enrichments.indicator.file.pe.architecture': 'CPU architecture target for the file.', - 'threat.enrichments.indicator.file.pe.company': - 'Internal company name of the file, provided at compile-time.', - 'threat.enrichments.indicator.file.pe.description': - 'Internal description of the file, provided at compile-time.', - 'threat.enrichments.indicator.file.pe.file_version': 'Process name.', - 'threat.enrichments.indicator.file.pe.go_import_hash': - 'A hash of the Go language imports in a PE file.', - 'threat.enrichments.indicator.file.pe.go_imports': - 'List of imported Go language element names and types.', - 'threat.enrichments.indicator.file.pe.go_imports_names_entropy': - 'Shannon entropy calculation from the list of Go imports.', - 'threat.enrichments.indicator.file.pe.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'threat.enrichments.indicator.file.pe.go_stripped': - 'Whether the file is a stripped or obfuscated Go executable.', - 'threat.enrichments.indicator.file.pe.imphash': 'A hash of the imports in a PE file.', - 'threat.enrichments.indicator.file.pe.import_hash': 'A hash of the imports in a PE file.', - 'threat.enrichments.indicator.file.pe.imports': 'List of imported element names and types.', - 'threat.enrichments.indicator.file.pe.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'threat.enrichments.indicator.file.pe.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'threat.enrichments.indicator.file.pe.original_file_name': - 'Internal name of the file, provided at compile-time.', - 'threat.enrichments.indicator.file.pe.pehash': - 'A hash of the PE header and data from one or more PE 
sections.', - 'threat.enrichments.indicator.file.pe.product': - 'Internal product name of the file, provided at compile-time.', - 'threat.enrichments.indicator.file.pe.sections': 'Section information of the PE file.', - 'threat.enrichments.indicator.file.pe.sections.entropy': - 'Shannon entropy calculation from the section.', - 'threat.enrichments.indicator.file.pe.sections.name': 'PE Section List name.', - 'threat.enrichments.indicator.file.pe.sections.physical_size': 'PE Section List physical size.', - 'threat.enrichments.indicator.file.pe.sections.var_entropy': - 'Variance for Shannon entropy calculation from the section.', - 'threat.enrichments.indicator.file.pe.sections.virtual_size': - 'PE Section List virtual size. This is always the same as `physical_size`.', - 'threat.enrichments.indicator.file.size': 'File size in bytes.', - 'threat.enrichments.indicator.file.target_path': 'Target path for symlinks.', - 'threat.enrichments.indicator.file.type': 'File type (file, dir, or symlink).', - 'threat.enrichments.indicator.file.uid': - 'The user ID (UID) or security identifier (SID) of the file owner.', - 'threat.enrichments.indicator.file.x509.alternative_names': - 'List of subject alternative names (SAN).', - 'threat.enrichments.indicator.file.x509.issuer.common_name': - 'List of common name (CN) of issuing certificate authority.', - 'threat.enrichments.indicator.file.x509.issuer.country': 'List of country (C) codes', - 'threat.enrichments.indicator.file.x509.issuer.distinguished_name': - 'Distinguished name (DN) of issuing certificate authority.', - 'threat.enrichments.indicator.file.x509.issuer.locality': 'List of locality names (L)', - 'threat.enrichments.indicator.file.x509.issuer.organization': - 'List of organizations (O) of issuing certificate authority.', - 'threat.enrichments.indicator.file.x509.issuer.organizational_unit': - 'List of organizational units (OU) of issuing certificate authority.', - 
'threat.enrichments.indicator.file.x509.issuer.state_or_province': - 'List of state or province names (ST, S, or P)', - 'threat.enrichments.indicator.file.x509.not_after': - 'Time at which the certificate is no longer considered valid.', - 'threat.enrichments.indicator.file.x509.not_before': - 'Time at which the certificate is first considered valid.', - 'threat.enrichments.indicator.file.x509.public_key_algorithm': - 'Algorithm used to generate the public key.', - 'threat.enrichments.indicator.file.x509.public_key_curve': - 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', - 'threat.enrichments.indicator.file.x509.public_key_exponent': - 'Exponent used to derive the public key. This is algorithm specific.', - 'threat.enrichments.indicator.file.x509.public_key_size': - 'The size of the public key space in bits.', - 'threat.enrichments.indicator.file.x509.serial_number': - 'Unique serial number issued by the certificate authority.', - 'threat.enrichments.indicator.file.x509.signature_algorithm': - 'Identifier for certificate signature algorithm.', - 'threat.enrichments.indicator.file.x509.subject.common_name': - 'List of common names (CN) of subject.', - 'threat.enrichments.indicator.file.x509.subject.country': 'List of country (C) code', - 'threat.enrichments.indicator.file.x509.subject.distinguished_name': - 'Distinguished name (DN) of the certificate subject entity.', - 'threat.enrichments.indicator.file.x509.subject.locality': 'List of locality names (L)', - 'threat.enrichments.indicator.file.x509.subject.organization': - 'List of organizations (O) of subject.', - 'threat.enrichments.indicator.file.x509.subject.organizational_unit': - 'List of organizational units (OU) of subject.', - 'threat.enrichments.indicator.file.x509.subject.state_or_province': - 'List of state or province names (ST, S, or P)', - 'threat.enrichments.indicator.file.x509.version_number': 'Version of x509 format.', - 
'threat.enrichments.indicator.first_seen': 'Date/time indicator was first reported.', - 'threat.enrichments.indicator.geo.city_name': 'City name.', - 'threat.enrichments.indicator.geo.continent_code': 'Continent code.', - 'threat.enrichments.indicator.geo.continent_name': 'Name of the continent.', - 'threat.enrichments.indicator.geo.country_iso_code': 'Country ISO code.', - 'threat.enrichments.indicator.geo.country_name': 'Country name.', - 'threat.enrichments.indicator.geo.location': 'Longitude and latitude.', - 'threat.enrichments.indicator.geo.name': 'User-defined description of a location.', - 'threat.enrichments.indicator.geo.postal_code': 'Postal code.', - 'threat.enrichments.indicator.geo.region_iso_code': 'Region ISO code.', - 'threat.enrichments.indicator.geo.region_name': 'Region name.', - 'threat.enrichments.indicator.geo.timezone': 'Time zone.', - 'threat.enrichments.indicator.ip': 'Indicator IP address', - 'threat.enrichments.indicator.last_seen': 'Date/time indicator was last reported.', - 'threat.enrichments.indicator.marking.tlp': 'Indicator TLP marking', - 'threat.enrichments.indicator.marking.tlp_version': 'Indicator TLP version', - 'threat.enrichments.indicator.modified_at': 'Date/time indicator was last updated.', - 'threat.enrichments.indicator.name': 'Indicator display name', - 'threat.enrichments.indicator.port': 'Indicator port', - 'threat.enrichments.indicator.provider': 'Indicator provider', - 'threat.enrichments.indicator.reference': 'Indicator reference URL', - 'threat.enrichments.indicator.registry.data.bytes': - 'Original bytes written with base64 encoding.', - 'threat.enrichments.indicator.registry.data.strings': - 'List of strings representing what was written to the registry.', - 'threat.enrichments.indicator.registry.data.type': 'Standard registry type for encoding contents', - 'threat.enrichments.indicator.registry.hive': 'Abbreviated name for the hive.', - 'threat.enrichments.indicator.registry.key': 'Hive-relative path of 
keys.', - 'threat.enrichments.indicator.registry.path': 'Full path, including hive, key and value', - 'threat.enrichments.indicator.registry.value': 'Name of the value written.', - 'threat.enrichments.indicator.scanner_stats': 'Scanner statistics', - 'threat.enrichments.indicator.sightings': 'Number of times indicator observed', - 'threat.enrichments.indicator.type': 'Type of indicator', - 'threat.enrichments.indicator.url.domain': 'Domain of the url.', - 'threat.enrichments.indicator.url.extension': - 'File extension from the request url, excluding the leading dot.', - 'threat.enrichments.indicator.url.fragment': 'Portion of the url after the `#`.', - 'threat.enrichments.indicator.url.full': 'Full unparsed URL.', - 'threat.enrichments.indicator.url.original': - 'Unmodified original url as seen in the event source.', - 'threat.enrichments.indicator.url.password': 'Password of the request.', - 'threat.enrichments.indicator.url.path': 'Path of the request, such as "/search".', - 'threat.enrichments.indicator.url.port': 'Port of the request, such as 443.', - 'threat.enrichments.indicator.url.query': 'Query string of the request.', - 'threat.enrichments.indicator.url.registered_domain': - 'The highest registered url domain, stripped of the subdomain.', - 'threat.enrichments.indicator.url.scheme': 'Scheme of the url.', - 'threat.enrichments.indicator.url.subdomain': 'The subdomain of the domain.', - 'threat.enrichments.indicator.url.top_level_domain': - 'The effective top level domain (com, org, net, co.uk).', - 'threat.enrichments.indicator.url.username': 'Username of the request.', - 'threat.enrichments.indicator.x509.alternative_names': 'List of subject alternative names (SAN).', - 'threat.enrichments.indicator.x509.issuer.common_name': - 'List of common name (CN) of issuing certificate authority.', - 'threat.enrichments.indicator.x509.issuer.country': 'List of country (C) codes', - 'threat.enrichments.indicator.x509.issuer.distinguished_name': - 'Distinguished name 
(DN) of issuing certificate authority.', - 'threat.enrichments.indicator.x509.issuer.locality': 'List of locality names (L)', - 'threat.enrichments.indicator.x509.issuer.organization': - 'List of organizations (O) of issuing certificate authority.', - 'threat.enrichments.indicator.x509.issuer.organizational_unit': - 'List of organizational units (OU) of issuing certificate authority.', - 'threat.enrichments.indicator.x509.issuer.state_or_province': - 'List of state or province names (ST, S, or P)', - 'threat.enrichments.indicator.x509.not_after': - 'Time at which the certificate is no longer considered valid.', - 'threat.enrichments.indicator.x509.not_before': - 'Time at which the certificate is first considered valid.', - 'threat.enrichments.indicator.x509.public_key_algorithm': - 'Algorithm used to generate the public key.', - 'threat.enrichments.indicator.x509.public_key_curve': - 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', - 'threat.enrichments.indicator.x509.public_key_exponent': - 'Exponent used to derive the public key. 
This is algorithm specific.', - 'threat.enrichments.indicator.x509.public_key_size': 'The size of the public key space in bits.', - 'threat.enrichments.indicator.x509.serial_number': - 'Unique serial number issued by the certificate authority.', - 'threat.enrichments.indicator.x509.signature_algorithm': - 'Identifier for certificate signature algorithm.', - 'threat.enrichments.indicator.x509.subject.common_name': 'List of common names (CN) of subject.', - 'threat.enrichments.indicator.x509.subject.country': 'List of country (C) code', - 'threat.enrichments.indicator.x509.subject.distinguished_name': - 'Distinguished name (DN) of the certificate subject entity.', - 'threat.enrichments.indicator.x509.subject.locality': 'List of locality names (L)', - 'threat.enrichments.indicator.x509.subject.organization': 'List of organizations (O) of subject.', - 'threat.enrichments.indicator.x509.subject.organizational_unit': - 'List of organizational units (OU) of subject.', - 'threat.enrichments.indicator.x509.subject.state_or_province': - 'List of state or province names (ST, S, or P)', - 'threat.enrichments.indicator.x509.version_number': 'Version of x509 format.', - 'threat.enrichments.matched.atomic': 'Matched indicator value', - 'threat.enrichments.matched.field': 'Matched indicator field', - 'threat.enrichments.matched.id': 'Matched indicator identifier', - 'threat.enrichments.matched.index': 'Matched indicator index', - 'threat.enrichments.matched.occurred': 'Date of match', - 'threat.enrichments.matched.type': 'Type of indicator match', - 'threat.feed.dashboard_id': 'Feed dashboard ID.', - 'threat.feed.description': 'Description of the threat feed.', - 'threat.feed.name': 'Name of the threat feed.', - 'threat.feed.reference': 'Reference for the threat feed.', - 'threat.framework': 'Threat classification framework.', - 'threat.group.alias': 'Alias of the group.', - 'threat.group.id': 'ID of the group.', - 'threat.group.name': 'Name of the group.', - 
'threat.group.reference': 'Reference URL of the group.', - 'threat.indicator.as.number': 'Unique number allocated to the autonomous system.', - 'threat.indicator.as.organization.name': 'Organization name.', - 'threat.indicator.confidence': 'Indicator confidence rating', - 'threat.indicator.description': 'Indicator description', - 'threat.indicator.email.address': 'Indicator email address', - 'threat.indicator.file.accessed': 'Last time the file was accessed.', - 'threat.indicator.file.attributes': 'Array of file attributes.', - 'threat.indicator.file.code_signature.digest_algorithm': - 'Hashing algorithm used to sign the process.', - 'threat.indicator.file.code_signature.exists': 'Boolean to capture if a signature is present.', - 'threat.indicator.file.code_signature.signing_id': 'The identifier used to sign the process.', - 'threat.indicator.file.code_signature.status': - 'Additional information about the certificate status.', - 'threat.indicator.file.code_signature.subject_name': 'Subject name of the code signer', - 'threat.indicator.file.code_signature.team_id': 'The team identifier used to sign the process.', - 'threat.indicator.file.code_signature.timestamp': 'When the signature was generated and signed.', - 'threat.indicator.file.code_signature.trusted': - 'Stores the trust status of the certificate chain.', - 'threat.indicator.file.code_signature.valid': - 'Boolean to capture if the digital signature is verified against the binary content.', - 'threat.indicator.file.created': 'File creation time.', - 'threat.indicator.file.ctime': 'Last time the file attributes or metadata changed.', - 'threat.indicator.file.device': 'Device that is the source of the file.', - 'threat.indicator.file.directory': 'Directory where the file is located.', - 'threat.indicator.file.drive_letter': 'Drive letter where the file is located.', - 'threat.indicator.file.elf.architecture': 'Machine architecture of the ELF file.', - 'threat.indicator.file.elf.byte_order': 'Byte sequence of 
ELF file.', - 'threat.indicator.file.elf.cpu_type': 'CPU type of the ELF file.', - 'threat.indicator.file.elf.creation_date': 'Build or compile date.', - 'threat.indicator.file.elf.exports': 'List of exported element names and types.', - 'threat.indicator.file.elf.go_import_hash': 'A hash of the Go language imports in an ELF file.', - 'threat.indicator.file.elf.go_imports': 'List of imported Go language element names and types.', - 'threat.indicator.file.elf.go_imports_names_entropy': - 'Shannon entropy calculation from the list of Go imports.', - 'threat.indicator.file.elf.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'threat.indicator.file.elf.go_stripped': - 'Whether the file is a stripped or obfuscated Go executable.', - 'threat.indicator.file.elf.header.abi_version': - 'Version of the ELF Application Binary Interface (ABI).', - 'threat.indicator.file.elf.header.class': 'Header class of the ELF file.', - 'threat.indicator.file.elf.header.data': 'Data table of the ELF header.', - 'threat.indicator.file.elf.header.entrypoint': 'Header entrypoint of the ELF file.', - 'threat.indicator.file.elf.header.object_version': '"0x1" for original ELF files.', - 'threat.indicator.file.elf.header.os_abi': 'Application Binary Interface (ABI) of the Linux OS.', - 'threat.indicator.file.elf.header.type': 'Header type of the ELF file.', - 'threat.indicator.file.elf.header.version': 'Version of the ELF header.', - 'threat.indicator.file.elf.import_hash': 'A hash of the imports in an ELF file.', - 'threat.indicator.file.elf.imports': 'List of imported element names and types.', - 'threat.indicator.file.elf.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'threat.indicator.file.elf.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element names and types.', - 'threat.indicator.file.elf.sections': 'Section 
information of the ELF file.', - 'threat.indicator.file.elf.sections.chi2': 'Chi-square probability distribution of the section.', - 'threat.indicator.file.elf.sections.entropy': 'Shannon entropy calculation from the section.', - 'threat.indicator.file.elf.sections.flags': 'ELF Section List flags.', - 'threat.indicator.file.elf.sections.name': 'ELF Section List name.', - 'threat.indicator.file.elf.sections.physical_offset': 'ELF Section List offset.', - 'threat.indicator.file.elf.sections.physical_size': 'ELF Section List physical size.', - 'threat.indicator.file.elf.sections.type': 'ELF Section List type.', - 'threat.indicator.file.elf.sections.var_entropy': - 'Variance for Shannon entropy calculation from the section.', - 'threat.indicator.file.elf.sections.virtual_address': 'ELF Section List virtual address.', - 'threat.indicator.file.elf.sections.virtual_size': 'ELF Section List virtual size.', - 'threat.indicator.file.elf.segments': 'ELF object segment list.', - 'threat.indicator.file.elf.segments.sections': 'ELF object segment sections.', - 'threat.indicator.file.elf.segments.type': 'ELF object segment type.', - 'threat.indicator.file.elf.shared_libraries': 'List of shared libraries used by this ELF object.', - 'threat.indicator.file.elf.telfhash': 'telfhash hash for ELF file.', - 'threat.indicator.file.extension': 'File extension, excluding the leading dot.', - 'threat.indicator.file.fork_name': - 'A fork is additional data associated with a filesystem object.', - 'threat.indicator.file.gid': 'Primary group ID (GID) of the file.', - 'threat.indicator.file.group': 'Primary group name of the file.', - 'threat.indicator.file.hash.md5': 'MD5 hash.', - 'threat.indicator.file.hash.sha1': 'SHA1 hash.', - 'threat.indicator.file.hash.sha256': 'SHA256 hash.', - 'threat.indicator.file.hash.sha384': 'SHA384 hash.', - 'threat.indicator.file.hash.sha512': 'SHA512 hash.', - 'threat.indicator.file.hash.ssdeep': 'SSDEEP hash.', - 'threat.indicator.file.hash.tlsh': 'TLSH 
hash.', - 'threat.indicator.file.inode': 'Inode representing the file in the filesystem.', - 'threat.indicator.file.mime_type': 'Media type of file, document, or arrangement of bytes.', - 'threat.indicator.file.mode': 'Mode of the file in octal representation.', - 'threat.indicator.file.mtime': 'Last time the file content was modified.', - 'threat.indicator.file.name': 'Name of the file including the extension, without the directory.', - 'threat.indicator.file.owner': 'File owners username.', - 'threat.indicator.file.path': 'Full path to the file, including the file name.', - 'threat.indicator.file.pe.architecture': 'CPU architecture target for the file.', - 'threat.indicator.file.pe.company': - 'Internal company name of the file, provided at compile-time.', - 'threat.indicator.file.pe.description': - 'Internal description of the file, provided at compile-time.', - 'threat.indicator.file.pe.file_version': 'Process name.', - 'threat.indicator.file.pe.go_import_hash': 'A hash of the Go language imports in a PE file.', - 'threat.indicator.file.pe.go_imports': 'List of imported Go language element names and types.', - 'threat.indicator.file.pe.go_imports_names_entropy': - 'Shannon entropy calculation from the list of Go imports.', - 'threat.indicator.file.pe.go_imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of Go imports.', - 'threat.indicator.file.pe.go_stripped': - 'Whether the file is a stripped or obfuscated Go executable.', - 'threat.indicator.file.pe.imphash': 'A hash of the imports in a PE file.', - 'threat.indicator.file.pe.import_hash': 'A hash of the imports in a PE file.', - 'threat.indicator.file.pe.imports': 'List of imported element names and types.', - 'threat.indicator.file.pe.imports_names_entropy': - 'Shannon entropy calculation from the list of imported element names and types.', - 'threat.indicator.file.pe.imports_names_var_entropy': - 'Variance for Shannon entropy calculation from the list of imported element 
names and types.', - 'threat.indicator.file.pe.original_file_name': - 'Internal name of the file, provided at compile-time.', - 'threat.indicator.file.pe.pehash': - 'A hash of the PE header and data from one or more PE sections.', - 'threat.indicator.file.pe.product': - 'Internal product name of the file, provided at compile-time.', - 'threat.indicator.file.pe.sections': 'Section information of the PE file.', - 'threat.indicator.file.pe.sections.entropy': 'Shannon entropy calculation from the section.', - 'threat.indicator.file.pe.sections.name': 'PE Section List name.', - 'threat.indicator.file.pe.sections.physical_size': 'PE Section List physical size.', - 'threat.indicator.file.pe.sections.var_entropy': - 'Variance for Shannon entropy calculation from the section.', - 'threat.indicator.file.pe.sections.virtual_size': - 'PE Section List virtual size. This is always the same as `physical_size`.', - 'threat.indicator.file.size': 'File size in bytes.', - 'threat.indicator.file.target_path': 'Target path for symlinks.', - 'threat.indicator.file.type': 'File type (file, dir, or symlink).', - 'threat.indicator.file.uid': 'The user ID (UID) or security identifier (SID) of the file owner.', - 'threat.indicator.file.x509.alternative_names': 'List of subject alternative names (SAN).', - 'threat.indicator.file.x509.issuer.common_name': - 'List of common name (CN) of issuing certificate authority.', - 'threat.indicator.file.x509.issuer.country': 'List of country (C) codes', - 'threat.indicator.file.x509.issuer.distinguished_name': - 'Distinguished name (DN) of issuing certificate authority.', - 'threat.indicator.file.x509.issuer.locality': 'List of locality names (L)', - 'threat.indicator.file.x509.issuer.organization': - 'List of organizations (O) of issuing certificate authority.', - 'threat.indicator.file.x509.issuer.organizational_unit': - 'List of organizational units (OU) of issuing certificate authority.', - 'threat.indicator.file.x509.issuer.state_or_province': - 
'List of state or province names (ST, S, or P)', - 'threat.indicator.file.x509.not_after': - 'Time at which the certificate is no longer considered valid.', - 'threat.indicator.file.x509.not_before': - 'Time at which the certificate is first considered valid.', - 'threat.indicator.file.x509.public_key_algorithm': 'Algorithm used to generate the public key.', - 'threat.indicator.file.x509.public_key_curve': - 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', - 'threat.indicator.file.x509.public_key_exponent': - 'Exponent used to derive the public key. This is algorithm specific.', - 'threat.indicator.file.x509.public_key_size': 'The size of the public key space in bits.', - 'threat.indicator.file.x509.serial_number': - 'Unique serial number issued by the certificate authority.', - 'threat.indicator.file.x509.signature_algorithm': - 'Identifier for certificate signature algorithm.', - 'threat.indicator.file.x509.subject.common_name': 'List of common names (CN) of subject.', - 'threat.indicator.file.x509.subject.country': 'List of country (C) code', - 'threat.indicator.file.x509.subject.distinguished_name': - 'Distinguished name (DN) of the certificate subject entity.', - 'threat.indicator.file.x509.subject.locality': 'List of locality names (L)', - 'threat.indicator.file.x509.subject.organization': 'List of organizations (O) of subject.', - 'threat.indicator.file.x509.subject.organizational_unit': - 'List of organizational units (OU) of subject.', - 'threat.indicator.file.x509.subject.state_or_province': - 'List of state or province names (ST, S, or P)', - 'threat.indicator.file.x509.version_number': 'Version of x509 format.', - 'threat.indicator.first_seen': 'Date/time indicator was first reported.', - 'threat.indicator.geo.city_name': 'City name.', - 'threat.indicator.geo.continent_code': 'Continent code.', - 'threat.indicator.geo.continent_name': 'Name of the continent.', - 'threat.indicator.geo.country_iso_code': 'Country 
ISO code.', - 'threat.indicator.geo.country_name': 'Country name.', - 'threat.indicator.geo.location': 'Longitude and latitude.', - 'threat.indicator.geo.name': 'User-defined description of a location.', - 'threat.indicator.geo.postal_code': 'Postal code.', - 'threat.indicator.geo.region_iso_code': 'Region ISO code.', - 'threat.indicator.geo.region_name': 'Region name.', - 'threat.indicator.geo.timezone': 'Time zone.', - 'threat.indicator.ip': 'Indicator IP address', - 'threat.indicator.last_seen': 'Date/time indicator was last reported.', - 'threat.indicator.marking.tlp': 'Indicator TLP marking', - 'threat.indicator.marking.tlp_version': 'Indicator TLP version', - 'threat.indicator.modified_at': 'Date/time indicator was last updated.', - 'threat.indicator.name': 'Indicator display name', - 'threat.indicator.port': 'Indicator port', - 'threat.indicator.provider': 'Indicator provider', - 'threat.indicator.reference': 'Indicator reference URL', - 'threat.indicator.registry.data.bytes': 'Original bytes written with base64 encoding.', - 'threat.indicator.registry.data.strings': - 'List of strings representing what was written to the registry.', - 'threat.indicator.registry.data.type': 'Standard registry type for encoding contents', - 'threat.indicator.registry.hive': 'Abbreviated name for the hive.', - 'threat.indicator.registry.key': 'Hive-relative path of keys.', - 'threat.indicator.registry.path': 'Full path, including hive, key and value', - 'threat.indicator.registry.value': 'Name of the value written.', - 'threat.indicator.scanner_stats': 'Scanner statistics', - 'threat.indicator.sightings': 'Number of times indicator observed', - 'threat.indicator.type': 'Type of indicator', - 'threat.indicator.url.domain': 'Domain of the url.', - 'threat.indicator.url.extension': - 'File extension from the request url, excluding the leading dot.', - 'threat.indicator.url.fragment': 'Portion of the url after the `#`.', - 'threat.indicator.url.full': 'Full unparsed URL.', - 
'threat.indicator.url.original': 'Unmodified original url as seen in the event source.', - 'threat.indicator.url.password': 'Password of the request.', - 'threat.indicator.url.path': 'Path of the request, such as "/search".', - 'threat.indicator.url.port': 'Port of the request, such as 443.', - 'threat.indicator.url.query': 'Query string of the request.', - 'threat.indicator.url.registered_domain': - 'The highest registered url domain, stripped of the subdomain.', - 'threat.indicator.url.scheme': 'Scheme of the url.', - 'threat.indicator.url.subdomain': 'The subdomain of the domain.', - 'threat.indicator.url.top_level_domain': 'The effective top level domain (com, org, net, co.uk).', - 'threat.indicator.url.username': 'Username of the request.', - 'threat.indicator.x509.alternative_names': 'List of subject alternative names (SAN).', - 'threat.indicator.x509.issuer.common_name': - 'List of common name (CN) of issuing certificate authority.', - 'threat.indicator.x509.issuer.country': 'List of country (C) codes', - 'threat.indicator.x509.issuer.distinguished_name': - 'Distinguished name (DN) of issuing certificate authority.', - 'threat.indicator.x509.issuer.locality': 'List of locality names (L)', - 'threat.indicator.x509.issuer.organization': - 'List of organizations (O) of issuing certificate authority.', - 'threat.indicator.x509.issuer.organizational_unit': - 'List of organizational units (OU) of issuing certificate authority.', - 'threat.indicator.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', - 'threat.indicator.x509.not_after': 'Time at which the certificate is no longer considered valid.', - 'threat.indicator.x509.not_before': 'Time at which the certificate is first considered valid.', - 'threat.indicator.x509.public_key_algorithm': 'Algorithm used to generate the public key.', - 'threat.indicator.x509.public_key_curve': - 'The curve used by the elliptic curve public key algorithm. 
This is algorithm specific.', - 'threat.indicator.x509.public_key_exponent': - 'Exponent used to derive the public key. This is algorithm specific.', - 'threat.indicator.x509.public_key_size': 'The size of the public key space in bits.', - 'threat.indicator.x509.serial_number': - 'Unique serial number issued by the certificate authority.', - 'threat.indicator.x509.signature_algorithm': 'Identifier for certificate signature algorithm.', - 'threat.indicator.x509.subject.common_name': 'List of common names (CN) of subject.', - 'threat.indicator.x509.subject.country': 'List of country (C) code', - 'threat.indicator.x509.subject.distinguished_name': - 'Distinguished name (DN) of the certificate subject entity.', - 'threat.indicator.x509.subject.locality': 'List of locality names (L)', - 'threat.indicator.x509.subject.organization': 'List of organizations (O) of subject.', - 'threat.indicator.x509.subject.organizational_unit': - 'List of organizational units (OU) of subject.', - 'threat.indicator.x509.subject.state_or_province': - 'List of state or province names (ST, S, or P)', - 'threat.indicator.x509.version_number': 'Version of x509 format.', - 'threat.software.alias': 'Alias of the software', - 'threat.software.id': 'ID of the software', - 'threat.software.name': 'Name of the software.', - 'threat.software.platforms': 'Platforms of the software.', - 'threat.software.reference': 'Software reference URL.', - 'threat.software.type': 'Software type.', - 'threat.tactic.id': 'Threat tactic id.', - 'threat.tactic.name': 'Threat tactic.', - 'threat.tactic.reference': 'Threat tactic URL reference.', - 'threat.technique.id': 'Threat technique id.', - 'threat.technique.name': 'Threat technique name.', - 'threat.technique.reference': 'Threat technique URL reference.', - 'threat.technique.subtechnique.id': 'Threat subtechnique id.', - 'threat.technique.subtechnique.name': 'Threat subtechnique name.', - 'threat.technique.subtechnique.reference': 'Threat subtechnique URL 
reference.', - 'tls.cipher': 'String indicating the cipher used during the current connection.', - 'tls.client.certificate': 'PEM-encoded stand-alone certificate offered by the client.', - 'tls.client.certificate_chain': - 'Array of PEM-encoded certificates that make up the certificate chain offered by the client.', - 'tls.client.hash.md5': - 'Certificate fingerprint using the MD5 digest of DER-encoded version of certificate offered by the client.', - 'tls.client.hash.sha1': - 'Certificate fingerprint using the SHA1 digest of DER-encoded version of certificate offered by the client.', - 'tls.client.hash.sha256': - 'Certificate fingerprint using the SHA256 digest of DER-encoded version of certificate offered by the client.', - 'tls.client.issuer': - 'Distinguished name of subject of the issuer of the x.509 certificate presented by the client.', - 'tls.client.ja3': - 'A hash that identifies clients based on how they perform an SSL/TLS handshake.', - 'tls.client.not_after': - 'Date/Time indicating when client certificate is no longer considered valid.', - 'tls.client.not_before': - 'Date/Time indicating when client certificate is first considered valid.', - 'tls.client.server_name': 'Hostname the client is trying to connect to. 
Also called the SNI.', - 'tls.client.subject': - 'Distinguished name of subject of the x.509 certificate presented by the client.', - 'tls.client.supported_ciphers': 'Array of ciphers offered by the client during the client hello.', - 'tls.client.x509.alternative_names': 'List of subject alternative names (SAN).', - 'tls.client.x509.issuer.common_name': - 'List of common name (CN) of issuing certificate authority.', - 'tls.client.x509.issuer.country': 'List of country (C) codes', - 'tls.client.x509.issuer.distinguished_name': - 'Distinguished name (DN) of issuing certificate authority.', - 'tls.client.x509.issuer.locality': 'List of locality names (L)', - 'tls.client.x509.issuer.organization': - 'List of organizations (O) of issuing certificate authority.', - 'tls.client.x509.issuer.organizational_unit': - 'List of organizational units (OU) of issuing certificate authority.', - 'tls.client.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', - 'tls.client.x509.not_after': 'Time at which the certificate is no longer considered valid.', - 'tls.client.x509.not_before': 'Time at which the certificate is first considered valid.', - 'tls.client.x509.public_key_algorithm': 'Algorithm used to generate the public key.', - 'tls.client.x509.public_key_curve': - 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', - 'tls.client.x509.public_key_exponent': - 'Exponent used to derive the public key. 
This is algorithm specific.', - 'tls.client.x509.public_key_size': 'The size of the public key space in bits.', - 'tls.client.x509.serial_number': 'Unique serial number issued by the certificate authority.', - 'tls.client.x509.signature_algorithm': 'Identifier for certificate signature algorithm.', - 'tls.client.x509.subject.common_name': 'List of common names (CN) of subject.', - 'tls.client.x509.subject.country': 'List of country (C) code', - 'tls.client.x509.subject.distinguished_name': - 'Distinguished name (DN) of the certificate subject entity.', - 'tls.client.x509.subject.locality': 'List of locality names (L)', - 'tls.client.x509.subject.organization': 'List of organizations (O) of subject.', - 'tls.client.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', - 'tls.client.x509.subject.state_or_province': 'List of state or province names (ST, S, or P)', - 'tls.client.x509.version_number': 'Version of x509 format.', - 'tls.curve': 'String indicating the curve used for the given cipher, when applicable.', - 'tls.established': - 'Boolean flag indicating if the TLS negotiation was successful and transitioned to an encrypted tunnel.', - 'tls.next_protocol': 'String indicating the protocol being tunneled.', - 'tls.resumed': - 'Boolean flag indicating if this TLS connection was resumed from an existing TLS negotiation.', - 'tls.server.certificate': 'PEM-encoded stand-alone certificate offered by the server.', - 'tls.server.certificate_chain': - 'Array of PEM-encoded certificates that make up the certificate chain offered by the server.', - 'tls.server.hash.md5': - 'Certificate fingerprint using the MD5 digest of DER-encoded version of certificate offered by the server.', - 'tls.server.hash.sha1': - 'Certificate fingerprint using the SHA1 digest of DER-encoded version of certificate offered by the server.', - 'tls.server.hash.sha256': - 'Certificate fingerprint using the SHA256 digest of DER-encoded version of certificate offered by 
the server.', - 'tls.server.issuer': 'Subject of the issuer of the x.509 certificate presented by the server.', - 'tls.server.ja3s': - 'A hash that identifies servers based on how they perform an SSL/TLS handshake.', - 'tls.server.not_after': - 'Timestamp indicating when server certificate is no longer considered valid.', - 'tls.server.not_before': - 'Timestamp indicating when server certificate is first considered valid.', - 'tls.server.subject': 'Subject of the x.509 certificate presented by the server.', - 'tls.server.x509.alternative_names': 'List of subject alternative names (SAN).', - 'tls.server.x509.issuer.common_name': - 'List of common name (CN) of issuing certificate authority.', - 'tls.server.x509.issuer.country': 'List of country (C) codes', - 'tls.server.x509.issuer.distinguished_name': - 'Distinguished name (DN) of issuing certificate authority.', - 'tls.server.x509.issuer.locality': 'List of locality names (L)', - 'tls.server.x509.issuer.organization': - 'List of organizations (O) of issuing certificate authority.', - 'tls.server.x509.issuer.organizational_unit': - 'List of organizational units (OU) of issuing certificate authority.', - 'tls.server.x509.issuer.state_or_province': 'List of state or province names (ST, S, or P)', - 'tls.server.x509.not_after': 'Time at which the certificate is no longer considered valid.', - 'tls.server.x509.not_before': 'Time at which the certificate is first considered valid.', - 'tls.server.x509.public_key_algorithm': 'Algorithm used to generate the public key.', - 'tls.server.x509.public_key_curve': - 'The curve used by the elliptic curve public key algorithm. This is algorithm specific.', - 'tls.server.x509.public_key_exponent': - 'Exponent used to derive the public key. 
This is algorithm specific.', - 'tls.server.x509.public_key_size': 'The size of the public key space in bits.', - 'tls.server.x509.serial_number': 'Unique serial number issued by the certificate authority.', - 'tls.server.x509.signature_algorithm': 'Identifier for certificate signature algorithm.', - 'tls.server.x509.subject.common_name': 'List of common names (CN) of subject.', - 'tls.server.x509.subject.country': 'List of country (C) code', - 'tls.server.x509.subject.distinguished_name': - 'Distinguished name (DN) of the certificate subject entity.', - 'tls.server.x509.subject.locality': 'List of locality names (L)', - 'tls.server.x509.subject.organization': 'List of organizations (O) of subject.', - 'tls.server.x509.subject.organizational_unit': 'List of organizational units (OU) of subject.', - 'tls.server.x509.subject.state_or_province': 'List of state or province names (ST, S, or P)', - 'tls.server.x509.version_number': 'Version of x509 format.', - 'tls.version': 'Numeric part of the version parsed from the original string.', - 'tls.version_protocol': 'Normalized lowercase protocol name parsed from original string.', - 'trace.id': 'Unique identifier of the trace.', - 'transaction.id': 'Unique identifier of the transaction within the scope of its trace.', - 'url.domain': 'Domain of the url.', - 'url.extension': 'File extension from the request url, excluding the leading dot.', - 'url.fragment': 'Portion of the url after the `#`.', - 'url.full': 'Full unparsed URL.', - 'url.original': 'Unmodified original url as seen in the event source.', - 'url.password': 'Password of the request.', - 'url.path': 'Path of the request, such as "/search".', - 'url.port': 'Port of the request, such as 443.', - 'url.query': 'Query string of the request.', - 'url.registered_domain': 'The highest registered url domain, stripped of the subdomain.', - 'url.scheme': 'Scheme of the url.', - 'url.subdomain': 'The subdomain of the domain.', - 'url.top_level_domain': 'The effective top 
level domain (com, org, net, co.uk).', - 'url.username': 'Username of the request.', - 'user.changes.domain': 'Name of the directory the user is a member of.', - 'user.changes.email': 'User email address.', - 'user.changes.full_name': 'Users full name, if available.', - 'user.changes.group.domain': 'Name of the directory the group is a member of.', - 'user.changes.group.id': 'Unique identifier for the group on the system/platform.', - 'user.changes.group.name': 'Name of the group.', - 'user.changes.hash': 'Unique user hash to correlate information for a user in anonymized form.', - 'user.changes.id': 'Unique identifier of the user.', - 'user.changes.name': 'Short name or login of the user.', - 'user.changes.roles': 'Array of user roles at the time of the event.', - 'user.domain': 'Name of the directory the user is a member of.', - 'user.effective.domain': 'Name of the directory the user is a member of.', - 'user.effective.email': 'User email address.', - 'user.effective.full_name': 'Users full name, if available.', - 'user.effective.group.domain': 'Name of the directory the group is a member of.', - 'user.effective.group.id': 'Unique identifier for the group on the system/platform.', - 'user.effective.group.name': 'Name of the group.', - 'user.effective.hash': 'Unique user hash to correlate information for a user in anonymized form.', - 'user.effective.id': 'Unique identifier of the user.', - 'user.effective.name': 'Short name or login of the user.', - 'user.effective.roles': 'Array of user roles at the time of the event.', - 'user.email': 'User email address.', - 'user.full_name': 'Users full name, if available.', - 'user.group.domain': 'Name of the directory the group is a member of.', - 'user.group.id': 'Unique identifier for the group on the system/platform.', - 'user.group.name': 'Name of the group.', - 'user.hash': 'Unique user hash to correlate information for a user in anonymized form.', - 'user.id': 'Unique identifier of the user.', - 'user.name': 'Short 
name or login of the user.', - 'user.risk.calculated_level': - 'A risk classification level calculated by an internal system as part of entity analytics and entity risk scoring.', - 'user.risk.calculated_score': - 'A risk classification score calculated by an internal system as part of entity analytics and entity risk scoring.', - 'user.risk.calculated_score_norm': 'A normalized risk score calculated by an internal system.', - 'user.risk.static_level': - 'A risk classification level obtained from outside the system, such as from some external Threat Intelligence Platform.', - 'user.risk.static_score': - 'A risk classification score obtained from outside the system, such as from some external Threat Intelligence Platform.', - 'user.risk.static_score_norm': 'A normalized risk score calculated by an external system.', - 'user.roles': 'Array of user roles at the time of the event.', - 'user.target.domain': 'Name of the directory the user is a member of.', - 'user.target.email': 'User email address.', - 'user.target.full_name': 'Users full name, if available.', - 'user.target.group.domain': 'Name of the directory the group is a member of.', - 'user.target.group.id': 'Unique identifier for the group on the system/platform.', - 'user.target.group.name': 'Name of the group.', - 'user.target.hash': 'Unique user hash to correlate information for a user in anonymized form.', - 'user.target.id': 'Unique identifier of the user.', - 'user.target.name': 'Short name or login of the user.', - 'user.target.roles': 'Array of user roles at the time of the event.', - 'user_agent.device.name': 'Name of the device.', - 'user_agent.name': 'Name of the user agent.', - 'user_agent.original': 'Unparsed user_agent string.', - 'user_agent.os.family': 'OS family (such as redhat, debian, freebsd, windows).', - 'user_agent.os.full': 'Operating system name, including the version or code name.', - 'user_agent.os.kernel': 'Operating system kernel version as a raw string.', - 'user_agent.os.name': 
'Operating system name, without the version.', - 'user_agent.os.platform': 'Operating system platform (such centos, ubuntu, windows).', - 'user_agent.os.type': - 'Which commercial OS family (one of: linux, macos, unix, windows, ios or android).', - 'user_agent.os.version': 'Operating system version as a raw string.', - 'user_agent.version': 'Version of the user agent.', - 'volume.bus_type': 'Bus type of the device.', - 'volume.default_access': 'Bus type of the device.', - 'volume.device_name': 'Device name of the volume.', - 'volume.device_type': 'Volume device type.', - 'volume.dos_name': 'DOS name of the device.', - 'volume.file_system_type': 'Volume device file system type.', - 'volume.mount_name': 'Mount name of the volume.', - 'volume.nt_name': 'NT name of the device.', - 'volume.product_id': 'ProductID of the device.', - 'volume.product_name': 'Produce name of the volume.', - 'volume.removable': 'Indicates if the volume is removable.', - 'volume.serial_number': 'Serial number of the device.', - 'volume.size': 'Size of the volume device in bytes.', - 'volume.vendor_id': 'VendorID of the device.', - 'volume.vendor_name': 'Vendor name of the device.', - 'volume.writable': 'Indicates if the volume is writable.', - 'vulnerability.category': 'Category of a vulnerability.', - 'vulnerability.classification': 'Classification of the vulnerability.', - 'vulnerability.description': 'Description of the vulnerability.', - 'vulnerability.enumeration': 'Identifier of the vulnerability.', - 'vulnerability.id': 'ID of the vulnerability.', - 'vulnerability.reference': 'Reference of the vulnerability.', - 'vulnerability.report_id': 'Scan identification number.', - 'vulnerability.scanner.vendor': 'Name of the scanner vendor.', - 'vulnerability.score.base': 'Vulnerability Base score.', - 'vulnerability.score.environmental': 'Vulnerability Environmental score.', - 'vulnerability.score.temporal': 'Vulnerability Temporal score.', - 'vulnerability.score.version': 'CVSS version.', - 
'vulnerability.severity': 'Severity of the vulnerability.', -}; - export const ECS_TYPES: EcsFields = { '@timestamp': 'date', 'agent.build.original': 'keyword', diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts index d23d4dcb2cd9d..3856c089e254d 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -import { ECS_FULL } from './constants'; +import { ECS_FULL } from '../../../common'; import { EcsMappingState } from '../../types'; const valueFieldKeys = new Set(['target', 'confidence', 'date_formats', 'type']); diff --git a/x-pack/plugins/integration_assistant/server/providers/bedrock.ts b/x-pack/plugins/integration_assistant/server/providers/bedrock.ts index f2becac6c9d39..87dc8e077c440 100644 --- a/x-pack/plugins/integration_assistant/server/providers/bedrock.ts +++ b/x-pack/plugins/integration_assistant/server/providers/bedrock.ts @@ -6,6 +6,7 @@ */ import { BedrockChat } from '@langchain/community/chat_models/bedrock/web'; +// TODO: This function is here temporarily during development, it is supposed to be replaced with the same connector used by Security Assistant. 
export function getModel(): BedrockChat { const model = new BedrockChat({ model: 'anthropic.claude-3-opus-20240229-v1:0', diff --git a/x-pack/plugins/integration_assistant/server/util/es.ts b/x-pack/plugins/integration_assistant/server/util/es.ts index c79092ca0c800..f3a2d84316fa0 100644 --- a/x-pack/plugins/integration_assistant/server/util/es.ts +++ b/x-pack/plugins/integration_assistant/server/util/es.ts @@ -1,7 +1,11 @@ -import { EcsMappingState } from "../types/EcsMapping"; -import { CategorizationState } from "../types/Categorization"; -import { RelatedState } from "../types/Related"; -import { Client } from "@elastic/elasticsearch"; +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +import { Client } from '@elastic/elasticsearch'; +import { EcsMappingState, CategorizationState, RelatedState } from '../types'; interface DocTemplate { _index: string; @@ -13,9 +17,9 @@ interface DocTemplate { function formatSample(sample: string): DocTemplate { const docsTemplate: DocTemplate = { - _index: "index", - _id: "id", - _source: { message: "" }, + _index: 'index', + _id: 'id', + _source: { message: '' }, }; const formatted: DocTemplate = { ...docsTemplate }; formatted._source.message = sample; @@ -24,10 +28,10 @@ function formatSample(sample: string): DocTemplate { function newClient(): Client { const client = new Client({ - node: "https://localhost:9200", + node: 'https://localhost:9200', auth: { - username: "elastic", - password: "changeme", + username: 'elastic', + password: 'changeme', }, tls: { rejectUnauthorized: false, @@ -36,10 +40,7 @@ function newClient(): Client { return client; } -async function _testPipeline( - samples: string[], - pipeline: object -): Promise<[any[], any[]]> { +async function _testPipeline(samples: string[], pipeline: object): 
Promise<[any[], any[]]> { const docs = samples.map((sample) => formatSample(sample)); const results: object[] = []; const errors: object[] = []; @@ -63,21 +64,14 @@ async function _testPipeline( export async function handleValidatePipeline( state: EcsMappingState | CategorizationState | RelatedState -): Promise< - | Partial - | Partial - | Partial -> { - const [errors, results] = await _testPipeline( - state.rawSamples, - state.currentPipeline - ); - console.log("testing validate pipeline"); - console.log("errors", errors); +): Promise | Partial | Partial> { + const [errors, results] = await _testPipeline(state.rawSamples, state.currentPipeline); + console.log('testing validate pipeline'); + console.log('errors', errors); //console.log("results", results); return { errors, pipelineResults: results, - lastExecutedChain: "validate_pipeline", + lastExecutedChain: 'validate_pipeline', }; } From 813e342ecafd1671256368553463a42b30cff13b Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Sat, 25 May 2024 22:23:15 +0200 Subject: [PATCH 09/62] stashing changes --- .../integration_assistant/common/ecs.ts | 2 +- .../integration_assistant/common/index.ts | 12 +- .../integration_assistant/common/types.ts | 72 +++-- .../integration_assistant/public/app.tsx | 138 ++++++-- .../integration_assistant/public/services.ts | 24 +- .../integration_assistant/public/types.ts | 9 - .../integration_assistant/server/constants.ts | 3 + .../server/graphs/categorization/prompts.ts | 303 +++++++++--------- .../server/graphs/ecs/graph.ts | 2 +- .../server/graphs/ecs/pipeline.ts | 5 +- .../server/graphs/ecs/prompts.ts | 249 +++++++------- .../server/integration_builder/agent.ts | 33 ++ .../integration_builder/build_integration.ts | 127 ++++++++ .../server/integration_builder/data_stream.ts | 131 ++++++++ .../server/integration_builder/dev_folders.ts | 62 ++++ .../server/integration_builder/fields.ts | 52 +++ .../server/integration_builder/index.ts | 8 + .../server/integration_builder/manifest.ts | 
45 +++ .../server/providers/bedrock.ts | 1 + .../server/routes/build_integration_routes.ts | 6 +- .../server/routes/categorization_routes.ts | 16 +- .../server/routes/ecs_routes.ts | 15 +- .../server/routes/related_routes.ts | 16 +- .../integration_assistant/server/util/es.ts | 6 +- .../server/util/samples.ts | 112 +++++++ .../integration_assistant/server/util/util.ts | 12 + 26 files changed, 1101 insertions(+), 360 deletions(-) create mode 100644 x-pack/plugins/integration_assistant/server/constants.ts create mode 100644 x-pack/plugins/integration_assistant/server/integration_builder/agent.ts create mode 100644 x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts create mode 100644 x-pack/plugins/integration_assistant/server/integration_builder/data_stream.ts create mode 100644 x-pack/plugins/integration_assistant/server/integration_builder/dev_folders.ts create mode 100644 x-pack/plugins/integration_assistant/server/integration_builder/fields.ts create mode 100644 x-pack/plugins/integration_assistant/server/integration_builder/index.ts create mode 100644 x-pack/plugins/integration_assistant/server/integration_builder/manifest.ts diff --git a/x-pack/plugins/integration_assistant/common/ecs.ts b/x-pack/plugins/integration_assistant/common/ecs.ts index 3c3c249258db7..bdc2b885febe1 100644 --- a/x-pack/plugins/integration_assistant/common/ecs.ts +++ b/x-pack/plugins/integration_assistant/common/ecs.ts @@ -6,7 +6,7 @@ */ interface EcsFields { - [key: string]: any; + [key: string]: string; } export const ECS_FULL: EcsFields = { diff --git a/x-pack/plugins/integration_assistant/common/index.ts b/x-pack/plugins/integration_assistant/common/index.ts index 5ea92a5497188..b4f162da98fc8 100644 --- a/x-pack/plugins/integration_assistant/common/index.ts +++ b/x-pack/plugins/integration_assistant/common/index.ts @@ -6,15 +6,21 @@ */ export type { - Pipeline, BuildIntegrationApiRequest, EcsMappingApiRequest, CategorizationApiRequest, 
RelatedApiRequest, + CategorizationApiResponse, + RelatedApiResponse, + EcsMappingApiResponse, + Pipeline, + ESProcessorItem, + ESProcessorOptions, + DataStream, + Integration, + InputTypes, } from './types'; -export type { CategorizationApiResponse, RelatedApiResponse, EcsMappingApiResponse } from './types'; - export { PLUGIN_ID, INTEGRATION_ASSISTANT_APP_ROUTE, diff --git a/x-pack/plugins/integration_assistant/common/types.ts b/x-pack/plugins/integration_assistant/common/types.ts index d35bcecc23fd3..d13e85ad15214 100644 --- a/x-pack/plugins/integration_assistant/common/types.ts +++ b/x-pack/plugins/integration_assistant/common/types.ts @@ -5,58 +5,90 @@ * 2.0. */ -export interface ProcessorObject { - on_failure?: ProcessorKey[]; +export interface ESProcessorOptions { + on_failure?: ESProcessorItem[]; ignore_failure?: boolean; if?: string; tag?: string; [key: string]: any; } -export interface ProcessorKey { - [processorName: string]: ProcessorObject; +export interface ESProcessorItem { + [processorName: string]: ESProcessorOptions; } export interface Pipeline { name?: string; description?: string; version?: number; - processors: ProcessorKey[]; - on_failure?: ProcessorKey[]; + processors: ESProcessorItem[]; + on_failure?: ESProcessorItem[]; +} + +export enum InputTypes { + Cloudwatch = 'aws-cloudwatch', + S3 = 'aws-s3', + AzureBlobStorage = 'azure-blob-storage', + EventHub = 'azure-eventhub', + Cloudfoundry = 'cloudfoundry', + FileStream = 'filestream', + PubSub = 'gcp-pubsub', + GoogleCloudStorage = 'gcs', + HTTPListener = 'http_endpoint', + Journald = 'journald', + Kafka = 'kafka', + TCP = 'tcp', + UDP = 'udp', +} + +export interface DataStream { + name: string; + title: string; + description: string; + inputTypes: InputTypes[]; + rawSamples: string[]; + pipeline: Pipeline; + docs: object[]; +} + +export interface Integration { + name: string; + title: string; + description: string; + version: string; + dataStreams: DataStream[]; + streamVersion?: string; + 
dockerComposeVersion?: string; + initialVersion: string; + formatVersion: string; + owner: string; + minKibanaVersion: string; } // Server Request Schemas export interface BuildIntegrationApiRequest { - packageName: string; - packageTitle: string; - packageVersion: string; - dataStreamName: string; - dataStreamTitle: string; - inputTypes: string[]; - formSamples: string[]; - ingestPipeline: object; - docs: object[]; + integration: Integration; } export interface EcsMappingApiRequest { packageName: string; dataStreamName: string; - formSamples: string[]; + rawSamples: string[]; mapping?: object; } export interface CategorizationApiRequest { packageName: string; dataStreamName: string; - formSamples: string[]; - ingestPipeline: object; + rawSamples: string[]; + currentPipeline: object; } export interface RelatedApiRequest { packageName: string; dataStreamName: string; - formSamples: string[]; - ingestPipeline: object; + rawSamples: string[]; + currentPipeline: object; } // Server Response Schemas diff --git a/x-pack/plugins/integration_assistant/public/app.tsx b/x-pack/plugins/integration_assistant/public/app.tsx index 7a371ddce09fc..28894f5d42e70 100644 --- a/x-pack/plugins/integration_assistant/public/app.tsx +++ b/x-pack/plugins/integration_assistant/public/app.tsx @@ -9,8 +9,22 @@ import React, { useState } from 'react'; import ReactDOM from 'react-dom'; import { AppMountParameters } from '@kbn/core/public'; import type { IHttpFetchError } from '@kbn/core-http-browser'; -import { EuiPageTemplate, EuiText, EuiButton } from '@elastic/eui'; -import { EcsMappingApiRequest, EcsMappingApiResponse } from '../common'; +import { + EuiPageTemplate, + EuiText, + EuiButton, + EuiFlexGroup, + EuiFlexItem, + EuiCodeBlock, +} from '@elastic/eui'; +import { + EcsMappingApiRequest, + EcsMappingApiResponse, + CategorizationApiRequest, + CategorizationApiResponse, + RelatedApiRequest, + RelatedApiResponse, +} from '../common'; import { Services } from './services'; @@ -22,34 +36,92 
@@ function RoutingExplorer({ runRelatedGraph, runIntegrationBuilder, }: Props) { + const [lastResponse, setLastResponse] = useState( + {} as EcsMappingApiResponse | CategorizationApiResponse | RelatedApiResponse + ); + const [currentPipeline, setCurrentPipeline] = useState({} as object); + const [resultDocs, setResultDocs] = useState([] as object[]); + const packageName = 'teleport'; + const dataStreamName = 'audit'; + const rawSamples = [ + '{"ei":0,"event":"user.login","uid":"b675d102-fc25-4f7a-bf5d-96468cc176ea","code":"T1000I","time":"2024-02-23T18:56:50.628Z","cluster_name":"teleport.ericbeahan.com","user":"teleport-admin","required_private_key_policy":"none","success":true,"method":"local","mfa_device":{"mfa_device_name":"otp-device","mfa_device_uuid":"d07bf388-af49-4ec2-b8a4-c8a9e785b70b","mfa_device_type":"TOTP"},"user_agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36","addr.remote":"136.61.214.196:50332"}', + '{"ei":0,"event":"cert.create","uid":"efd326fc-dd13-4df8-acef-3102c2d717d3","code":"TC000I","time":"2024-02-23T18:56:50.653Z","cluster_name":"teleport.ericbeahan.com","cert_type":"user","identity":{"user":"teleport-admin","roles":["access","editor"],"logins":["root","ubuntu","ec2-user","-teleport-internal-join"],"expires":"2024-02-24T06:56:50.648137154Z","route_to_cluster":"teleport.ericbeahan.com","traits":{"aws_role_arns":null,"azure_identities":null,"db_names":null,"db_roles":null,"db_users":null,"gcp_service_accounts":null,"host_user_gid":[""],"host_user_uid":[""],"kubernetes_groups":null,"kubernetes_users":null,"logins":["root","ubuntu","ec2-user"],"windows_logins":null},"teleport_cluster":"teleport.ericbeahan.com","client_ip":"136.61.214.196","prev_identity_expires":"0001-01-01T00:00:00Z","private_key_policy":"none"}}', + 
'{"ei":0,"event":"session.start","uid":"fff30583-13be-49e8-b159-32952c6ea34f","code":"T2000I","time":"2024-02-23T18:56:57.199Z","cluster_name":"teleport.ericbeahan.com","user":"teleport-admin","login":"ec2-user","user_kind":1,"sid":"293fda2d-2266-4d4d-b9d1-bd5ea9dd9fc3","private_key_policy":"none","namespace":"default","server_id":"face0091-2bf1-43fd-a16a-f1514b4119f4","server_hostname":"ip-172-31-8-163.us-east-2.compute.internal","server_labels":{"hostname":"ip-172-31-8-163.us-east-2.compute.internal","teleport.internal/resource-id":"dccb2999-9fb8-4169-aded-ec7a1c0a26de"},"addr.remote":"136.61.214.196:50339","proto":"ssh","size":"80:25","initial_command":[""],"session_recording":"node"}', + ]; + // TODO: Just a quick way to test the return type const isFetchError = (response: any): response is IHttpFetchError => { return 'message' in response; }; - const [ecsResponseState, setEcsResponseState] = useState({} as EcsMappingApiResponse); - const [errorResponse, setErrorResponse] = useState({} as IHttpFetchError); - async function onEcsButtonClick(req: EcsMappingApiRequest) { + + // TODO: All these basic functions and UI is only here for testing purposes + async function onEcsButtonClick() { + const request = { + packageName, + dataStreamName, + rawSamples, + } as EcsMappingApiRequest; + try { + const ecsResponse = await runEcsGraph(request); + if (!isFetchError(ecsResponse)) { + if (Object.keys(ecsResponse?.results).length > 0) { + setCurrentPipeline(ecsResponse.results.pipeline); + setLastResponse(ecsResponse); + console.log('finished ecs graph successfully'); + } else { + console.log('finished ecs graph without errors, but no results'); + } + } + } catch (e) { + console.log(e); + } + } + async function onCategorizationButtonClick() { + const request = { + packageName, + dataStreamName, + rawSamples, + currentPipeline, + } as CategorizationApiRequest; try { - const ecsResponse = await runEcsGraph(req); - if (isFetchError(ecsResponse)) { - 
setErrorResponse(ecsResponse); - console.log('finished with error'); - } else if (Object.keys(ecsResponse?.results).length > 0) { - setEcsResponseState(ecsResponse as EcsMappingApiResponse); - console.log('finished ecs graph'); + const categorizationResponse = await runCategorizationGraph(request); + if (!isFetchError(categorizationResponse)) { + if (Object.keys(categorizationResponse?.results).length > 0) { + setCurrentPipeline(categorizationResponse.results.pipeline); + setResultDocs(categorizationResponse.results.docs); + setLastResponse(categorizationResponse); + console.log('finished categorization graph successfully'); + } else { + console.log('finished categorization graph without errors, but no results'); + } } } catch (e) { - setErrorResponse(e); + console.log(e); } } - const req = { - packageName: 'teleport', - dataStreamName: 'audit', - formSamples: [ - '{"ei":0,"event":"user.login","uid":"b675d102-fc25-4f7a-bf5d-96468cc176ea","code":"T1000I","time":"2024-02-23T18:56:50.628Z","cluster_name":"teleport.ericbeahan.com","user":"teleport-admin","required_private_key_policy":"none","success":true,"method":"local","mfa_device":{"mfa_device_name":"otp-device","mfa_device_uuid":"d07bf388-af49-4ec2-b8a4-c8a9e785b70b","mfa_device_type":"TOTP"},"user_agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36","addr.remote":"136.61.214.196:50332"}', - 
'{"ei":0,"event":"cert.create","uid":"efd326fc-dd13-4df8-acef-3102c2d717d3","code":"TC000I","time":"2024-02-23T18:56:50.653Z","cluster_name":"teleport.ericbeahan.com","cert_type":"user","identity":{"user":"teleport-admin","roles":["access","editor"],"logins":["root","ubuntu","ec2-user","-teleport-internal-join"],"expires":"2024-02-24T06:56:50.648137154Z","route_to_cluster":"teleport.ericbeahan.com","traits":{"aws_role_arns":null,"azure_identities":null,"db_names":null,"db_roles":null,"db_users":null,"gcp_service_accounts":null,"host_user_gid":[""],"host_user_uid":[""],"kubernetes_groups":null,"kubernetes_users":null,"logins":["root","ubuntu","ec2-user"],"windows_logins":null},"teleport_cluster":"teleport.ericbeahan.com","client_ip":"136.61.214.196","prev_identity_expires":"0001-01-01T00:00:00Z","private_key_policy":"none"}}', - '{"ei":0,"event":"session.start","uid":"fff30583-13be-49e8-b159-32952c6ea34f","code":"T2000I","time":"2024-02-23T18:56:57.199Z","cluster_name":"teleport.ericbeahan.com","user":"teleport-admin","login":"ec2-user","user_kind":1,"sid":"293fda2d-2266-4d4d-b9d1-bd5ea9dd9fc3","private_key_policy":"none","namespace":"default","server_id":"face0091-2bf1-43fd-a16a-f1514b4119f4","server_hostname":"ip-172-31-8-163.us-east-2.compute.internal","server_labels":{"hostname":"ip-172-31-8-163.us-east-2.compute.internal","teleport.internal/resource-id":"dccb2999-9fb8-4169-aded-ec7a1c0a26de"},"addr.remote":"136.61.214.196:50339","proto":"ssh","size":"80:25","initial_command":[""],"session_recording":"node"}', - ], - } as EcsMappingApiRequest; + async function onRelatedButtonClick() { + const request = { + packageName, + dataStreamName, + rawSamples, + currentPipeline, + } as RelatedApiRequest; + try { + const relatedResponse = await runRelatedGraph(request); + if (!isFetchError(relatedResponse)) { + if (Object.keys(relatedResponse?.results).length > 0) { + setCurrentPipeline(relatedResponse.results.pipeline); + setResultDocs(relatedResponse.results.docs); + 
setLastResponse(relatedResponse); + console.log('finished related graph successfully'); + } else { + console.log('finished related graph without errors, but no results'); + } + } + } catch (e) { + console.log(e); + } + } + return ( @@ -58,7 +130,27 @@ function RoutingExplorer({ - onEcsButtonClick(req)}>Run ECS Graph + + + onEcsButtonClick()}>Run ECS Graph + + + onCategorizationButtonClick()}> + Run Categorization Graph + + + + onRelatedButtonClick()}>Run Related Graph + + + + + +

Last Response/Error

+
+ {JSON.stringify(lastResponse, null, 2)} +
+
); diff --git a/x-pack/plugins/integration_assistant/public/services.ts b/x-pack/plugins/integration_assistant/public/services.ts index 4347c66c51536..0b49b1888721a 100644 --- a/x-pack/plugins/integration_assistant/public/services.ts +++ b/x-pack/plugins/integration_assistant/public/services.ts @@ -39,31 +39,41 @@ export function getServices(core: CoreStart): Services { const response = await core.http.post(ECS_GRAPH_PATH, { body: JSON.stringify({ ...req }), }); - console.log(response); return response; } catch (e) { return e; } }, - runCategorizationGraph: async (req: CategorizationApiRequest) => { + runCategorizationGraph: async ( + req: CategorizationApiRequest + ): Promise => { try { - const response = await core.http.post(CATEGORIZATION_GRAPH_PATH, {}); + const response = await core.http.post( + CATEGORIZATION_GRAPH_PATH, + { + body: JSON.stringify({ ...req }), + } + ); return response; } catch (e) { return e; } }, - runRelatedGraph: async (req: RelatedApiRequest) => { + runRelatedGraph: async (req: RelatedApiRequest): Promise => { try { - const response = await core.http.post(RELATED_GRAPH_PATH, {}); + const response = await core.http.post(RELATED_GRAPH_PATH, { + body: JSON.stringify({ ...req }), + }); return response; } catch (e) { return e; } }, - runIntegrationBuilder: async (req: BuildIntegrationApiRequest) => { + runIntegrationBuilder: async (req: BuildIntegrationApiRequest): Promise => { try { - const response = await core.http.post(INTEGRATION_BUILDER_PATH, {}); + const response = await core.http.post(INTEGRATION_BUILDER_PATH, { + body: JSON.stringify({ ...req }), + }); return response; } catch (e) { return e; diff --git a/x-pack/plugins/integration_assistant/public/types.ts b/x-pack/plugins/integration_assistant/public/types.ts index 7ddae65359abc..a7b8a413213c3 100644 --- a/x-pack/plugins/integration_assistant/public/types.ts +++ b/x-pack/plugins/integration_assistant/public/types.ts @@ -15,12 +15,3 @@ export interface IntegrationAssistantPluginStart 
{} export interface AppPluginStartDependencies { navigation: NavigationPublicPluginStart; } - -export interface EcsMappingTableItem { - sourceField: string; - destinationField: string; - isEcs: boolean; - description: string; - id: string; - exampleValue: any; -} diff --git a/x-pack/plugins/integration_assistant/server/constants.ts b/x-pack/plugins/integration_assistant/server/constants.ts new file mode 100644 index 0000000000000..c40d0e02a2ba5 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/constants.ts @@ -0,0 +1,3 @@ +export const ROUTE_HANDLER_TIMEOUT = 10 * 60 * 1000; // 10 * 60 seconds = 10 minutes +export const LANG_CHAIN_TIMEOUT = ROUTE_HANDLER_TIMEOUT - 10_000; // 9 minutes 50 seconds +export const CONNECTOR_TIMEOUT = LANG_CHAIN_TIMEOUT - 10_000; // 9 minutes 40 seconds diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/prompts.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/prompts.ts index 4b6649a663c11..a03f373499aea 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/prompts.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/prompts.ts @@ -10,57 +10,57 @@ export const CATEGORIZATION_MAIN_PROMPT = ChatPromptTemplate.fromMessages([ [ 'system', `You are a helpful, expert assistant on Elasticsearch Ingest Pipelines, focusing on providing append processors that can be used to enrich samples with all relevant event.type and event.category values. - Here are some context for you to reference for your task, read it carefully as you will get questions about it later: - - - Event Category (event.category): - Purpose: It is the second level in the ECS category hierarchy, representing the primary category or "big bucket" for event classification. - Type: It's a keyword type and can have multiple values (list). - Relationship: Works alongside event.type, which acts as a subcategory. 
- Allowed categories and their descriptions: - {ecs_categories} - - Event Type (event.type): - Purpose: It is the third level in the ECS category hierarchy, represents a categorization "sub-bucket". - Type: It's a keyword type and can have multiple values (list). - Relationship: Works alongside event.category, which acts as a subcategory. - Allowed types and their descriptions: - {ecs_types} - - `, +Here are some context for you to reference for your task, read it carefully as you will get questions about it later: + + +Event Category (event.category): +Purpose: It is the second level in the ECS category hierarchy, representing the primary category or "big bucket" for event classification. +Type: It's a keyword type and can have multiple values (list). +Relationship: Works alongside event.type, which acts as a subcategory. +Allowed categories and their descriptions: +{ecs_categories} + +Event Type (event.type): +Purpose: It is the third level in the ECS category hierarchy, represents a categorization "sub-bucket". +Type: It's a keyword type and can have multiple values (list). +Relationship: Works alongside event.category, which acts as a subcategory. +Allowed types and their descriptions: +{ecs_types} + +`, ], [ 'human', `Please help me by providing all relevant append processors for any detected event.category and event.type combinations that would fit the below pipeline results as an array of JSON objects. - - - {pipeline_results} - - - Go through each of the pipeline results above step by step and do the following to add all relevant event.type and event.category combinations. - 1. Try to understand what is unique about each pipeline result, and what sort of event.categorization and event.type combinations that fit best, and if there is any unique values for each result. - 2. For for each combination of event.category and event.type that you find, add a new append processor to your array of JSON objects. - 3. 
If only certain results are relevant to the event.category and event.type combination, add an if condition similar to the above example processors, that describes what value or field needs to be available for this categorization to take place. The if condition should be inside the processor object. - 4. Always check if the combination of event.category and event.type is common in the ecs context above. - 5. Always make sure the value for event.category and event.type is strictly from the allowed categories and allowed types in the ecs context above. - 6. The value argument for the append processor is an array of one or more types and categories. - - You ALWAYS follow these guidelines when writing your response: - - - You can add as many append processors you need to cover all the unique combinations that you detected. - - If conditions should always use a ? character when accessing nested fields, in case the field might not always be available, see example processors above. - - When an if condition is not needed the argument should not be used for the processor object. - - When using a range based if condition like > 0, you first need to check that the field is not null, for example: ctx.somefield?.production != null && ctx.somefield?.production > 0 - - Do not respond with anything except the array of processors as a valid JSON objects enclosed with 3 backticks (\`), see example response below. - - - Example response format: - - A: Please find the Categorization processors below: - \`\`\`json - {ex_answer} - \`\`\` - `, + + +{pipeline_results} + + +Go through each of the pipeline results above step by step and do the following to add all relevant event.type and event.category combinations. +1. Try to understand what is unique about each pipeline result, and what sort of event.categorization and event.type combinations that fit best, and if there is any unique values for each result. +2. 
For for each combination of event.category and event.type that you find, add a new append processor to your array of JSON objects. +3. If only certain results are relevant to the event.category and event.type combination, add an if condition similar to the above example processors, that describes what value or field needs to be available for this categorization to take place. The if condition should be inside the processor object. +4. Always check if the combination of event.category and event.type is common in the ecs context above. +5. Always make sure the value for event.category and event.type is strictly from the allowed categories and allowed types in the ecs context above. +6. The value argument for the append processor is an array of one or more types and categories. + +You ALWAYS follow these guidelines when writing your response: + +- You can add as many append processors you need to cover all the unique combinations that you detected. +- If conditions should always use a ? character when accessing nested fields, in case the field might not always be available, see example processors above. +- When an if condition is not needed the argument should not be used for the processor object. +- When using a range based if condition like > 0, you first need to check that the field is not null, for example: ctx.somefield?.production != null && ctx.somefield?.production > 0 +- Do not respond with anything except the array of processors as a valid JSON objects enclosed with 3 backticks (\`), see example response below. 
+ + +Example response format: + +A: Please find the Categorization processors below: +\`\`\`json +{ex_answer} +\`\`\` +`, ], ['ai', 'Please find the Categorization processors below:'], ]); @@ -69,49 +69,49 @@ export const CATEGORIZATION_REVIEW_PROMPT = ChatPromptTemplate.fromMessages([ [ 'system', `You are a helpful, expert assistant on Elasticsearch Ingest Pipelines, focusing on adding improvements to the provided array of processors and reviewing the current results. - - Here is some context that you can reference for your task, read it carefully as you will get questions about it later: - - - {current_processors} - - - {compatibility_matrix} - - `, + +Here is some context that you can reference for your task, read it carefully as you will get questions about it later: + + +{current_processors} + + +{compatibility_matrix} + +`, ], [ 'human', `Testing my current pipeline returned me with the results: - - {pipeline_results} - - - Please review the pipeline results and the array of current processors, ensuring to identify all the possible event.type and event.category combinatinations that would match each pipeline result document. If any event.type or event.category is missing from any of the pipeline results, add them by updating the array of current processors and return the whole updated array of processors. - - For each pipeline result you review step by step, remember the below steps: - 1. Check if each of the pipeline results have at least one event.category and event.type added to them. If not then try to correlate the results with the current processors and see if either a new append processor should be added to the list with a matching if condition, or if any of the if conditions should be modified as they are not matching that is in the results. - 2. If the results have at least one event.category and event.type value, see if more of them could match, if so it could be added to the relevant append processor which added the initial values. - 3. 
When adding more values to event.type and event.category please keep in mind the compatibility_matrix in the context to make sure only compatible event.type , event.category pairs that are compatible are created. - 4. Ensure that all append processors has allow_duplicates: false, as seen in the example response. - - You ALWAYS follow these guidelines when writing your response: - - - You can use as many append processors as you need to add all relevant ECS categories and types combinations. - - If conditions should always use a ? character when accessing nested fields, in case the field might not always be available, see example processors above. - - When an if condition is not needed the argument should not be used for the processor object. - - If not updates are needed you respond with the initially provided current processors. - - Each append processor needs to have the allow_duplicates: false argument, as shown in the below example response. - - Do not respond with anything except updated array of processors as a valid JSON object enclosed with 3 backticks (\`), see example response below. - - - Example response format: - - A: Please find the updated ECS categorization append processors below: - \`\`\` - {ex_answer} - \`\`\` - `, + +{pipeline_results} + + +Please review the pipeline results and the array of current processors, ensuring to identify all the possible event.type and event.category combinatinations that would match each pipeline result document. If any event.type or event.category is missing from any of the pipeline results, add them by updating the array of current processors and return the whole updated array of processors. + +For each pipeline result you review step by step, remember the below steps: +1. Check if each of the pipeline results have at least one event.category and event.type added to them. 
If not then try to correlate the results with the current processors and see if either a new append processor should be added to the list with a matching if condition, or if any of the if conditions should be modified as they are not matching that is in the results. +2. If the results have at least one event.category and event.type value, see if more of them could match, if so it could be added to the relevant append processor which added the initial values. +3. When adding more values to event.type and event.category please keep in mind the compatibility_matrix in the context to make sure only compatible event.type , event.category pairs that are compatible are created. +4. Ensure that all append processors has allow_duplicates: false, as seen in the example response. + +You ALWAYS follow these guidelines when writing your response: + +- You can use as many append processors as you need to add all relevant ECS categories and types combinations. +- If conditions should always use a ? character when accessing nested fields, in case the field might not always be available, see example processors above. +- When an if condition is not needed the argument should not be used for the processor object. +- If not updates are needed you respond with the initially provided current processors. +- Each append processor needs to have the allow_duplicates: false argument, as shown in the below example response. +- Do not respond with anything except updated array of processors as a valid JSON object enclosed with 3 backticks (\`), see example response below. 
+ + +Example response format: + +A: Please find the updated ECS categorization append processors below: +\`\`\` +{ex_answer} +\`\`\` +`, ], ['ai', 'Please find the updated ECS categorization append processors below:'], ]); @@ -120,39 +120,42 @@ export const CATEGORIZATION_VALIDATION_PROMPT = ChatPromptTemplate.fromMessages( [ 'system', `You are a helpful, expert assistant on Elasticsearch Ingest Pipelines, focusing on resolving errors and issues with append processors used for categorization. - Here is some context that you can reference for your task, read it carefully as you will get questions about it later: - - - {current_processors} - - - {compatible_types} - - - {invalid_categorization} - - "], - ["human", "Please go through each error above, carefully review the provided current processors, and resolve the most likely cause to the supplied error by returning an updated version of the current_processors. - - Follow these steps to help resolve the current ingest pipeline issues: - 1. Try to fix all related errors before responding. - 2. Apply all fixes to the provided array of current append processors. - 3. If you do not know how to fix an error, then continue to the next and return the complete updated array of current append processors. - - You ALWAYS follow these guidelines when writing your response: - - - If the error complains about having event.type or event.category not in the allowed values , fix the corresponding append processors to use the allowed values mentioned in the error. - - If the error is about event.type not compatible with any event.category, please refer to the 'compatible_types' in the context to fix the corresponding append processors to use valid combination of event.type and event.category - - Do not respond with anything except the complete updated array of processors as a valid JSON object enclosed with 3 backticks (\`), see example response below. 
- - - Example response format: - - A: Please find the updated ECS categorization append processors below: - \`\`\`json - {ex_answer} - \`\`\` - `, +Here is some context that you can reference for your task, read it carefully as you will get questions about it later: + + +{current_processors} + + +{compatible_types} + + +{invalid_categorization} + +`, + ], + [ + 'human', + `Please go through each error above, carefully review the provided current processors, and resolve the most likely cause to the supplied error by returning an updated version of the current_processors. + +Follow these steps to help resolve the current ingest pipeline issues: +1. Try to fix all related errors before responding. +2. Apply all fixes to the provided array of current append processors. +3. If you do not know how to fix an error, then continue to the next and return the complete updated array of current append processors. + +You ALWAYS follow these guidelines when writing your response: + +- If the error complains about having event.type or event.category not in the allowed values , fix the corresponding append processors to use the allowed values mentioned in the error. +- If the error is about event.type not compatible with any event.category, please refer to the 'compatible_types' in the context to fix the corresponding append processors to use valid combination of event.type and event.category +- Do not respond with anything except the complete updated array of processors as a valid JSON object enclosed with 3 backticks (\`), see example response below. 
+ + +Example response format: + +A: Please find the updated ECS categorization append processors below: +\`\`\`json +{ex_answer} +\`\`\` +`, ], ['ai', 'Please find the updated ECS categorization append processors below:'], ]); @@ -161,41 +164,41 @@ export const CATEGORIZATION_ERROR_PROMPT = ChatPromptTemplate.fromMessages([ [ 'system', `You are a helpful, expert assistant on Elasticsearch Ingest Pipelines, focusing on resolving errors and issues with append processors used for categorization. - Here is some context that you can reference for your task, read it carefully as you will get questions about it later: - - - {current_processors} - - - {errors} - - `, +Here is some context that you can reference for your task, read it carefully as you will get questions about it later: + + +{current_processors} + + +{errors} + +`, ], [ 'human', `Please go through each error above, carefully review the provided current processors, and resolve the most likely cause to the supplied error by returning an updated version of the current_processors. - - Follow these steps to help resolve the current ingest pipeline issues: - 1. Try to fix all related errors before responding. - 2. Apply all fixes to the provided array of current append processors. - 3. If you do not know how to fix an error, then continue to the next and return the complete updated array of current append processors. - - You ALWAYS follow these guidelines when writing your response: - - - When checking for the existance of multiple values in a single variable, use this format: "if": "['value1', 'value2'].contains(ctx.{package_name}?.{data_stream_name}?.field)" - - If conditions should never be in a format like "if": "true". If it exist in the current array of append processors, remove only the redundant if condition. - - If the error complains that it is a null point exception, always ensure the if conditions uses a ? when accessing nested fields. For example ctx.field1?.nestedfield1?.nestedfield2. 
- - If the error complains about having values not in the list of allowed values , fix the corresponding append processors to use the allowed values as mentioned in the error. - - Do not respond with anything except the complete updated array of processors as a valid JSON object enclosed with 3 backticks (\`), see example response below. - - - Example response format: - - A: Please find the updated ECS categorization append processors below: - \`\`\`json - {ex_answer} - \`\`\` - `, + +Follow these steps to help resolve the current ingest pipeline issues: +1. Try to fix all related errors before responding. +2. Apply all fixes to the provided array of current append processors. +3. If you do not know how to fix an error, then continue to the next and return the complete updated array of current append processors. + +You ALWAYS follow these guidelines when writing your response: + +- When checking for the existance of multiple values in a single variable, use this format: "if": "['value1', 'value2'].contains(ctx.{package_name}?.{data_stream_name}?.field)" +- If conditions should never be in a format like "if": "true". If it exist in the current array of append processors, remove only the redundant if condition. +- If the error complains that it is a null point exception, always ensure the if conditions uses a ? when accessing nested fields. For example ctx.field1?.nestedfield1?.nestedfield2. +- If the error complains about having values not in the list of allowed values , fix the corresponding append processors to use the allowed values as mentioned in the error. +- Do not respond with anything except the complete updated array of processors as a valid JSON object enclosed with 3 backticks (\`), see example response below. 
+ + +Example response format: + +A: Please find the updated ECS categorization append processors below: +\`\`\`json +{ex_answer} +\`\`\` +`, ], ['ai', 'Please find the updated ECS categorization append processors below:'], ]); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts index a431e8d6892e2..70ede615e93d5 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts @@ -4,6 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ + import { StateGraph, StateGraphArgs, END, START } from '@langchain/langgraph'; import { ECS_EXAMPLE_ANSWER, ECS_FIELDS } from './constants'; import { modifySamples, mergeSamples } from '../../util/samples'; @@ -113,7 +114,6 @@ function modelOutput(state: EcsMappingState): Partial { function inputRouter(state: EcsMappingState): string { if (Object.keys(state.currentMapping).length === 0) { - console.log('No current mapping found'); return 'ecsMapping'; } return 'modelOutput'; diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts index 1c41b17a73273..1cd0524f486f0 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts @@ -44,7 +44,7 @@ function generateProcessor( field: currentPath, target_field: ecsField.target, formats: ecsField.date_formats, - if: currentPath.replace('.', '?.'), + if: currentPath.replace(/\./g, '?.'), }, }; } @@ -133,7 +133,7 @@ function generateProcessors(ecsMapping: object, samples: object, basePath: strin for (const [key, value] of Object.entries(ecsMapping)) { const currentPath = basePath ? 
`${basePath}.${key}` : key; - if (typeof value === 'object' && value !== null) { + if (value !== null && typeof value === 'object' && value?.target !== null) { const valueKeys = new Set(Object.keys(value)); if ([...valueFieldKeys].every((k) => valueKeys.has(k))) { const processor = generateProcessor( @@ -157,7 +157,6 @@ export function createPipeline(state: EcsMappingState): IngestPipeline { const processors = generateProcessors(state.currentMapping, samples); // Retrieve all source field names from convert processors to populate single remove processor: const fieldsToRemove = processors.filter((p: any) => p.convert).map((p: any) => p.convert.field); - const templatesPath = join(__dirname, '../../templates/pipeline'); const mappedValues = { processors, diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/prompts.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/prompts.ts index b4464670e3662..56b985d2e0b7c 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/prompts.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/prompts.ts @@ -9,49 +9,52 @@ export const ECS_MAIN_PROMPT = ChatPromptTemplate.fromMessages([ [ 'system', `You are a helpful, expert assistant in Elastic Common Schema (ECS), focusing only on helping users with translating their provided combined samples to Elastic Common Schema (ECS). - - Here is some context for you to reference for your task, read it carefully as you will get questions about it later: - - - {ecs} - - - {formatted_samples} - - `, + +Here is some context for you to reference for your task, read it carefully as you will get questions about it later: + + +{ecs} + + +{formatted_samples} + +`, ], [ 'human', `Looking at the combined sample from {package_name} {data_stream_name} provided above. The combined sample is a JSON object that includes all unique fields from the log samples sent by {package_name} {data_stream_name}. 
- - Go through each value step by step and modify it with the following process: - 1. Check if the name of each key and its current value matches the description and usecase of any of the above ECS fields. - 2. If one or more relevant ECS field is found, pick the one you are most confident about. - 3. Replace the value with a new object, and set the nested key "target" to be the full path of the ECS field name. If no confident match is found, the value should always be replaced with null. Also set the nested key "type" to be either "string", "boolean", "number" or "date" depending on what was detected as the example value. - 4. If the type "date" is used, then set date_format to be an array of one or more of the equivilant JAVA date formats that fits the example value. If the type is not date then date_format should be set to an empty array []. - 5. For each key that you set a target ECS field, also score the confidence you have in that the target field is correct, use a float between 0.0 and 1.0 and set the value in the nested "confidence" key. - 6. When you want to use an ECS field as a value for a target, but another field already has the same ECS field as its target, try to find another fitting ECS field. If none is found then the least confident key/value should be null instead. - 7. If you are not confident for a specific field, you should always set the value to null. - 8. These {package_name} log samples are based on source and destination type data, prioritize these compared to other related ECS fields like host.* and observer.*. - - You ALWAYS follow these guidelines when writing your response: - - - Never use \`event.category\` or \`event.type\` as target ECS fields. - - Never use the same ECS target multiple times. If no other field is found that you are confident in, it should always be null. - - All keys should be under the {package_name} {data_stream_name} parent fields, same as the original combined sample above. 
- - All target key values should be ECS field names only from the above ECS fields provided as context. - - All original keys from the combined sample object needs to be in your response. - - Only when a target value is set should type, date_format and confidence be filled out. If no target value then the value should simply be null. - - Do not respond with anything except the ecs maping JSON object enclosed with 3 backticks (\`), see example response below. - - - Example response format: - - A: Please find the JSON object below: - \`\`\`json - {ex_answer} - \`\`\` - "`, + +Go through each value step by step and modify it with the following process: +1. Check if the name of each key and its current value matches the description and usecase of any of the above ECS fields. +2. If one or more relevant ECS field is found, pick the one you are most confident about. +3. If no relevant ECS field is found, the value should just be replaced with "null" rather than a new object. +4. Only if a relevant ECS field is found replace the value with a new object that has the keys "target", "confidence", "date_format" and "type". +5. The object key "target" should be set to be the full path of the ECS field name you think it matches. Set the object key "type" to be either "string", "boolean", "number" or "date" depending on what was detected as the example value. +6. If the type "date" is used, then set date_format to be an array of one or more of the equivilant JAVA date formats that fits the example value. If the type is not date then date_format should be set to an empty array []. +7. For each key that you set a target ECS field, also score the confidence you have in that the target field is correct, use a float between 0.0 and 1.0 and set the value in the nested "confidence" key. +8. When you want to use an ECS field as a value for a target, but another field already has the same ECS field as its target, try to find another fitting ECS field. 
If none is found then the one you are least confident about should have the object replaced with null. +9. If you are not confident for a specific field, you should always set the value to null. +10. These {package_name} log samples are based on source and destination type data, prioritize these compared to other related ECS fields like host.* and observer.*. + +You ALWAYS follow these guidelines when writing your response: + +- Never use \`event.category\` or \`event.type\` as target ECS fields. +- The target key should never have a null value, if no matching target ECS field is found, the whole key value should be set to null. +- Never use the same ECS target multiple times. If no other field is found that you are confident in, it should always be null. +- All keys should be under the {package_name} {data_stream_name} parent fields, same as the original combined sample above. +- All target key values should be ECS field names only from the above ECS fields provided as context. +- All original keys from the combined sample object needs to be in your response. +- Only when a target value is set should type, date_format and confidence be filled out. If no target value then the value should simply be null. +- Do not respond with anything except the ecs maping JSON object enclosed with 3 backticks (\`), see example response below. + + +Example response format: + +A: Please find the JSON object below: +\`\`\`json +{ex_answer} +\`\`\` +"`, ], ['ai', 'Please find the JSON object below:'], ]); @@ -60,39 +63,39 @@ export const ECS_INVALID_PROMPT = ChatPromptTemplate.fromMessages([ [ 'system', `You are a helpful, expert assistant in Elastic Common Schema (ECS), you help review and try to resolve incorrect field mappings. 
- - Here is some context for you to reference your task, read it carefully as you will get questions about it later: - - - {ecs} - - - {formatted_samples} - - - {current_mapping} - - `, + +Here is some context for you to reference your task, read it carefully as you will get questions about it later: + + +{ecs} + + +{formatted_samples} + + +{current_mapping} + +`, ], [ 'human', `The following fields are mapped incorrectly in the current mapping, please help me resolve this: - - {invalid_ecs_fields} - - To resolve the invalid ecs fields, go through each key and value defined in the invalid fields, and modify the current mapping step by step, and ensure they follow these guidelines: - - - Update the provided current mapping object, the value should be the corresponding Elastic Common Schema field name. If no good or valid match is found the value should always be null. - - Do not respond with anything except the updated current mapping JSON object enclosed with 3 backticks (\`). See example response below. - - - Example response format: - - A: Please find the JSON object below: - \`\`\`json - {ex_answer} - \`\`\` - `, + +{invalid_ecs_fields} + +To resolve the invalid ecs fields, go through each key and value defined in the invalid fields, and modify the current mapping step by step, and ensure they follow these guidelines: + +- Update the provided current mapping object, the value should be the corresponding Elastic Common Schema field name. If no good or valid match is found the value should always be null. +- Do not respond with anything except the updated current mapping JSON object enclosed with 3 backticks (\`). See example response below. 
+ + +Example response format: + +A: Please find the JSON object below: +\`\`\`json +{ex_answer} +\`\`\` +`, ], ['ai', 'Please find the JSON object below:'], ]); @@ -101,40 +104,40 @@ export const ECS_MISSING_KEYS_PROMPT = ChatPromptTemplate.fromMessages([ [ 'system', `You are a helpful, expert assistant in Elastic Common Schema (ECS), you help review and try to resolve missing fields in the current mapping. - - Here is some context for you to reference for your task, read it carefully as you will get questions about it later: - - - {ecs} - - - {formatted_samples} - - - {current_mapping} - - `, + +Here is some context for you to reference for your task, read it carefully as you will get questions about it later: + + +{ecs} + + +{formatted_samples} + + +{current_mapping} + +`, ], [ 'human', `The following keys are missing from the current mapping: - - {missing_keys} - - - Help resolve the issue by adding the missing keys, look up example values from the formatted samples, and go through each missing key step by step, resolve it by following these guidelines: - - - Update the provided current mapping object with all the missing keys, the value should be the corresponding Elastic Common Schema field name. If no good match is found the value should always be null. - - Do not respond with anything except the updated current mapping JSON object enclosed with 3 backticks (\`). See example response below. - + +{missing_keys} + - Example response format: - - A: Please find the JSON object below: - \`\`\`json - {ex_answer} - \`\`\` - `, +Help resolve the issue by adding the missing keys, look up example values from the formatted samples, and go through each missing key step by step, resolve it by following these guidelines: + +- Update the provided current mapping object with all the missing keys, the value should be the corresponding Elastic Common Schema field name. If no good match is found the value should always be null. 
+- Do not respond with anything except the updated current mapping JSON object enclosed with 3 backticks (\`). See example response below. + + +Example response format: + +A: Please find the JSON object below: +\`\`\`json +{ex_answer} +\`\`\` +`, ], ['ai', 'Please find the JSON object below:'], ]); @@ -143,37 +146,37 @@ export const ECS_DUPLICATES_PROMPT = ChatPromptTemplate.fromMessages([ [ 'system', `You are a helpful, expert assistant in Elastic Common Schema (ECS), you help review and try to resolve incorrect duplicate fields in the current mapping. - - Here is some context for you to reference for your task, read it carefully as you will get questions about it later: - - - {ecs} - - - {current_mapping} - - `, + +Here is some context for you to reference for your task, read it carefully as you will get questions about it later: + + +{ecs} + + +{current_mapping} + +`, ], [ 'human', `The following duplicate fields are mapped to the same ECS fields in the current mapping, please help me resolve this: - - {duplicate_fields} - - - To resolve the duplicate mappings, go through each key and value defined in the duplicate fields, and modify the current mapping step by step, and ensure they follow these guidelines: - - - Multiple keys should not have the same value (ECS field it will be mapped to). If multiple keys do have the same value then always choose the best match for the ECS field, while the other duplicates should have their value changed to null. - - Do not respond with anything except the updated current mapping JSON object enclosed with 3 backticks (\`). See example response below. 
- + +{duplicate_fields} + - Example response format: - - A: Please find the JSON object below: - \`\`\`json - {ex_answer} - \`\`\` - `, +To resolve the duplicate mappings, go through each key and value defined in the duplicate fields, and modify the current mapping step by step, and ensure they follow these guidelines: + +- Multiple keys should not have the same value (ECS field it will be mapped to). If multiple keys do have the same value then always choose the best match for the ECS field, while the other duplicates should have their value changed to null. +- Do not respond with anything except the updated current mapping JSON object enclosed with 3 backticks (\`). See example response below. + + +Example response format: + +A: Please find the JSON object below: +\`\`\`json +{ex_answer} +\`\`\` +`, ], ['ai', 'Please find the JSON object below:'], ]); diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/agent.ts b/x-pack/plugins/integration_assistant/server/integration_builder/agent.ts new file mode 100644 index 0000000000000..37bdb10a2ca4b --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/integration_builder/agent.ts @@ -0,0 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import * as fs from 'fs'; +import * as path from 'path'; + +export function createAgentInput(specificDataStreamDir: string, inputTypes: string[]): void { + const agentDir = path.join(specificDataStreamDir, 'agent', 'stream'); + const agentTemplatesDir = path.join(__dirname, '../templates/agent'); + fs.mkdirSync(agentDir, { recursive: true }); + + // Load common options that exists for all .yml.hbs files, to be merged with each specific input file + const commonFilePath = path.join(agentTemplatesDir, 'common.yml.hbs'); + const commonFile = fs.readFileSync(commonFilePath, 'utf-8'); + + for (const inputType of inputTypes) { + // TODO: Skip httpjson and cel input types for now, requires new prompts. + if (inputType === 'httpjson' || inputType === 'cel') { + continue; + } + const inputTypeFilePath = path.join(agentTemplatesDir, `${inputType}.yml.hbs`); + const inputTypeFile = fs.readFileSync(inputTypeFilePath, 'utf-8'); + + const combinedContents = `${inputTypeFile}\n${commonFile}`; + + const destinationFilePath = path.join(agentDir, `${inputType}.yml.hbs`); + fs.writeFileSync(destinationFilePath, combinedContents, 'utf-8'); + } +} diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts b/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts new file mode 100644 index 0000000000000..c61d2be7e0f7c --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts @@ -0,0 +1,127 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { copyFileSync, writeFileSync, mkdirSync } from 'fs'; +import { join as joinPath } from 'path'; +import { tmpdir } from 'os'; +import nunjucks from 'nunjucks'; +import yaml from 'js-yaml'; +import AdmZip from 'adm-zip'; +import { Integration, DataStream } from '../../common'; +import { ensureDir } from '../util/util'; +import { createPackageManifest } from './manifest'; +import { createPackageSystemTests } from './dev_folders'; +import { createDatastream } from './data_stream'; +import { createAgentInput } from './agent'; +import { createFieldMapping } from './fields'; +import { generateUniqueId } from '../util/util'; + +export function buildPackage(integration: Integration): File { + const tmpDir = joinPath(tmpdir(), `integration-assistant-${generateUniqueId()}`); + const packageDir = createDirectories(tmpDir, integration); + const dataStreamsDir = joinPath(packageDir, 'data_stream'); + + for (const dataStream of integration.dataStreams) { + const dataStreamName = dataStream.name; + const specificDataStreamDir = joinPath(dataStreamsDir, dataStreamName); + + createDatastream(integration.name, specificDataStreamDir, dataStream); + createAgentInput(specificDataStreamDir, dataStream.inputTypes); + createPipeline(specificDataStreamDir, dataStream.pipeline); + createFieldMapping(integration.name, dataStreamName, specificDataStreamDir, dataStream.docs); + } + + const packageTempPath = joinPath( + tmpDir, + tmpPackageDir, + `${integration.name}-${integration.initialVersion}` + ); + const zipBuffer = createZipArchive(tmpDir, tmpPackageDir); + + return zipBuffer; +} + +function createDirectories(tmpDir: string, integration: Integration) { + mkdirSync(tmpDir, { recursive: true }); + + const packageDir = joinPath(tmpDir, `${integration.name}-${integration.initialVersion}`); + + mkdirSync(packageDir, { recursive: true }); + + createPackage(packageDir, integration); + + return packageDir; +} + +function createZipArchive(tmpDir: string, tmpPackageDir: string) { + 
const zip = new AdmZip(); + const directoryPath = joinPath(tmpDir, tmpPackageDir); + + zip.addLocalFolder(directoryPath); + + return zip.toBuffer(); +} + +export function createPackage(packageDir: string, integration: Integration) { + createReadme(packageDir, integration); + createChangelog(packageDir, integration); + createBuildFile(packageDir); + createPackageManifest(packageDir, integration); + createPackageSystemTests(packageDir, integration); + createDefaultLogo(packageDir); +} + +function createDefaultLogo(packageDir: string) { + const logoDir = joinPath(packageDir, 'img'); + ensureDir(logoDir); + + const imgTemplateDir = joinPath(__dirname, '../templates/img'); + copyFileSync(joinPath(imgTemplateDir, 'logo.svg'), joinPath(logoDir, 'logo.svg')); +} + +function createBuildFile(packageDir: string) { + const buildTemplateDir = joinPath(__dirname, '../templates/build'); + + nunjucks.configure(buildTemplateDir, { autoescape: true }); + const buildFile = nunjucks.render('build.yml.j2', { ecs_version: '8.11.0' }); + + const buildDir = joinPath(packageDir, '_dev/build'); + ensureDir(buildDir); + writeFileSync(joinPath(buildDir, 'build.yml'), buildFile, 'utf-8'); +} + +function createChangelog(packageDir: string, integration: Integration): void { + const changelogTemplateDir = joinPath(__dirname, '../templates/img'); + nunjucks.configure(changelogTemplateDir, { autoescape: true }); + + const changelogTemplate = nunjucks.render('changelog.yml.j2', { + initial_version: integration.initialVersion, + }); + + writeFileSync(joinPath(packageDir, 'changelog.yml'), changelogTemplate, 'utf-8'); +} + +function createReadme(packageDir: string, integration: Integration) { + const readmeDir = joinPath(packageDir, '_dev/build/docs/'); + mkdirSync(readmeDir, { recursive: true }); + + const readmeTemplatesDir = joinPath(__dirname, '../templates/readme'); + nunjucks.configure(readmeTemplatesDir, { autoescape: true }); + + const readmeTemplate = nunjucks.render('README.md.j2', { + 
package_name: integration.name, + data_streams: integration.dataStreams, + }); + + writeFileSync(joinPath(readmeDir, 'README.md'), readmeTemplate, { encoding: 'utf-8' }); +} + +export function createPipeline(specificDataStreamDir: string, pipeline: object) { + const filePath = joinPath(specificDataStreamDir, 'elasticsearch/ingest_pipeline/default.yml'); + const yamlContent = '---\n' + yaml.dump(pipeline, { sortKeys: false }); + writeFileSync(filePath, yamlContent, 'utf-8'); +} diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/data_stream.ts b/x-pack/plugins/integration_assistant/server/integration_builder/data_stream.ts new file mode 100644 index 0000000000000..626c05074442e --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/integration_builder/data_stream.ts @@ -0,0 +1,131 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import fs from 'fs'; +import path from 'path'; +import nunjucks from 'nunjucks'; +import { DataStream } from '../../common'; + +function createDatastream( + packageName: string, + specificDataStreamDir: string, + dataStream: DataStream +): void { + nunjucks.configure({ autoescape: true }); + const dataStreamName = dataStream.name; + const manifestTemplatesDir = path.join(__dirname, '../templates/manifest'); + const pipelineDir = path.join(specificDataStreamDir, 'elasticsearch', 'ingest_pipeline'); + const env = new nunjucks.Environment(new nunjucks.FileSystemLoader(manifestTemplatesDir)); + const title = dataStream.title; + const description = dataStream.description; + + fs.mkdirSync(specificDataStreamDir, { recursive: true }); + createDataStreamFolders(specificDataStreamDir, pipelineDir); + createPipelineTests(specificDataStreamDir, dataStream.rawSamples, packageName, dataStreamName); + + const dsStreams: string[] = []; + for (const inputType of dataStream.inputTypes) { + // Skip httpjson and cel input types for now, requires new prompts. 
+ const inputEntryTemplate = env.getTemplate(`${inputType}_manifest.yml.j2`); + const mappedValues = { + data_stream_title: title, + data_stream_description: description, + package_name: packageName, + data_stream_name: dataStreamName, + }; + const dataStreamManifest = inputEntryTemplate.render(mappedValues); + + const commonTemplate = env.getTemplate('common.yml.j2'); + const commonManifest = commonTemplate.render(mappedValues); + + const combinedManifest = `${dataStreamManifest}\n${commonManifest}`; + dsStreams.push(combinedManifest); + createDataStreamSystemTests( + specificDataStreamDir, + inputType, + mappedValues, + packageName, + dataStreamName + ); + } + + const finalManifestTemplate = env.getTemplate('data_stream.yml.j2'); + const finalManifest = finalManifestTemplate.render({ title, data_streams: dsStreams }); + + fs.writeFileSync(path.join(specificDataStreamDir, 'manifest.yml'), finalManifest, 'utf-8'); +} + +function createDataStreamFolders(specificDataStreamDir: string, pipelineDir: string): void { + const dataStreamTemplatesDir = path.join(__dirname, '../templates/data_stream'); + for (const item of fs.readdirSync(dataStreamTemplatesDir)) { + const s = path.join(dataStreamTemplatesDir, item); + const d = path.join(specificDataStreamDir, item); + if (fs.lstatSync(s).isDirectory()) { + fs.cpSync(s, d, { recursive: true }); + } else { + fs.copyFileSync(s, d); + } + } + fs.mkdirSync(pipelineDir, { recursive: true }); +} + +function createPipelineTests( + specificDataStreamDir: string, + rawSamples: string[], + packageName: string, + dataStreamName: string +): void { + const pipelineTestTemplatesDir = path.join(__dirname, '../templates/pipeline_tests'); + const pipelineTestsDir = path.join(specificDataStreamDir, '_dev/test/pipeline'); + fs.mkdirSync(pipelineTestsDir, { recursive: true }); + for (const item of fs.readdirSync(pipelineTestTemplatesDir)) { + const s = path.join(pipelineTestTemplatesDir, item); + const d = path.join(pipelineTestsDir, item); + 
if (fs.lstatSync(s).isDirectory()) { + fs.cpSync(s, d, { recursive: true }); + } else { + fs.copyFileSync(s, d); + } + } + const formattedPackageName = packageName.replace(/_/g, '-'); + const formattedDataStreamName = dataStreamName.replace(/_/g, '-'); + const testFileName = path.join( + pipelineTestsDir, + `test-${formattedPackageName}-${formattedDataStreamName}.log` + ); + fs.writeFileSync(testFileName, rawSamples.join('\n'), 'utf-8'); +} + +function createDataStreamSystemTests( + specificDataStreamDir: string, + inputType: string, + mappedValues: Record, + packageName: string, + dataStreamName: string +): void { + const systemTestTemplatesDir = path.join(__dirname, '../templates/system_tests'); + nunjucks.configure({ autoescape: true }); + const env = new nunjucks.Environment(new nunjucks.FileSystemLoader(systemTestTemplatesDir)); + mappedValues.package_name = packageName.replace(/_/g, '-'); + mappedValues.data_stream_name = dataStreamName.replace(/_/g, '-'); + const systemTestFolder = path.join(specificDataStreamDir, '_dev/test/system'); + + fs.mkdirSync(systemTestFolder, { recursive: true }); + + const systemTestTemplate = env.getTemplate(`test-${inputType}-config.yml.j2`); + const systemTestRendered = systemTestTemplate.render(mappedValues); + + const systemTestFileName = path.join(systemTestFolder, `test-${inputType}-config.yml`); + fs.writeFileSync(systemTestFileName, systemTestRendered, 'utf-8'); +} + +export { + createDatastream, + createDataStreamFolders, + createPipelineTests, + createDataStreamSystemTests, +}; diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/dev_folders.ts b/x-pack/plugins/integration_assistant/server/integration_builder/dev_folders.ts new file mode 100644 index 0000000000000..7784ee91bc3b3 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/integration_builder/dev_folders.ts @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import * as fs from 'fs'; +import * as path from 'path'; +import nunjucks from 'nunjucks'; +import { Integration } from '../../common'; +import { ensureDir } from '../util/util'; + +export function createPackageSystemTests(integrationDir: string, integration: Integration) { + const systemTestsTemplatesDir = path.join(__dirname, '../templates/system_tests'); + const systemTestsDockerDir = path.join(integrationDir, '_dev/deploy/docker/'); + const systemTestsSamplesDir = path.join(systemTestsDockerDir, 'sample_logs'); + ensureDir(systemTestsSamplesDir); + + nunjucks.configure(systemTestsTemplatesDir, { autoescape: true }); + + const systemTestDockerTemplate = fs.readFileSync( + path.join(systemTestsTemplatesDir, 'docker-compose.yml.j2'), + 'utf-8' + ); + const streamVersion = integration.streamVersion || '0.13.0'; + const dockerComposeVersion = integration.dockerComposeVersion || '2.3'; + const dockerServices: string[] = []; + for (const stream of integration.dataStreams) { + const packageName = integration.name.replace(/_/g, '-'); + const dataStreamName = stream.name.replace(/_/g, '-'); + + const systemTestFileName = path.join( + systemTestsSamplesDir, + `test-${packageName}-${dataStreamName}.log` + ); + const rawSamplesContent = stream.rawSamples.join('\n'); + fs.writeFileSync(systemTestFileName, rawSamplesContent, 'utf-8'); + + for (const inputType of stream.inputTypes) { + const systemTestServiceTemplate = fs.readFileSync( + path.join(systemTestsTemplatesDir, `service-${inputType}.j2`), + 'utf-8' + ); + const mappedValues = { + package_name: packageName, + data_stream_name: dataStreamName, + stream_version: streamVersion, + }; + const renderedService = nunjucks.renderString(systemTestServiceTemplate, mappedValues); + dockerServices.push(renderedService); + } + } + + const 
renderedDockerCompose = nunjucks.renderString(systemTestDockerTemplate, { + services: dockerServices.join('\n'), + docker_compose_version: dockerComposeVersion, + }); + + const dockerComposeFileName = path.join(systemTestsDockerDir, 'docker-compose.yml'); + fs.writeFileSync(dockerComposeFileName, renderedDockerCompose, 'utf-8'); +} diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts b/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts new file mode 100644 index 0000000000000..5613ff6ef6f17 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +import * as fs from 'fs'; +import * as path from 'path'; +import nunjucks from 'nunjucks'; +import { mergeSamples } from '../util/samples'; +import { generateFields } from '../util/samples'; + +interface Doc { + [key: string]: any; +} + +function createFieldMapping( + packageName: string, + dataStreamName: string, + specificDataStreamDir: string, + docs: Doc[] +): void { + const fieldsTemplatesDir = path.join(__dirname, '../templates/fields'); + + const env = nunjucks.configure(fieldsTemplatesDir, { autoescape: true }); + + createBaseFields(specificDataStreamDir, packageName, dataStreamName, env); + createCustomFields(specificDataStreamDir, docs); +} + +function createBaseFields( + specificDataStreamDir: string, + packageName: string, + dataStreamName: string, + env: nunjucks.Environment +): void { + const baseFieldsTemplate = env.getTemplate('base-fields.yml.njk'); + const datasetName = `${packageName}.${dataStreamName}`; + const baseFieldsResult = baseFieldsTemplate.render({ module: packageName, dataset: datasetName }); + + 
fs.writeFileSync(`${specificDataStreamDir}/fields/base-fields.yml`, baseFieldsResult, { + encoding: 'utf-8', + }); +} + +function createCustomFields(specificDataStreamDir: string, pipelineResults: Doc[]): void { + const mergedResults = mergeSamples(pipelineResults); + const fieldKeys = generateFields(mergedResults); + fs.writeFileSync(`${specificDataStreamDir}/fields/fields.yml`, fieldKeys, { encoding: 'utf-8' }); +} + +export { createFieldMapping }; diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/index.ts b/x-pack/plugins/integration_assistant/server/integration_builder/index.ts new file mode 100644 index 0000000000000..8de03051d75c5 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/integration_builder/index.ts @@ -0,0 +1,8 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +export { buildPackage } from './build_integration'; diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/manifest.ts b/x-pack/plugins/integration_assistant/server/integration_builder/manifest.ts new file mode 100644 index 0000000000000..dbea3c5c57673 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/integration_builder/manifest.ts @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import * as fs from 'fs'; +import * as path from 'path'; +import nunjucks from 'nunjucks'; +import { Integration, DataStream } from '../../common'; + +export function createPackageManifest(packageDir: string, integration: Integration) { + const manifestTemplatesDir = path.join(__dirname, '../templates/manifest'); + const uniqueInputs: { [key: string]: { type: string; title: string; description: string } } = {}; + + integration.dataStreams.forEach((dataStream: DataStream) => { + dataStream.inputTypes.forEach((inputType: string) => { + if (!uniqueInputs[inputType]) { + uniqueInputs[inputType] = { + type: inputType, + title: dataStream.title, + description: dataStream.description, + }; + } + }); + }); + + const uniqueInputsList = Object.values(uniqueInputs); + + nunjucks.configure(manifestTemplatesDir, { autoescape: true }); + + const template = nunjucks.render('package.yml.j2', { + format_version: integration.formatVersion, + package_title: integration.title, + package_name: integration.name, + package_version: integration.initialVersion, + package_description: integration.description, + package_owner: integration.owner, + min_version: integration.minKibanaVersion, + inputs: uniqueInputsList, + }); + + fs.writeFileSync(path.join(packageDir, 'manifest.yml'), template, { encoding: 'utf-8' }); +} diff --git a/x-pack/plugins/integration_assistant/server/providers/bedrock.ts b/x-pack/plugins/integration_assistant/server/providers/bedrock.ts index 87dc8e077c440..7b5590d4a3041 100644 --- a/x-pack/plugins/integration_assistant/server/providers/bedrock.ts +++ b/x-pack/plugins/integration_assistant/server/providers/bedrock.ts @@ -19,6 +19,7 @@ export function getModel(): BedrockChat { }, modelKwargs: { top_k: 200, + temperature: 0.05, top_p: 0.4, stop_sequences: ['Human:'], }, diff --git a/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts b/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts index 
45779442c500d..34cf10acad850 100644 --- a/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts @@ -9,6 +9,7 @@ import { IRouter } from '@kbn/core/server'; import { schema } from '@kbn/config-schema'; import { INTEGRATION_BUILDER_PATH } from '../../common'; +// TODO: Currently not implemented export function registerIntegrationBuilderRoutes(router: IRouter) { router.post( { @@ -20,14 +21,15 @@ export function registerIntegrationBuilderRoutes(router: IRouter) { dataStreamName: schema.string(), dataStreamTitle: schema.string(), inputTypes: schema.arrayOf(schema.string()), - formSamples: schema.arrayOf(schema.string()), + rawSamples: schema.arrayOf(schema.string()), ingestPipeline: schema.any(), docs: schema.arrayOf(schema.any()), }), }, }, async (_, req, res) => { - return res.ok(); + // TODO: Switch out if/when implemented + return res.custom({ statusCode: 501 }); } ); } diff --git a/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts b/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts index d9e21b568d4c9..f72412f964a2f 100644 --- a/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts @@ -10,22 +10,28 @@ import { schema } from '@kbn/config-schema'; import { CATEGORIZATION_GRAPH_PATH } from '../../common'; import { CategorizationApiRequest, CategorizationApiResponse } from '../../common/types'; import { getCategorizationGraph } from '../graphs/categorization'; +import { ROUTE_HANDLER_TIMEOUT } from '../constants'; export function registerCategorizationRoutes(router: IRouter) { router.post( { path: `${CATEGORIZATION_GRAPH_PATH}`, + options: { + timeout: { + idleSocket: ROUTE_HANDLER_TIMEOUT, + }, + }, validate: { body: schema.object({ packageName: schema.string(), dataStreamName: schema.string(), - 
formSamples: schema.arrayOf(schema.string()), - ingestPipeline: schema.maybe(schema.any()), + rawSamples: schema.arrayOf(schema.string()), + currentPipeline: schema.any(), }), }, }, async (_, req, res) => { - const { packageName, dataStreamName, formSamples, ingestPipeline } = + const { packageName, dataStreamName, rawSamples, currentPipeline } = req.body as CategorizationApiRequest; const graph = await getCategorizationGraph(); let results = { results: { docs: {}, pipeline: {} } }; @@ -33,8 +39,8 @@ export function registerCategorizationRoutes(router: IRouter) { results = (await graph.invoke({ packageName, dataStreamName, - formSamples, - ingestPipeline, + rawSamples, + currentPipeline, })) as CategorizationApiResponse; } catch (e) { // TODO: Better error responses? diff --git a/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts b/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts index 4de757ff5394b..6d289e5e53397 100644 --- a/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts @@ -10,24 +10,29 @@ import { schema } from '@kbn/config-schema'; import { ECS_GRAPH_PATH } from '../../common'; import { EcsMappingApiRequest, EcsMappingApiResponse } from '../../common/types'; import { getEcsGraph } from '../graphs/ecs'; +import { ROUTE_HANDLER_TIMEOUT } from '../constants'; export function registerEcsRoutes(router: IRouter) { router.post( { path: `${ECS_GRAPH_PATH}`, + options: { + timeout: { + idleSocket: ROUTE_HANDLER_TIMEOUT, + }, + }, validate: { body: schema.object({ packageName: schema.string(), dataStreamName: schema.string(), - formSamples: schema.arrayOf(schema.string()), + rawSamples: schema.arrayOf(schema.string()), // TODO: This is a single nested object of any key or shape, any better schema? 
mapping: schema.maybe(schema.any()), }), }, }, async (_, req, res) => { - const { packageName, dataStreamName, formSamples, mapping } = - req.body as EcsMappingApiRequest; + const { packageName, dataStreamName, rawSamples, mapping } = req.body as EcsMappingApiRequest; const graph = await getEcsGraph(); let results = { results: { mapping: {}, pipeline: {} } }; try { @@ -35,14 +40,14 @@ export function registerEcsRoutes(router: IRouter) { results = (await graph.invoke({ packageName, dataStreamName, - formSamples, + rawSamples, mapping, })) as EcsMappingApiResponse; } else results = (await graph.invoke({ packageName, dataStreamName, - formSamples, + rawSamples, })) as EcsMappingApiResponse; } catch (e) { // TODO: Better error responses? diff --git a/x-pack/plugins/integration_assistant/server/routes/related_routes.ts b/x-pack/plugins/integration_assistant/server/routes/related_routes.ts index 83b0d3a4e875d..b439478f23a3c 100644 --- a/x-pack/plugins/integration_assistant/server/routes/related_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/related_routes.ts @@ -10,23 +10,29 @@ import { schema } from '@kbn/config-schema'; import { RELATED_GRAPH_PATH } from '../../common'; import { RelatedApiRequest, RelatedApiResponse } from '../../common/types'; import { getRelatedGraph } from '../graphs/related'; +import { ROUTE_HANDLER_TIMEOUT } from '../constants'; export function registerRelatedRoutes(router: IRouter) { router.post( { path: `${RELATED_GRAPH_PATH}`, + options: { + timeout: { + idleSocket: ROUTE_HANDLER_TIMEOUT, + }, + }, validate: { body: schema.object({ packageName: schema.string(), dataStreamName: schema.string(), - formSamples: schema.arrayOf(schema.string()), + rawSamples: schema.arrayOf(schema.string()), // TODO: This is a single nested object of any key or shape, any better schema? 
- ingestPipeline: schema.maybe(schema.any()), + currentPipeline: schema.maybe(schema.any()), }), }, }, async (_, req, res) => { - const { packageName, dataStreamName, formSamples, ingestPipeline } = + const { packageName, dataStreamName, rawSamples, currentPipeline } = req.body as RelatedApiRequest; const graph = await getRelatedGraph(); let results = { results: { docs: {}, pipeline: {} } }; @@ -34,8 +40,8 @@ export function registerRelatedRoutes(router: IRouter) { results = (await graph.invoke({ packageName, dataStreamName, - formSamples, - ingestPipeline, + rawSamples, + currentPipeline, })) as RelatedApiResponse; } catch (e) { // TODO: Better error responses? diff --git a/x-pack/plugins/integration_assistant/server/util/es.ts b/x-pack/plugins/integration_assistant/server/util/es.ts index f3a2d84316fa0..7483a7b690680 100644 --- a/x-pack/plugins/integration_assistant/server/util/es.ts +++ b/x-pack/plugins/integration_assistant/server/util/es.ts @@ -28,7 +28,7 @@ function formatSample(sample: string): DocTemplate { function newClient(): Client { const client = new Client({ - node: 'https://localhost:9200', + node: 'http://localhost:9200', auth: { username: 'elastic', password: 'changeme', @@ -40,7 +40,7 @@ function newClient(): Client { return client; } -async function _testPipeline(samples: string[], pipeline: object): Promise<[any[], any[]]> { +async function testPipeline(samples: string[], pipeline: object): Promise<[any[], any[]]> { const docs = samples.map((sample) => formatSample(sample)); const results: object[] = []; const errors: object[] = []; @@ -65,7 +65,7 @@ async function _testPipeline(samples: string[], pipeline: object): Promise<[any[ export async function handleValidatePipeline( state: EcsMappingState | CategorizationState | RelatedState ): Promise | Partial | Partial> { - const [errors, results] = await _testPipeline(state.rawSamples, state.currentPipeline); + const [errors, results] = await testPipeline(state.rawSamples, state.currentPipeline); 
console.log('testing validate pipeline'); console.log('errors', errors); //console.log("results", results); diff --git a/x-pack/plugins/integration_assistant/server/util/samples.ts b/x-pack/plugins/integration_assistant/server/util/samples.ts index a01304c8402fc..23782c7a02d24 100644 --- a/x-pack/plugins/integration_assistant/server/util/samples.ts +++ b/x-pack/plugins/integration_assistant/server/util/samples.ts @@ -4,6 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ +import * as yaml from 'js-yaml'; import { CategorizationState, EcsMappingState, RelatedState } from '../types'; interface SampleObj { @@ -16,6 +17,12 @@ interface NewObj { }; } +interface Field { + name: string; + type: string; + fields?: Field[]; +} + export function modifySamples(state: EcsMappingState | CategorizationState | RelatedState) { const modifiedSamples: string[] = []; const rawSamples = state.rawSamples; @@ -92,3 +99,108 @@ export function formatSamples(samples: string[]): string { return JSON.stringify(formattedSamples, null, 2); } + +function determineType(value: any): string { + if (typeof value === 'object' && value !== null) { + if (Array.isArray(value)) { + return 'group'; + } + return 'group'; + } + if (typeof value === 'string') { + return 'keyword'; + } + if (typeof value === 'boolean') { + return 'boolean'; + } + if (typeof value === 'number') { + return 'long'; + } + return 'keyword'; // Default type for null or other undetermined types +} + +function recursiveParse(obj: any, path: string[]): Field { + if (typeof obj === 'object' && obj !== null) { + if (Array.isArray(obj)) { + // Assume list elements are uniform and use the first element as representative + if (obj.length > 0) { + return recursiveParse(obj[0], path); + } + return { name: path[path.length - 1], type: 'group', fields: [] }; + } + const fields: Field[] = []; + for (const [key, value] of Object.entries(obj)) { + fields.push(recursiveParse(value, path.concat(key))); 
+ } + return { name: path[path.length - 1], type: 'group', fields }; + } + return { name: path[path.length - 1], type: determineType(obj) }; +} + +export function generateFields(mergedDocs: string): string { + const ecsTopKeysSet: Set = new Set([ + '@timestamp', + 'agent', + 'as', + 'base', + 'client', + 'cloud', + 'code_signature', + 'container', + 'data_stream', + 'destination', + 'device', + 'dll', + 'dns', + 'ecs', + 'elf', + 'email', + 'error', + 'event', + 'faas', + 'file', + 'geo', + 'group', + 'hash', + 'host', + 'http', + 'interface', + 'labels', + 'log', + 'macho', + 'message', + 'network', + 'observer', + 'orchestrator', + 'organization', + 'os', + 'package', + 'pe', + 'process', + 'registry', + 'related', + 'risk', + 'rule', + 'server', + 'service', + 'source', + 'tags', + 'threat', + 'tls', + 'tracing', + 'url', + 'user', + 'user_agent', + 'vlan', + 'volume', + 'vulnerability', + 'x509', + ]); + + const doc: SampleObj = JSON.parse(mergedDocs); + const fieldsStructure: Field[] = Object.keys(doc) + .filter((key) => !ecsTopKeysSet.has(key)) + .map((key) => recursiveParse(doc[key], [key])); + + return yaml.dump(fieldsStructure, { sortKeys: false }); +} diff --git a/x-pack/plugins/integration_assistant/server/util/util.ts b/x-pack/plugins/integration_assistant/server/util/util.ts index cacc21c0391ba..92a802149904b 100644 --- a/x-pack/plugins/integration_assistant/server/util/util.ts +++ b/x-pack/plugins/integration_assistant/server/util/util.ts @@ -4,6 +4,18 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ +import { existsSync, mkdirSync } from 'fs'; + export function deepCopy(obj: T): T { return JSON.parse(JSON.stringify(obj)); } + +export function ensureDir(dir: string): void { + if (!existsSync(dir)) { + mkdirSync(dir, { recursive: true }); + } +} + +export function generateUniqueId() { + return `${Date.now() + Math.floor(Math.random() * 1e13)}`; +} From 540882c188e79b8a00bdadb6d218e01c9de1bb86 Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Sun, 26 May 2024 19:52:19 +0200 Subject: [PATCH 10/62] stashing many changes with a non-descriptive commit message :) --- .../integration_assistant/common/index.ts | 2 - .../integration_assistant/common/types.ts | 7 +- .../integration_assistant/jest.config.js | 18 ++ .../integration_assistant/public/app.tsx | 130 +++++--------- .../components/build_integration_button.tsx | 149 ++++++++++++++++ .../components/categorization_button.tsx | 73 ++++++++ .../public/components/ecs_button.tsx | 67 ++++++++ .../public/components/related_button.tsx | 74 ++++++++ .../integration_assistant/public/services.ts | 6 +- .../graphs/categorization/categorization.ts | 1 - .../server/graphs/categorization/errors.ts | 1 - .../server/graphs/categorization/graph.ts | 1 - .../server/graphs/categorization/invalid.ts | 1 - .../server/graphs/categorization/review.ts | 1 - .../server/graphs/ecs/duplicates.ts | 1 - .../server/graphs/ecs/invalid.ts | 1 - .../server/graphs/ecs/mapping.ts | 1 - .../server/graphs/ecs/missing.ts | 1 - .../server/graphs/ecs/pipeline.ts | 8 +- .../server/graphs/ecs/validate.ts | 2 +- .../server/graphs/related/errors.ts | 1 - .../server/graphs/related/graph.ts | 1 - .../server/graphs/related/related.ts | 1 - .../server/graphs/related/review.ts | 1 - .../server/integration_builder/agent.ts | 32 ++-- .../integration_builder/build_integration.ts | 160 ++++++++++-------- .../server/integration_builder/data_stream.ts | 122 ++++++------- .../server/integration_builder/dev_folders.ts | 36 ++-- 
.../server/integration_builder/fields.ts | 45 +++-- .../server/integration_builder/manifest.ts | 45 ----- .../server/integration_builder/pipeline.ts | 15 ++ .../server/routes/build_integration_routes.ts | 55 ++++-- .../server/routes/categorization_routes.ts | 2 +- .../templates/{readme => }/README.md.njk | 0 ...udwatch.yml.njk => aws-cloudwatch.yml.hbs} | 0 .../agent/{aws-s3.yml.njk => aws-s3.yml.hbs} | 0 ...age.yml.njk => azure-blob-storage.yml.hbs} | 0 ...venthub.yml.njk => azure-eventhub.yml.hbs} | 0 ...udfoundry.yml.njk => cloudfoundry.yml.hbs} | 0 .../agent/{common.yml.njk => common.yml.hbs} | 0 ...{filestream.yml.njk => filestream.yml.hbs} | 0 ...{gcp-pubsub.yml.njk => gcp-pubsub.yml.hbs} | 0 .../agent/{gcs.yml.njk => gcs.yml.hbs} | 0 ...endpoint.yml.njk => http_endpoint.yml.hbs} | 0 .../{journald.yml.njk => journald.yml.hbs} | 0 .../agent/{kafka.yml.njk => kafka.yml.hbs} | 0 .../{logfile.yml.njk => logfile.yml.hbs} | 0 .../agent/{tcp.yml.njk => tcp.yml.hbs} | 0 .../agent/{udp.yml.njk => udp.yml.hbs} | 0 .../{fields => }/base-fields.yml.njk | 0 .../templates/{build => }/build.yml.njk | 0 .../{changelog => }/changelog.yml.njk | 0 ...common.yml.njk => common_manifest.yml.njk} | 0 ...ckage.yml.njk => package_manifest.yml.njk} | 0 .../{ssl.yml.njk => ssl_manifest.yml.njk} | 0 .../templates/{pipeline => }/pipeline.yml.njk | 0 .../server/util/async_file.ts | 52 ++++++ .../integration_assistant/server/util/es.ts | 4 +- .../server/util/index.ts | 18 ++ .../server/util/pipeline.ts | 12 +- .../integration_assistant/server/util/util.ts | 7 - 61 files changed, 764 insertions(+), 390 deletions(-) create mode 100644 x-pack/plugins/integration_assistant/jest.config.js create mode 100644 x-pack/plugins/integration_assistant/public/components/build_integration_button.tsx create mode 100644 x-pack/plugins/integration_assistant/public/components/categorization_button.tsx create mode 100644 x-pack/plugins/integration_assistant/public/components/ecs_button.tsx create mode 100644 
x-pack/plugins/integration_assistant/public/components/related_button.tsx delete mode 100644 x-pack/plugins/integration_assistant/server/integration_builder/manifest.ts create mode 100644 x-pack/plugins/integration_assistant/server/integration_builder/pipeline.ts rename x-pack/plugins/integration_assistant/server/templates/{readme => }/README.md.njk (100%) rename x-pack/plugins/integration_assistant/server/templates/agent/{aws-cloudwatch.yml.njk => aws-cloudwatch.yml.hbs} (100%) rename x-pack/plugins/integration_assistant/server/templates/agent/{aws-s3.yml.njk => aws-s3.yml.hbs} (100%) rename x-pack/plugins/integration_assistant/server/templates/agent/{azure-blob-storage.yml.njk => azure-blob-storage.yml.hbs} (100%) rename x-pack/plugins/integration_assistant/server/templates/agent/{azure-eventhub.yml.njk => azure-eventhub.yml.hbs} (100%) rename x-pack/plugins/integration_assistant/server/templates/agent/{cloudfoundry.yml.njk => cloudfoundry.yml.hbs} (100%) rename x-pack/plugins/integration_assistant/server/templates/agent/{common.yml.njk => common.yml.hbs} (100%) rename x-pack/plugins/integration_assistant/server/templates/agent/{filestream.yml.njk => filestream.yml.hbs} (100%) rename x-pack/plugins/integration_assistant/server/templates/agent/{gcp-pubsub.yml.njk => gcp-pubsub.yml.hbs} (100%) rename x-pack/plugins/integration_assistant/server/templates/agent/{gcs.yml.njk => gcs.yml.hbs} (100%) rename x-pack/plugins/integration_assistant/server/templates/agent/{http_endpoint.yml.njk => http_endpoint.yml.hbs} (100%) rename x-pack/plugins/integration_assistant/server/templates/agent/{journald.yml.njk => journald.yml.hbs} (100%) rename x-pack/plugins/integration_assistant/server/templates/agent/{kafka.yml.njk => kafka.yml.hbs} (100%) rename x-pack/plugins/integration_assistant/server/templates/agent/{logfile.yml.njk => logfile.yml.hbs} (100%) rename x-pack/plugins/integration_assistant/server/templates/agent/{tcp.yml.njk => tcp.yml.hbs} (100%) rename 
x-pack/plugins/integration_assistant/server/templates/agent/{udp.yml.njk => udp.yml.hbs} (100%) rename x-pack/plugins/integration_assistant/server/templates/{fields => }/base-fields.yml.njk (100%) rename x-pack/plugins/integration_assistant/server/templates/{build => }/build.yml.njk (100%) rename x-pack/plugins/integration_assistant/server/templates/{changelog => }/changelog.yml.njk (100%) rename x-pack/plugins/integration_assistant/server/templates/manifest/{common.yml.njk => common_manifest.yml.njk} (100%) rename x-pack/plugins/integration_assistant/server/templates/manifest/{package.yml.njk => package_manifest.yml.njk} (100%) rename x-pack/plugins/integration_assistant/server/templates/manifest/{ssl.yml.njk => ssl_manifest.yml.njk} (100%) rename x-pack/plugins/integration_assistant/server/templates/{pipeline => }/pipeline.yml.njk (100%) create mode 100644 x-pack/plugins/integration_assistant/server/util/async_file.ts create mode 100644 x-pack/plugins/integration_assistant/server/util/index.ts diff --git a/x-pack/plugins/integration_assistant/common/index.ts b/x-pack/plugins/integration_assistant/common/index.ts index b4f162da98fc8..6398a2c495779 100644 --- a/x-pack/plugins/integration_assistant/common/index.ts +++ b/x-pack/plugins/integration_assistant/common/index.ts @@ -30,5 +30,3 @@ export { INTEGRATION_BUILDER_PATH, INTEGRATION_ASSISTANT_BASE_PATH, } from './constants'; - -export { ECS_FULL } from './ecs'; diff --git a/x-pack/plugins/integration_assistant/common/types.ts b/x-pack/plugins/integration_assistant/common/types.ts index d13e85ad15214..86dd55cab9c2e 100644 --- a/x-pack/plugins/integration_assistant/common/types.ts +++ b/x-pack/plugins/integration_assistant/common/types.ts @@ -47,7 +47,8 @@ export interface DataStream { description: string; inputTypes: InputTypes[]; rawSamples: string[]; - pipeline: Pipeline; + // TODO: figure out why changing this to `Pipeline` makes the frontend test objects complain about types. 
+ pipeline: object; docs: object[]; } @@ -59,8 +60,8 @@ export interface Integration { dataStreams: DataStream[]; streamVersion?: string; dockerComposeVersion?: string; - initialVersion: string; - formatVersion: string; + initialVersion?: string; + formatVersion?: string; owner: string; minKibanaVersion: string; } diff --git a/x-pack/plugins/integration_assistant/jest.config.js b/x-pack/plugins/integration_assistant/jest.config.js new file mode 100644 index 0000000000000..3c0a3a0899c95 --- /dev/null +++ b/x-pack/plugins/integration_assistant/jest.config.js @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +module.exports = { + preset: '@kbn/test', + rootDir: '../../..', + roots: ['/x-pack/plugins/integration_assistant'], + coverageDirectory: '/target/kibana-coverage/jest/x-pack/plugins/integration_assistant', + coverageReporters: ['text', 'html'], + collectCoverageFrom: [ + '/x-pack/plugins/integration_assistant/{common,public,server}/**/*.{ts,tsx}', + ], + setupFiles: ['jest-canvas-mock'], +}; diff --git a/x-pack/plugins/integration_assistant/public/app.tsx b/x-pack/plugins/integration_assistant/public/app.tsx index 28894f5d42e70..d60db9c77ac30 100644 --- a/x-pack/plugins/integration_assistant/public/app.tsx +++ b/x-pack/plugins/integration_assistant/public/app.tsx @@ -9,24 +9,14 @@ import React, { useState } from 'react'; import ReactDOM from 'react-dom'; import { AppMountParameters } from '@kbn/core/public'; import type { IHttpFetchError } from '@kbn/core-http-browser'; -import { - EuiPageTemplate, - EuiText, - EuiButton, - EuiFlexGroup, - EuiFlexItem, - EuiCodeBlock, -} from '@elastic/eui'; -import { - EcsMappingApiRequest, - EcsMappingApiResponse, - CategorizationApiRequest, - CategorizationApiResponse, - RelatedApiRequest, - 
RelatedApiResponse, -} from '../common'; +import { EuiPageTemplate, EuiText, EuiFlexGroup, EuiFlexItem, EuiCodeBlock } from '@elastic/eui'; +import { EcsMappingApiResponse, CategorizationApiResponse, RelatedApiResponse } from '../common'; import { Services } from './services'; +import { EcsButton } from './components/ecs_button'; +import { CategorizationButton } from './components/categorization_button'; +import { RelatedButton } from './components/related_button'; +import { BuildIntegrationButton } from './components/build_integration_button'; type Props = Services; @@ -41,8 +31,6 @@ function RoutingExplorer({ ); const [currentPipeline, setCurrentPipeline] = useState({} as object); const [resultDocs, setResultDocs] = useState([] as object[]); - const packageName = 'teleport'; - const dataStreamName = 'audit'; const rawSamples = [ '{"ei":0,"event":"user.login","uid":"b675d102-fc25-4f7a-bf5d-96468cc176ea","code":"T1000I","time":"2024-02-23T18:56:50.628Z","cluster_name":"teleport.ericbeahan.com","user":"teleport-admin","required_private_key_policy":"none","success":true,"method":"local","mfa_device":{"mfa_device_name":"otp-device","mfa_device_uuid":"d07bf388-af49-4ec2-b8a4-c8a9e785b70b","mfa_device_type":"TOTP"},"user_agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36","addr.remote":"136.61.214.196:50332"}', 
'{"ei":0,"event":"cert.create","uid":"efd326fc-dd13-4df8-acef-3102c2d717d3","code":"TC000I","time":"2024-02-23T18:56:50.653Z","cluster_name":"teleport.ericbeahan.com","cert_type":"user","identity":{"user":"teleport-admin","roles":["access","editor"],"logins":["root","ubuntu","ec2-user","-teleport-internal-join"],"expires":"2024-02-24T06:56:50.648137154Z","route_to_cluster":"teleport.ericbeahan.com","traits":{"aws_role_arns":null,"azure_identities":null,"db_names":null,"db_roles":null,"db_users":null,"gcp_service_accounts":null,"host_user_gid":[""],"host_user_uid":[""],"kubernetes_groups":null,"kubernetes_users":null,"logins":["root","ubuntu","ec2-user"],"windows_logins":null},"teleport_cluster":"teleport.ericbeahan.com","client_ip":"136.61.214.196","prev_identity_expires":"0001-01-01T00:00:00Z","private_key_policy":"none"}}', @@ -53,75 +41,6 @@ function RoutingExplorer({ return 'message' in response; }; - // TODO: All these basic functions and UI is only here for testing purposes - async function onEcsButtonClick() { - const request = { - packageName, - dataStreamName, - rawSamples, - } as EcsMappingApiRequest; - try { - const ecsResponse = await runEcsGraph(request); - if (!isFetchError(ecsResponse)) { - if (Object.keys(ecsResponse?.results).length > 0) { - setCurrentPipeline(ecsResponse.results.pipeline); - setLastResponse(ecsResponse); - console.log('finished ecs graph successfully'); - } else { - console.log('finished ecs graph without errors, but no results'); - } - } - } catch (e) { - console.log(e); - } - } - async function onCategorizationButtonClick() { - const request = { - packageName, - dataStreamName, - rawSamples, - currentPipeline, - } as CategorizationApiRequest; - try { - const categorizationResponse = await runCategorizationGraph(request); - if (!isFetchError(categorizationResponse)) { - if (Object.keys(categorizationResponse?.results).length > 0) { - setCurrentPipeline(categorizationResponse.results.pipeline); - 
setResultDocs(categorizationResponse.results.docs); - setLastResponse(categorizationResponse); - console.log('finished categorization graph successfully'); - } else { - console.log('finished categorization graph without errors, but no results'); - } - } - } catch (e) { - console.log(e); - } - } - async function onRelatedButtonClick() { - const request = { - packageName, - dataStreamName, - rawSamples, - currentPipeline, - } as RelatedApiRequest; - try { - const relatedResponse = await runRelatedGraph(request); - if (!isFetchError(relatedResponse)) { - if (Object.keys(relatedResponse?.results).length > 0) { - setCurrentPipeline(relatedResponse.results.pipeline); - setResultDocs(relatedResponse.results.docs); - setLastResponse(relatedResponse); - console.log('finished related graph successfully'); - } else { - console.log('finished related graph without errors, but no results'); - } - } - } catch (e) { - console.log(e); - } - } - return ( @@ -132,15 +51,42 @@ function RoutingExplorer({ - onEcsButtonClick()}>Run ECS Graph + + + + - onCategorizationButtonClick()}> - Run Categorization Graph - + - onRelatedButtonClick()}>Run Related Graph + diff --git a/x-pack/plugins/integration_assistant/public/components/build_integration_button.tsx b/x-pack/plugins/integration_assistant/public/components/build_integration_button.tsx new file mode 100644 index 0000000000000..d927edd7a3b5b --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/build_integration_button.tsx @@ -0,0 +1,149 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import React, { useState } from 'react'; +import { EuiButton } from '@elastic/eui'; +import type { IHttpFetchError } from '@kbn/core-http-browser'; +import { BuildIntegrationApiRequest } from '../../common'; +// TODO: Temp test button while UI development is in progress +interface BuildIntegrationButtonProps { + runIntegrationBuilder: ( + req: BuildIntegrationApiRequest + ) => Promise>; + rawSamples: any[]; + isFetchError: (response: any) => boolean; +} +export const BuildIntegrationButton = ({ + runIntegrationBuilder, + rawSamples, + isFetchError, +}: BuildIntegrationButtonProps) => { + const [isLoading, setIsLoading] = useState(false); + const [isDisabled, setIsDisabled] = useState(false); + const testdocs = [ + { + ecs: { + version: '8.11.0', + }, + related: { + user: ['', 'teleport-admin', '{0=access, 1=editor}'], + ip: ['136.61.214.196'], + }, + teleport: { + audit: { + cluster_name: 'teleport.ericbeahan.com', + cert_type: 'user', + }, + }, + }, + { + ecs: { + version: '8.11.0', + }, + related: { + user: ['', 'teleport-admin', '{0=access, 1=editor}'], + ip: ['136.61.214.196'], + }, + teleport: { + audit: { + cluster_name: 'teleport.ericbeahan.com', + cert_type: 'user', + }, + }, + }, + ]; + const testPipeline = { + description: 'Pipeline to process teleport audit logs', + processors: [ + { + set: { + field: 'ecs.version', + tag: 'set_ecs_version', + value: '8.11.0', + }, + }, + { + rename: { + field: 'message', + target_field: 'event.original', + tag: 'rename_message', + ignore_missing: true, + if: 'ctx.event?.original == null', + }, + }, + ], + on_failure: [ + { + append: { + field: 'error.message', + value: + 'Processor {{{_ingest.on_failure_processor_type}}} with tag {{{_ingest.on_failure_processor_tag}}} in pipeline {{{_ingest.on_failure_pipeline}}} failed with message: {{{_ingest.on_failure_message}}}', + }, + }, + { + set: { + field: 'event.kind', + value: 'pipeline_error', + }, + }, + ], + }; + async function onBuildIntegrationButtonClick() { + 
const request = { + integration: { + name: 'teleport', + title: 'Test Package Title', + description: 'Test Package Description', + initialVersion: '0.1.0', + dataStreams: [ + { + title: 'Datastream 1 Test Title', + name: 'audit', + description: 'Datastream 1 Test Description', + inputTypes: ['filestream'], + pipeline: testPipeline, + docs: testdocs, + rawSamples, + }, + { + title: 'Datastream 2 Test Title', + name: 'session', + description: 'Datastream 2 Test Description', + inputTypes: ['gcs'], + pipeline: testPipeline, + docs: testdocs, + rawSamples, + }, + ], + streamVersion: '0.13.0', + dockerComposeVersion: '2.3', + formatVersion: '3.11.0', + owner: '@elastic/test-team', + minKibanaVersion: '8.13.0', + }, + } as BuildIntegrationApiRequest; + try { + const buildIntegrationResponse = await runIntegrationBuilder(request); + if (!isFetchError(buildIntegrationResponse)) { + console.log('finished building integration successfully'); + } + } catch (e) { + console.log(e); + } + } + return ( + + {isLoading ? 'Building Integration' : 'Build Integration'} + + ); +}; diff --git a/x-pack/plugins/integration_assistant/public/components/categorization_button.tsx b/x-pack/plugins/integration_assistant/public/components/categorization_button.tsx new file mode 100644 index 0000000000000..3f717cff6ea23 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/categorization_button.tsx @@ -0,0 +1,73 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +import React, { useState } from 'react'; +import { EuiButton } from '@elastic/eui'; +import type { IHttpFetchError } from '@kbn/core-http-browser'; +import { CategorizationApiRequest, CategorizationApiResponse } from '../../common'; +// TODO: Temp test button while UI development is in progress +interface CategorizationButtonProps { + runCategorizationGraph: ( + req: CategorizationApiRequest + ) => Promise>; + rawSamples: any[]; + currentPipeline: any; + setCurrentPipeline: (pipeline: any) => void; + setLastResponse: (response: any) => void; + setResultDocs: (docs: any) => void; + isFetchError: (response: any) => boolean; +} +export const CategorizationButton = ({ + runCategorizationGraph, + rawSamples, + currentPipeline, + setCurrentPipeline, + setLastResponse, + setResultDocs, + isFetchError, +}: CategorizationButtonProps) => { + const [isLoading, setIsLoading] = useState(false); + const [isDisabled, setIsDisabled] = useState(false); + async function onCategorizationButtonClick() { + setIsLoading(true); + const request = { + packageName: 'teleport', + dataStreamName: 'audit', + rawSamples, + currentPipeline, + } as CategorizationApiRequest; + try { + const categorizationResponse = await runCategorizationGraph(request); + if (!isFetchError(categorizationResponse)) { + if (Object.keys(categorizationResponse?.results).length > 0) { + setCurrentPipeline(categorizationResponse.results.pipeline); + setResultDocs(categorizationResponse.results.docs); + setLastResponse(categorizationResponse); + console.log('finished categorization graph successfully'); + } else { + console.log('finished categorization graph without errors, but no results'); + } + setIsLoading(false); + setIsDisabled(true); + } + } catch (e) { + setIsLoading(false); + console.log(e); + } + } + return ( + + {isLoading ? 
'Running Categorization Graph' : 'Run Categorization Graph'} + + ); +}; diff --git a/x-pack/plugins/integration_assistant/public/components/ecs_button.tsx b/x-pack/plugins/integration_assistant/public/components/ecs_button.tsx new file mode 100644 index 0000000000000..e06951b95d858 --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/ecs_button.tsx @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import React, { useState } from 'react'; +import { EuiButton } from '@elastic/eui'; +import type { IHttpFetchError } from '@kbn/core-http-browser'; +import { EcsMappingApiRequest, EcsMappingApiResponse } from '../../common'; +// TODO: Temp test button while UI development is in progress +interface EcsButtonProps { + runEcsGraph: ( + req: EcsMappingApiRequest + ) => Promise>; + rawSamples: any[]; + setCurrentPipeline: (pipeline: any) => void; + setLastResponse: (response: any) => void; + isFetchError: (response: any) => boolean; +} +export const EcsButton = ({ + runEcsGraph, + rawSamples, + setCurrentPipeline, + setLastResponse, + isFetchError, +}: EcsButtonProps) => { + const [isLoading, setIsLoading] = useState(false); + const [isDisabled, setIsDisabled] = useState(false); + async function onEcsButtonClick() { + setIsLoading(true); + const request = { + packageName: 'teleport', + dataStreamName: 'audit', + rawSamples, + } as EcsMappingApiRequest; + try { + const ecsResponse = await runEcsGraph(request); + if (!isFetchError(ecsResponse)) { + if (Object.keys(ecsResponse?.results).length > 0) { + setCurrentPipeline(ecsResponse.results.pipeline); + setLastResponse(ecsResponse); + console.log('finished running ecs graph successfully'); + } else { + console.log('finished running ecs graph without errors, but no results'); + } + 
setIsLoading(false); + setIsDisabled(true); + } + } catch (e) { + setIsLoading(false); + console.log(e); + } + } + return ( + + {isLoading ? 'Running ECS Graph' : 'Run ECS Graph'} + + ); +}; diff --git a/x-pack/plugins/integration_assistant/public/components/related_button.tsx b/x-pack/plugins/integration_assistant/public/components/related_button.tsx new file mode 100644 index 0000000000000..9c29455e2de0e --- /dev/null +++ b/x-pack/plugins/integration_assistant/public/components/related_button.tsx @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import React, { useState } from 'react'; +import { EuiButton } from '@elastic/eui'; +import type { IHttpFetchError } from '@kbn/core-http-browser'; +import { RelatedApiRequest, RelatedApiResponse } from '../../common'; + +// TODO: Temp test button while UI development is in progress +interface RelatedButtonProps { + runRelatedGraph: ( + req: RelatedApiRequest + ) => Promise>; + rawSamples: any[]; + currentPipeline: any; + setCurrentPipeline: (pipeline: any) => void; + setLastResponse: (response: any) => void; + setResultDocs: (docs: any) => void; + isFetchError: (response: any) => boolean; +} +export const RelatedButton = ({ + runRelatedGraph, + rawSamples, + currentPipeline, + setCurrentPipeline, + setLastResponse, + setResultDocs, + isFetchError, +}: RelatedButtonProps) => { + const [isLoading, setIsLoading] = useState(false); + const [isDisabled, setIsDisabled] = useState(false); + async function onRelatedButtonClick() { + setIsLoading(true); + const request = { + packageName: 'teleport', + dataStreamName: 'audit', + rawSamples, + currentPipeline, + } as RelatedApiRequest; + try { + const relatedResponse = await runRelatedGraph(request); + if (!isFetchError(relatedResponse)) { + if 
(Object.keys(relatedResponse?.results).length > 0) { + setCurrentPipeline(relatedResponse.results.pipeline); + setResultDocs(relatedResponse.results.docs); + setLastResponse(relatedResponse); + console.log('finished related graph successfully'); + } else { + console.log('finished related graph without errors, but no results'); + } + setIsLoading(false); + setIsDisabled(true); + } + } catch (e) { + setIsLoading(false); + console.log(e); + } + } + return ( + + {isLoading ? 'Running Related Graph' : 'Run Related Graph'} + + ); +}; diff --git a/x-pack/plugins/integration_assistant/public/services.ts b/x-pack/plugins/integration_assistant/public/services.ts index 0b49b1888721a..d35bece2b8d31 100644 --- a/x-pack/plugins/integration_assistant/public/services.ts +++ b/x-pack/plugins/integration_assistant/public/services.ts @@ -29,7 +29,7 @@ export interface Services { req: CategorizationApiRequest ) => Promise; runRelatedGraph: (req: RelatedApiRequest) => Promise; - runIntegrationBuilder: (req: BuildIntegrationApiRequest) => Promise; + runIntegrationBuilder: (req: BuildIntegrationApiRequest) => Promise; } export function getServices(core: CoreStart): Services { @@ -69,9 +69,9 @@ export function getServices(core: CoreStart): Services { return e; } }, - runIntegrationBuilder: async (req: BuildIntegrationApiRequest): Promise => { + runIntegrationBuilder: async (req: BuildIntegrationApiRequest): Promise => { try { - const response = await core.http.post(INTEGRATION_BUILDER_PATH, { + const response = await core.http.post(INTEGRATION_BUILDER_PATH, { body: JSON.stringify({ ...req }), }); return response; diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts index ad6dcac358228..130de2094d166 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts +++ 
b/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts @@ -14,7 +14,6 @@ import { CATEGORIZATION_MAIN_PROMPT } from './prompts'; export async function handleCategorization(state: CategorizationState) { const categorizationMainPrompt = CATEGORIZATION_MAIN_PROMPT; const model = getModel(); - console.log('testing cat main'); const outputParser = new JsonOutputParser(); const categorizationMainGraph = categorizationMainPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts index ec840e644879b..695ce727ed58b 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts @@ -14,7 +14,6 @@ import { Pipeline } from '../../../common'; export async function handleErrors(state: CategorizationState) { const categorizationErrorPrompt = CATEGORIZATION_ERROR_PROMPT; const model = getModel(); - console.log('testing cat error'); const outputParser = new JsonOutputParser(); const categorizationErrorGraph = categorizationErrorPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts index 74fe6ff9db4b1..056dc48cf7c62 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts @@ -124,7 +124,6 @@ function modelOutput(state: CategorizationState): Partial { function validationRouter(state: CategorizationState): string { if (Object.keys(state.currentProcessors).length === 0) { - console.log('No current processors found'); return 'categorization'; } return 'validateCategorization'; diff --git 
a/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts index 69c47474af7c2..14063eb7064e4 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts @@ -15,7 +15,6 @@ import { Pipeline } from '../../../common'; export async function handleInvalidCategorization(state: CategorizationState) { const categorizationInvalidPrompt = CATEGORIZATION_VALIDATION_PROMPT; const model = getModel(); - console.log('testing cat invalid'); const outputParser = new JsonOutputParser(); const categorizationInvalidGraph = categorizationInvalidPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts index 8f0860b1362bc..9c6150dfcb381 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts @@ -15,7 +15,6 @@ import { Pipeline } from '../../../common'; export async function handleReview(state: CategorizationState) { const categorizationReviewPrompt = CATEGORIZATION_REVIEW_PROMPT; const model = getModel(); - console.log('testing cat review'); const outputParser = new JsonOutputParser(); const categorizationReview = categorizationReviewPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts index dba8fffd7ff11..73393114f043c 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts @@ -12,7 +12,6 @@ import { EcsMappingState } from '../../types'; export async function handleDuplicates(state: 
EcsMappingState) { const ecsDuplicatesPrompt = ECS_DUPLICATES_PROMPT; const model = getModel(); - console.log('testing ecs duplicate'); const outputParser = new JsonOutputParser(); const ecsDuplicatesGraph = ecsDuplicatesPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts index 3e790106f3a7a..2abb87d35b58b 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts @@ -12,7 +12,6 @@ import { EcsMappingState } from '../../types'; export async function handleInvalidEcs(state: EcsMappingState) { const ecsInvalidEcsPrompt = ECS_INVALID_PROMPT; const model = getModel(); - console.log('testing ecs invalid'); const outputParser = new JsonOutputParser(); const ecsInvalidEcsGraph = ecsInvalidEcsPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts index 4fc5685d09392..7df23d3d0e267 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts @@ -12,7 +12,6 @@ import { EcsMappingState } from '../../types'; export async function handleEcsMapping(state: EcsMappingState) { const ecsMainPrompt = ECS_MAIN_PROMPT; const model = getModel(); - console.log('testing ecs mapping'); const outputParser = new JsonOutputParser(); const ecsMainGraph = ecsMainPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts index 39f5409941a18..f390f28365deb 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts @@ -12,7 +12,6 @@ import { 
EcsMappingState } from '../../types'; export async function handleMissingKeys(state: EcsMappingState) { const ecsMissingPrompt = ECS_MISSING_KEYS_PROMPT; const model = getModel(); - console.log('testing ecs missing'); const outputParser = new JsonOutputParser(); const ecsMissingGraph = ecsMissingPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts index 1cd0524f486f0..a2e342ce37128 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts @@ -70,8 +70,7 @@ function getSampleValue(key: string, samples: Record): any { } return value; } catch (e) { - console.log(e); - return null; + throw e; } } @@ -177,8 +176,7 @@ export function createPipeline(state: EcsMappingState): IngestPipeline { const renderedTemplate = template.render(mappedValues); const ingestPipeline = load(renderedTemplate) as IngestPipeline; return ingestPipeline; - } catch (error) { - console.error('Error rendering template:', error); - throw error; + } catch (e) { + throw e; } } diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts index 3856c089e254d..078eae31d36f8 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -import { ECS_FULL } from '../../../common'; +import { ECS_FULL } from '../../../common/ecs'; import { EcsMappingState } from '../../types'; const valueFieldKeys = new Set(['target', 'confidence', 'date_formats', 'type']); diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts b/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts index 696eae4dee503..097df08ba8387 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts @@ -14,7 +14,6 @@ import { Pipeline } from '../../../common'; export async function handleErrors(state: RelatedState) { const relatedErrorPrompt = RELATED_ERROR_PROMPT; const model = getModel(); - console.log('testing related error'); const outputParser = new JsonOutputParser(); const relatedErrorGraph = relatedErrorPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts index c0380925264a2..15d47106e7581 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts @@ -113,7 +113,6 @@ function modelOutput(state: RelatedState): Partial { function inputRouter(state: RelatedState): string { if (Object.keys(state.pipelineResults).length === 0) { - console.log('No pipeline results found'); return 'validatePipeline'; } return 'related'; diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/related.ts b/x-pack/plugins/integration_assistant/server/graphs/related/related.ts index 05281ca6bea9d..b93597064bbd9 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/related.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/related.ts @@ -14,7 +14,6 @@ import { Pipeline } from '../../../common'; export async function handleRelated(state: 
RelatedState) { const relatedMainPrompt = RELATED_MAIN_PROMPT; const model = getModel(); - console.log('testing related main'); const outputParser = new JsonOutputParser(); const relatedMainGraph = relatedMainPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/review.ts b/x-pack/plugins/integration_assistant/server/graphs/related/review.ts index 5bd3898cf18d2..d50acefa694fa 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/review.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/review.ts @@ -14,7 +14,6 @@ import { Pipeline } from '../../../common'; export async function handleReview(state: RelatedState) { const relatedReviewPrompt = RELATED_REVIEW_PROMPT; const model = getModel(); - console.log('testing related review'); const outputParser = new JsonOutputParser(); const relatedReviewGraph = relatedReviewPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/agent.ts b/x-pack/plugins/integration_assistant/server/integration_builder/agent.ts index 37bdb10a2ca4b..e524619d8ca2e 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/agent.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/agent.ts @@ -5,29 +5,29 @@ * 2.0. 
*/ -import * as fs from 'fs'; -import * as path from 'path'; +import { join as joinPath } from 'path'; +import { InputTypes } from '../../common'; +import { asyncEnsureDir, asyncCreate, asyncRead } from '../util'; -export function createAgentInput(specificDataStreamDir: string, inputTypes: string[]): void { - const agentDir = path.join(specificDataStreamDir, 'agent', 'stream'); - const agentTemplatesDir = path.join(__dirname, '../templates/agent'); - fs.mkdirSync(agentDir, { recursive: true }); +export async function createAgentInput( + specificDataStreamDir: string, + inputTypes: InputTypes[] +): Promise { + const agentDir = joinPath(specificDataStreamDir, 'agent', 'stream'); + const agentTemplatesDir = joinPath(__dirname, '../templates/agent'); + await asyncEnsureDir(agentDir); // Load common options that exists for all .yml.hbs files, to be merged with each specific input file - const commonFilePath = path.join(agentTemplatesDir, 'common.yml.hbs'); - const commonFile = fs.readFileSync(commonFilePath, 'utf-8'); + const commonFilePath = joinPath(agentTemplatesDir, 'common.yml.hbs'); + const commonFile = await asyncRead(commonFilePath); for (const inputType of inputTypes) { - // TODO: Skip httpjson and cel input types for now, requires new prompts. 
- if (inputType === 'httpjson' || inputType === 'cel') { - continue; - } - const inputTypeFilePath = path.join(agentTemplatesDir, `${inputType}.yml.hbs`); - const inputTypeFile = fs.readFileSync(inputTypeFilePath, 'utf-8'); + const inputTypeFilePath = joinPath(agentTemplatesDir, `${inputType}.yml.hbs`); + const inputTypeFile = await asyncRead(inputTypeFilePath); const combinedContents = `${inputTypeFile}\n${commonFile}`; - const destinationFilePath = path.join(agentDir, `${inputType}.yml.hbs`); - fs.writeFileSync(destinationFilePath, combinedContents, 'utf-8'); + const destinationFilePath = joinPath(agentDir, `${inputType}.yml.hbs`); + await asyncCreate(destinationFilePath, combinedContents); } } diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts b/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts index c61d2be7e0f7c..d3f23bcdb88a2 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts @@ -5,123 +5,139 @@ * 2.0. 
*/ -import { copyFileSync, writeFileSync, mkdirSync } from 'fs'; import { join as joinPath } from 'path'; import { tmpdir } from 'os'; import nunjucks from 'nunjucks'; -import yaml from 'js-yaml'; import AdmZip from 'adm-zip'; import { Integration, DataStream } from '../../common'; -import { ensureDir } from '../util/util'; -import { createPackageManifest } from './manifest'; import { createPackageSystemTests } from './dev_folders'; import { createDatastream } from './data_stream'; import { createAgentInput } from './agent'; import { createFieldMapping } from './fields'; -import { generateUniqueId } from '../util/util'; +import { createPipeline } from './pipeline'; +import { generateUniqueId, asyncEnsureDir, asyncCopy, asyncCreate } from '../util'; + +export async function buildPackage(integration: Integration): Promise { + const templateDir = joinPath(__dirname, '../templates'); + const agentTemplates = joinPath(templateDir, 'agent'); + const manifestTemplates = joinPath(templateDir, 'manifest'); + const systemTestTemplates = joinPath(templateDir, 'system_tests'); + // TODO: A bit unsure how we are going to translate this autoescape false or not, needs to be investigated before merging. 
+ nunjucks.configure([templateDir, agentTemplates, manifestTemplates, systemTestTemplates], { + autoescape: false, + }); -export function buildPackage(integration: Integration): File { const tmpDir = joinPath(tmpdir(), `integration-assistant-${generateUniqueId()}`); - const packageDir = createDirectories(tmpDir, integration); + const packageDir = await createDirectories(tmpDir, integration); const dataStreamsDir = joinPath(packageDir, 'data_stream'); for (const dataStream of integration.dataStreams) { const dataStreamName = dataStream.name; const specificDataStreamDir = joinPath(dataStreamsDir, dataStreamName); - createDatastream(integration.name, specificDataStreamDir, dataStream); - createAgentInput(specificDataStreamDir, dataStream.inputTypes); - createPipeline(specificDataStreamDir, dataStream.pipeline); - createFieldMapping(integration.name, dataStreamName, specificDataStreamDir, dataStream.docs); + await createDatastream(integration.name, specificDataStreamDir, dataStream); + await createAgentInput(specificDataStreamDir, dataStream.inputTypes); + await createPipeline(specificDataStreamDir, dataStream.pipeline); + await createFieldMapping( + integration.name, + dataStreamName, + specificDataStreamDir, + dataStream.docs + ); } - const packageTempPath = joinPath( - tmpDir, - tmpPackageDir, - `${integration.name}-${integration.initialVersion}` - ); - const zipBuffer = createZipArchive(tmpDir, tmpPackageDir); + const tmpPackageDir = joinPath(tmpDir, `${integration.name}-${integration.initialVersion}`); + const zipBuffer = await createZipArchive(tmpPackageDir); return zipBuffer; } -function createDirectories(tmpDir: string, integration: Integration) { - mkdirSync(tmpDir, { recursive: true }); - +async function createDirectories(tmpDir: string, integration: Integration): Promise { const packageDir = joinPath(tmpDir, `${integration.name}-${integration.initialVersion}`); - - mkdirSync(packageDir, { recursive: true }); - - createPackage(packageDir, integration); - + 
await asyncEnsureDir(tmpDir); + await asyncEnsureDir(packageDir); + await createPackage(packageDir, integration); return packageDir; } -function createZipArchive(tmpDir: string, tmpPackageDir: string) { - const zip = new AdmZip(); - const directoryPath = joinPath(tmpDir, tmpPackageDir); - - zip.addLocalFolder(directoryPath); - - return zip.toBuffer(); -} - -export function createPackage(packageDir: string, integration: Integration) { - createReadme(packageDir, integration); - createChangelog(packageDir, integration); - createBuildFile(packageDir); - createPackageManifest(packageDir, integration); - createPackageSystemTests(packageDir, integration); - createDefaultLogo(packageDir); +async function createPackage(packageDir: string, integration: Integration): Promise { + await createReadme(packageDir, integration); + await createChangelog(packageDir, integration); + await createBuildFile(packageDir); + await createPackageManifest(packageDir, integration); + await createPackageSystemTests(packageDir, integration); + await createDefaultLogo(packageDir); } -function createDefaultLogo(packageDir: string) { +async function createDefaultLogo(packageDir: string): Promise { const logoDir = joinPath(packageDir, 'img'); - ensureDir(logoDir); - const imgTemplateDir = joinPath(__dirname, '../templates/img'); - copyFileSync(joinPath(imgTemplateDir, 'logo.svg'), joinPath(logoDir, 'logo.svg')); -} - -function createBuildFile(packageDir: string) { - const buildTemplateDir = joinPath(__dirname, '../templates/build'); - nunjucks.configure(buildTemplateDir, { autoescape: true }); - const buildFile = nunjucks.render('build.yml.j2', { ecs_version: '8.11.0' }); + await asyncEnsureDir(logoDir); + await asyncCopy(joinPath(imgTemplateDir, 'logo.svg'), joinPath(logoDir, 'logo.svg')); +} +async function createBuildFile(packageDir: string): Promise { + const buildFile = nunjucks.render('build.yml.njk', { ecs_version: '8.11.0' }); const buildDir = joinPath(packageDir, '_dev/build'); - 
ensureDir(buildDir); - writeFileSync(joinPath(buildDir, 'build.yml'), buildFile, 'utf-8'); -} -function createChangelog(packageDir: string, integration: Integration): void { - const changelogTemplateDir = joinPath(__dirname, '../templates/img'); - nunjucks.configure(changelogTemplateDir, { autoescape: true }); + await asyncEnsureDir(buildDir); + await asyncCreate(joinPath(buildDir, 'build.yml'), buildFile); +} - const changelogTemplate = nunjucks.render('changelog.yml.j2', { +async function createChangelog(packageDir: string, integration: Integration): Promise { + const changelogTemplate = nunjucks.render('changelog.yml.njk', { initial_version: integration.initialVersion, }); - writeFileSync(joinPath(packageDir, 'changelog.yml'), changelogTemplate, 'utf-8'); + await asyncCreate(joinPath(packageDir, 'changelog.yml'), changelogTemplate); } -function createReadme(packageDir: string, integration: Integration) { - const readmeDir = joinPath(packageDir, '_dev/build/docs/'); - mkdirSync(readmeDir, { recursive: true }); - - const readmeTemplatesDir = joinPath(__dirname, '../templates/readme'); - nunjucks.configure(readmeTemplatesDir, { autoescape: true }); - - const readmeTemplate = nunjucks.render('README.md.j2', { +async function createReadme(packageDir: string, integration: Integration) { + const readmeDirPath = joinPath(packageDir, '_dev/build/docs/'); + await asyncEnsureDir(readmeDirPath); + const readmeTemplate = nunjucks.render('README.md.njk', { package_name: integration.name, data_streams: integration.dataStreams, }); - writeFileSync(joinPath(readmeDir, 'README.md'), readmeTemplate, { encoding: 'utf-8' }); + await asyncCreate(joinPath(readmeDirPath, 'README.md'), readmeTemplate); +} + +async function createZipArchive(tmpPackageDir: string): Promise { + const zip = new AdmZip(); + console.log('Zipping package', tmpPackageDir); + zip.addLocalFolder(tmpPackageDir); + return zip.toBuffer(); } -export function createPipeline(specificDataStreamDir: string, pipeline: 
object) { - const filePath = joinPath(specificDataStreamDir, 'elasticsearch/ingest_pipeline/default.yml'); - const yamlContent = '---\n' + yaml.dump(pipeline, { sortKeys: false }); - writeFileSync(filePath, yamlContent, 'utf-8'); +async function createPackageManifest(packageDir: string, integration: Integration): Promise { + const uniqueInputs: { [key: string]: { type: string; title: string; description: string } } = {}; + + integration.dataStreams.forEach((dataStream: DataStream) => { + dataStream.inputTypes.forEach((inputType: string) => { + if (!uniqueInputs[inputType]) { + uniqueInputs[inputType] = { + type: inputType, + title: dataStream.title, + description: dataStream.description, + }; + } + }); + }); + + const uniqueInputsList = Object.values(uniqueInputs); + + const packageManifest = nunjucks.render('package_manifest.yml.njk', { + format_version: integration.formatVersion, + package_title: integration.title, + package_name: integration.name, + package_version: integration.initialVersion, + package_description: integration.description, + package_owner: integration.owner, + min_version: integration.minKibanaVersion, + inputs: uniqueInputsList, + }); + + await asyncCreate(joinPath(packageDir, 'manifest.yml'), packageManifest); } diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/data_stream.ts b/x-pack/plugins/integration_assistant/server/integration_builder/data_stream.ts index 626c05074442e..8e80a1d4648dc 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/data_stream.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/data_stream.ts @@ -5,127 +5,127 @@ * 2.0. 
*/ -import fs from 'fs'; -import path from 'path'; +import { join as joinPath } from 'path'; import nunjucks from 'nunjucks'; import { DataStream } from '../../common'; +import { asyncCopy, asyncEnsureDir, asyncCreate, asyncListDir } from '../util'; -function createDatastream( +export async function createDatastream( packageName: string, specificDataStreamDir: string, dataStream: DataStream -): void { - nunjucks.configure({ autoescape: true }); +): Promise { const dataStreamName = dataStream.name; - const manifestTemplatesDir = path.join(__dirname, '../templates/manifest'); - const pipelineDir = path.join(specificDataStreamDir, 'elasticsearch', 'ingest_pipeline'); - const env = new nunjucks.Environment(new nunjucks.FileSystemLoader(manifestTemplatesDir)); + const pipelineDir = joinPath(specificDataStreamDir, 'elasticsearch', 'ingest_pipeline'); const title = dataStream.title; const description = dataStream.description; - fs.mkdirSync(specificDataStreamDir, { recursive: true }); - createDataStreamFolders(specificDataStreamDir, pipelineDir); - createPipelineTests(specificDataStreamDir, dataStream.rawSamples, packageName, dataStreamName); + await asyncEnsureDir(specificDataStreamDir); + await createDataStreamFolders(specificDataStreamDir, pipelineDir); + await createPipelineTests( + specificDataStreamDir, + dataStream.rawSamples, + packageName, + dataStreamName + ); - const dsStreams: string[] = []; + const dataStreams: string[] = []; for (const inputType of dataStream.inputTypes) { - // Skip httpjson and cel input types for now, requires new prompts. 
- const inputEntryTemplate = env.getTemplate(`${inputType}_manifest.yml.j2`); const mappedValues = { data_stream_title: title, data_stream_description: description, package_name: packageName, data_stream_name: dataStreamName, }; - const dataStreamManifest = inputEntryTemplate.render(mappedValues); - - const commonTemplate = env.getTemplate('common.yml.j2'); - const commonManifest = commonTemplate.render(mappedValues); + const dataStreamManifest = nunjucks.render(`${inputType}_manifest.yml.njk`, mappedValues); + const commonManifest = nunjucks.render('common_manifest.yml.njk', mappedValues); const combinedManifest = `${dataStreamManifest}\n${commonManifest}`; - dsStreams.push(combinedManifest); - createDataStreamSystemTests( + dataStreams.push(combinedManifest); + + // We comment this out for now, as it's not really needed for custom integrations + /* createDataStreamSystemTests( specificDataStreamDir, inputType, mappedValues, packageName, dataStreamName ); + */ } - const finalManifestTemplate = env.getTemplate('data_stream.yml.j2'); - const finalManifest = finalManifestTemplate.render({ title, data_streams: dsStreams }); + const finalManifest = nunjucks.render('data_stream.yml.njk', { + title, + data_streams: dataStreams, + }); - fs.writeFileSync(path.join(specificDataStreamDir, 'manifest.yml'), finalManifest, 'utf-8'); + await asyncCreate(joinPath(specificDataStreamDir, 'manifest.yml'), finalManifest); } -function createDataStreamFolders(specificDataStreamDir: string, pipelineDir: string): void { - const dataStreamTemplatesDir = path.join(__dirname, '../templates/data_stream'); - for (const item of fs.readdirSync(dataStreamTemplatesDir)) { - const s = path.join(dataStreamTemplatesDir, item); - const d = path.join(specificDataStreamDir, item); - if (fs.lstatSync(s).isDirectory()) { - fs.cpSync(s, d, { recursive: true }); - } else { - fs.copyFileSync(s, d); +async function createDataStreamFolders( + specificDataStreamDir: string, + pipelineDir: string +): Promise { + 
const dataStreamTemplatesDir = joinPath(__dirname, '../templates/data_stream'); + try { + const items = await asyncListDir(dataStreamTemplatesDir); + + for (const item of items) { + const s = joinPath(dataStreamTemplatesDir, item); + const d = joinPath(specificDataStreamDir, item); + await asyncCopy(s, d); } + + await asyncEnsureDir(pipelineDir); + } catch (error) { + throw error; } - fs.mkdirSync(pipelineDir, { recursive: true }); } -function createPipelineTests( +async function createPipelineTests( specificDataStreamDir: string, rawSamples: string[], packageName: string, dataStreamName: string -): void { - const pipelineTestTemplatesDir = path.join(__dirname, '../templates/pipeline_tests'); - const pipelineTestsDir = path.join(specificDataStreamDir, '_dev/test/pipeline'); - fs.mkdirSync(pipelineTestsDir, { recursive: true }); - for (const item of fs.readdirSync(pipelineTestTemplatesDir)) { - const s = path.join(pipelineTestTemplatesDir, item); - const d = path.join(pipelineTestsDir, item); - if (fs.lstatSync(s).isDirectory()) { - fs.cpSync(s, d, { recursive: true }); - } else { - fs.copyFileSync(s, d); - } +): Promise { + const pipelineTestTemplatesDir = joinPath(__dirname, '../templates/pipeline_tests'); + const pipelineTestsDir = joinPath(specificDataStreamDir, '_dev/test/pipeline'); + await asyncEnsureDir(pipelineTestsDir); + const items = await asyncListDir(pipelineTestTemplatesDir); + for (const item of items) { + const s = joinPath(pipelineTestTemplatesDir, item); + const d = joinPath(pipelineTestsDir, item); + await asyncCopy(s, d); } const formattedPackageName = packageName.replace(/_/g, '-'); const formattedDataStreamName = dataStreamName.replace(/_/g, '-'); - const testFileName = path.join( + const testFileName = joinPath( pipelineTestsDir, `test-${formattedPackageName}-${formattedDataStreamName}.log` ); - fs.writeFileSync(testFileName, rawSamples.join('\n'), 'utf-8'); + await asyncCreate(testFileName, rawSamples.join('\n')); } -function 
createDataStreamSystemTests( +// We are skipping this one for now, as its not really needed for custom integrations +/* function createDataStreamSystemTests( specificDataStreamDir: string, inputType: string, mappedValues: Record, packageName: string, dataStreamName: string ): void { - const systemTestTemplatesDir = path.join(__dirname, '../templates/system_tests'); + const systemTestTemplatesDir = joinPath(__dirname, '../templates/system_tests'); nunjucks.configure({ autoescape: true }); const env = new nunjucks.Environment(new nunjucks.FileSystemLoader(systemTestTemplatesDir)); mappedValues.package_name = packageName.replace(/_/g, '-'); mappedValues.data_stream_name = dataStreamName.replace(/_/g, '-'); - const systemTestFolder = path.join(specificDataStreamDir, '_dev/test/system'); + const systemTestFolder = joinPath(specificDataStreamDir, '_dev/test/system'); fs.mkdirSync(systemTestFolder, { recursive: true }); - const systemTestTemplate = env.getTemplate(`test-${inputType}-config.yml.j2`); + const systemTestTemplate = env.getTemplate(`test-${inputType}-config.yml.njk`); const systemTestRendered = systemTestTemplate.render(mappedValues); - const systemTestFileName = path.join(systemTestFolder, `test-${inputType}-config.yml`); + const systemTestFileName = joinPath(systemTestFolder, `test-${inputType}-config.yml`); fs.writeFileSync(systemTestFileName, systemTestRendered, 'utf-8'); -} - -export { - createDatastream, - createDataStreamFolders, - createPipelineTests, - createDataStreamSystemTests, -}; +}*/ diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/dev_folders.ts b/x-pack/plugins/integration_assistant/server/integration_builder/dev_folders.ts index 7784ee91bc3b3..64b9a0ef61d48 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/dev_folders.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/dev_folders.ts @@ -5,24 +5,16 @@ * 2.0. 
*/ -import * as fs from 'fs'; -import * as path from 'path'; +import { join as joinPath } from 'path'; import nunjucks from 'nunjucks'; import { Integration } from '../../common'; -import { ensureDir } from '../util/util'; +import { asyncEnsureDir, asyncCreate } from '../util'; -export function createPackageSystemTests(integrationDir: string, integration: Integration) { - const systemTestsTemplatesDir = path.join(__dirname, '../templates/system_tests'); - const systemTestsDockerDir = path.join(integrationDir, '_dev/deploy/docker/'); - const systemTestsSamplesDir = path.join(systemTestsDockerDir, 'sample_logs'); - ensureDir(systemTestsSamplesDir); +export async function createPackageSystemTests(integrationDir: string, integration: Integration) { + const systemTestsDockerDir = joinPath(integrationDir, '_dev/deploy/docker/'); + const systemTestsSamplesDir = joinPath(systemTestsDockerDir, 'sample_logs'); + await asyncEnsureDir(systemTestsSamplesDir); - nunjucks.configure(systemTestsTemplatesDir, { autoescape: true }); - - const systemTestDockerTemplate = fs.readFileSync( - path.join(systemTestsTemplatesDir, 'docker-compose.yml.j2'), - 'utf-8' - ); const streamVersion = integration.streamVersion || '0.13.0'; const dockerComposeVersion = integration.dockerComposeVersion || '2.3'; const dockerServices: string[] = []; @@ -30,33 +22,29 @@ export function createPackageSystemTests(integrationDir: string, integration: In const packageName = integration.name.replace(/_/g, '-'); const dataStreamName = stream.name.replace(/_/g, '-'); - const systemTestFileName = path.join( + const systemTestFileName = joinPath( systemTestsSamplesDir, `test-${packageName}-${dataStreamName}.log` ); const rawSamplesContent = stream.rawSamples.join('\n'); - fs.writeFileSync(systemTestFileName, rawSamplesContent, 'utf-8'); + await asyncCreate(systemTestFileName, rawSamplesContent); for (const inputType of stream.inputTypes) { - const systemTestServiceTemplate = fs.readFileSync( - 
path.join(systemTestsTemplatesDir, `service-${inputType}.j2`), - 'utf-8' - ); const mappedValues = { package_name: packageName, data_stream_name: dataStreamName, stream_version: streamVersion, }; - const renderedService = nunjucks.renderString(systemTestServiceTemplate, mappedValues); + const renderedService = nunjucks.render(`service-${inputType}.njk`, mappedValues); dockerServices.push(renderedService); } } - const renderedDockerCompose = nunjucks.renderString(systemTestDockerTemplate, { + const renderedDockerCompose = nunjucks.render('docker-compose.yml.njk', { services: dockerServices.join('\n'), docker_compose_version: dockerComposeVersion, }); - const dockerComposeFileName = path.join(systemTestsDockerDir, 'docker-compose.yml'); - fs.writeFileSync(dockerComposeFileName, renderedDockerCompose, 'utf-8'); + const dockerComposeFileName = joinPath(systemTestsDockerDir, 'docker-compose.yml'); + await asyncCreate(dockerComposeFileName, renderedDockerCompose); } diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts b/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts index 5613ff6ef6f17..b3b5c03c3f3c5 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts @@ -4,49 +4,44 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -import * as fs from 'fs'; -import * as path from 'path'; + import nunjucks from 'nunjucks'; -import { mergeSamples } from '../util/samples'; -import { generateFields } from '../util/samples'; + +import { generateFields, mergeSamples, asyncCreate } from '../util'; interface Doc { [key: string]: any; } -function createFieldMapping( +export async function createFieldMapping( packageName: string, dataStreamName: string, specificDataStreamDir: string, docs: Doc[] -): void { - const fieldsTemplatesDir = path.join(__dirname, '../templates/fields'); - - const env = nunjucks.configure(fieldsTemplatesDir, { autoescape: true }); - - createBaseFields(specificDataStreamDir, packageName, dataStreamName, env); - createCustomFields(specificDataStreamDir, docs); +): Promise { + await createBaseFields(specificDataStreamDir, packageName, dataStreamName); + await createCustomFields(specificDataStreamDir, docs); } -function createBaseFields( +async function createBaseFields( specificDataStreamDir: string, packageName: string, - dataStreamName: string, - env: nunjucks.Environment -): void { - const baseFieldsTemplate = env.getTemplate('base-fields.yml.njk'); + dataStreamName: string +): Promise { const datasetName = `${packageName}.${dataStreamName}`; - const baseFieldsResult = baseFieldsTemplate.render({ module: packageName, dataset: datasetName }); - - fs.writeFileSync(`${specificDataStreamDir}/fields/base-fields.yml`, baseFieldsResult, { - encoding: 'utf-8', + const baseFields = nunjucks.render('base-fields.yml.njk', { + module: packageName, + dataset: datasetName, }); + + await asyncCreate(`${specificDataStreamDir}/fields/base-fields.yml`, baseFields); } -function createCustomFields(specificDataStreamDir: string, pipelineResults: Doc[]): void { +async function createCustomFields( + specificDataStreamDir: string, + pipelineResults: Doc[] +): Promise { const mergedResults = mergeSamples(pipelineResults); const fieldKeys = generateFields(mergedResults); - 
fs.writeFileSync(`${specificDataStreamDir}/fields/fields.yml`, fieldKeys, { encoding: 'utf-8' }); + await asyncCreate(`${specificDataStreamDir}/fields/fields.yml`, fieldKeys); } - -export { createFieldMapping }; diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/manifest.ts b/x-pack/plugins/integration_assistant/server/integration_builder/manifest.ts deleted file mode 100644 index dbea3c5c57673..0000000000000 --- a/x-pack/plugins/integration_assistant/server/integration_builder/manifest.ts +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import * as fs from 'fs'; -import * as path from 'path'; -import nunjucks from 'nunjucks'; -import { Integration, DataStream } from '../../common'; - -export function createPackageManifest(packageDir: string, integration: Integration) { - const manifestTemplatesDir = path.join(__dirname, '../templates/manifest'); - const uniqueInputs: { [key: string]: { type: string; title: string; description: string } } = {}; - - integration.dataStreams.forEach((dataStream: DataStream) => { - dataStream.inputTypes.forEach((inputType: string) => { - if (!uniqueInputs[inputType]) { - uniqueInputs[inputType] = { - type: inputType, - title: dataStream.title, - description: dataStream.description, - }; - } - }); - }); - - const uniqueInputsList = Object.values(uniqueInputs); - - nunjucks.configure(manifestTemplatesDir, { autoescape: true }); - - const template = nunjucks.render('package.yml.j2', { - format_version: integration.formatVersion, - package_title: integration.title, - package_name: integration.name, - package_version: integration.initialVersion, - package_description: integration.description, - package_owner: integration.owner, - min_version: integration.minKibanaVersion, - 
inputs: uniqueInputsList, - }); - - fs.writeFileSync(path.join(packageDir, 'manifest.yml'), template, { encoding: 'utf-8' }); -} diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/pipeline.ts b/x-pack/plugins/integration_assistant/server/integration_builder/pipeline.ts new file mode 100644 index 0000000000000..e3f8726ae69de --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/integration_builder/pipeline.ts @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +import { join as joinPath } from 'path'; +import yaml from 'js-yaml'; +import { asyncCreate } from '../util'; + +export async function createPipeline(specificDataStreamDir: string, pipeline: object) { + const filePath = joinPath(specificDataStreamDir, 'elasticsearch/ingest_pipeline/default.yml'); + const yamlContent = '---\n' + yaml.dump(pipeline, { sortKeys: false }); + await asyncCreate(filePath, yamlContent); +} diff --git a/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts b/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts index 34cf10acad850..cf03eccf2a262 100644 --- a/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts @@ -8,6 +8,8 @@ import { IRouter } from '@kbn/core/server'; import { schema } from '@kbn/config-schema'; import { INTEGRATION_BUILDER_PATH } from '../../common'; +import { buildPackage } from '../integration_builder'; +import type { BuildIntegrationApiRequest } from '../../common'; // TODO: Currently not implemented export function registerIntegrationBuilderRoutes(router: IRouter) { @@ -16,20 +18,53 @@ export function registerIntegrationBuilderRoutes(router: 
IRouter) { path: `${INTEGRATION_BUILDER_PATH}`, validate: { body: schema.object({ - packageName: schema.string(), - packageTitle: schema.string(), - dataStreamName: schema.string(), - dataStreamTitle: schema.string(), - inputTypes: schema.arrayOf(schema.string()), - rawSamples: schema.arrayOf(schema.string()), - ingestPipeline: schema.any(), - docs: schema.arrayOf(schema.any()), + integration: schema.object({ + name: schema.string(), + title: schema.string(), + description: schema.string(), + version: schema.string(), + dataStreams: schema.arrayOf( + schema.object({ + name: schema.string(), + title: schema.string(), + description: schema.string(), + inputTypes: schema.arrayOf(schema.string()), + rawSamples: schema.arrayOf(schema.string()), + pipeline: schema.object({ + name: schema.maybe(schema.string()), + description: schema.maybe(schema.string()), + version: schema.maybe(schema.number()), + processors: schema.arrayOf( + schema.recordOf(schema.string(), schema.object({}, { unknowns: 'allow' })) + ), + on_failure: schema.maybe( + schema.arrayOf( + schema.recordOf(schema.string(), schema.object({}, { unknowns: 'allow' })) + ) + ), + }), + docs: schema.arrayOf(schema.object({}, { unknowns: 'allow' })), + }) + ), + streamVersion: schema.maybe(schema.string()), + dockerComposeVersion: schema.maybe(schema.string()), + initialVersion: schema.string(), + formatVersion: schema.string(), + owner: schema.string(), + minKibanaVersion: schema.string(), + }), }), }, }, + // TODO: This needs to implement CustomHttpResponseOptions async (_, req, res) => { - // TODO: Switch out if/when implemented - return res.custom({ statusCode: 501 }); + const { integration } = req.body as BuildIntegrationApiRequest; + try { + const zippedIntegration = await buildPackage(integration); + return res.custom({ statusCode: 200, body: zippedIntegration }); + } catch (e) { + return res.customError({ statusCode: 500, body: e }); + } } ); } diff --git 
a/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts b/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts index f72412f964a2f..a469355c9dc56 100644 --- a/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts @@ -8,7 +8,7 @@ import { IRouter } from '@kbn/core/server'; import { schema } from '@kbn/config-schema'; import { CATEGORIZATION_GRAPH_PATH } from '../../common'; -import { CategorizationApiRequest, CategorizationApiResponse } from '../../common/types'; +import type { CategorizationApiRequest, CategorizationApiResponse } from '../../common'; import { getCategorizationGraph } from '../graphs/categorization'; import { ROUTE_HANDLER_TIMEOUT } from '../constants'; diff --git a/x-pack/plugins/integration_assistant/server/templates/readme/README.md.njk b/x-pack/plugins/integration_assistant/server/templates/README.md.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/readme/README.md.njk rename to x-pack/plugins/integration_assistant/server/templates/README.md.njk diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/aws-cloudwatch.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/aws-cloudwatch.yml.hbs similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/agent/aws-cloudwatch.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/agent/aws-cloudwatch.yml.hbs diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/aws-s3.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/aws-s3.yml.hbs similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/agent/aws-s3.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/agent/aws-s3.yml.hbs diff --git 
a/x-pack/plugins/integration_assistant/server/templates/agent/azure-blob-storage.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/azure-blob-storage.yml.hbs similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/agent/azure-blob-storage.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/agent/azure-blob-storage.yml.hbs diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/azure-eventhub.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/azure-eventhub.yml.hbs similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/agent/azure-eventhub.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/agent/azure-eventhub.yml.hbs diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/cloudfoundry.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/cloudfoundry.yml.hbs similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/agent/cloudfoundry.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/agent/cloudfoundry.yml.hbs diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/common.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/common.yml.hbs similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/agent/common.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/agent/common.yml.hbs diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/filestream.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/filestream.yml.hbs similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/agent/filestream.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/agent/filestream.yml.hbs diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/gcp-pubsub.yml.njk 
b/x-pack/plugins/integration_assistant/server/templates/agent/gcp-pubsub.yml.hbs similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/agent/gcp-pubsub.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/agent/gcp-pubsub.yml.hbs diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/gcs.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/gcs.yml.hbs similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/agent/gcs.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/agent/gcs.yml.hbs diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/http_endpoint.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/http_endpoint.yml.hbs similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/agent/http_endpoint.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/agent/http_endpoint.yml.hbs diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/journald.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/journald.yml.hbs similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/agent/journald.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/agent/journald.yml.hbs diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/kafka.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/kafka.yml.hbs similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/agent/kafka.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/agent/kafka.yml.hbs diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/logfile.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/logfile.yml.hbs similarity index 100% rename from 
x-pack/plugins/integration_assistant/server/templates/agent/logfile.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/agent/logfile.yml.hbs diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/tcp.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/tcp.yml.hbs similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/agent/tcp.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/agent/tcp.yml.hbs diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/udp.yml.njk b/x-pack/plugins/integration_assistant/server/templates/agent/udp.yml.hbs similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/agent/udp.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/agent/udp.yml.hbs diff --git a/x-pack/plugins/integration_assistant/server/templates/fields/base-fields.yml.njk b/x-pack/plugins/integration_assistant/server/templates/base-fields.yml.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/fields/base-fields.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/base-fields.yml.njk diff --git a/x-pack/plugins/integration_assistant/server/templates/build/build.yml.njk b/x-pack/plugins/integration_assistant/server/templates/build.yml.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/build/build.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/build.yml.njk diff --git a/x-pack/plugins/integration_assistant/server/templates/changelog/changelog.yml.njk b/x-pack/plugins/integration_assistant/server/templates/changelog.yml.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/changelog/changelog.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/changelog.yml.njk diff --git 
a/x-pack/plugins/integration_assistant/server/templates/manifest/common.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/common_manifest.yml.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/manifest/common.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/manifest/common_manifest.yml.njk diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/package.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/package_manifest.yml.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/manifest/package.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/manifest/package_manifest.yml.njk diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/ssl.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/ssl_manifest.yml.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/manifest/ssl.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/manifest/ssl_manifest.yml.njk diff --git a/x-pack/plugins/integration_assistant/server/templates/pipeline/pipeline.yml.njk b/x-pack/plugins/integration_assistant/server/templates/pipeline.yml.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/pipeline/pipeline.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/pipeline.yml.njk diff --git a/x-pack/plugins/integration_assistant/server/util/async_file.ts b/x-pack/plugins/integration_assistant/server/util/async_file.ts new file mode 100644 index 0000000000000..40713fb208be5 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/util/async_file.ts @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +// This file is created to ensure all file operations are using proper async promises throughout the plugin. +import { readdir, writeFile, mkdir, stat, readFile, cp } from 'fs/promises'; +import { dirname } from 'path'; + +export async function asyncExists(path: string): Promise { + try { + await stat(path); + return true; + } catch (error) { + if (error.code === 'ENOENT') { + return false; + } else { + throw error; + } + } +} + +export async function asyncEnsureDir(dirPath: string): Promise { + const exists = await asyncExists(dirPath); + if (!exists) { + await mkdir(dirPath, { recursive: true }); + } +} + +export async function asyncCreate(path: string, content: string): Promise { + return await writeFile(path, content, { encoding: 'utf-8' }); +} + +export async function asyncCopy(source: string, destination: string): Promise { + try { + // Ensure the destination directory exists + await mkdir(dirname(destination), { recursive: true }); + await cp(source, destination, { recursive: true }); + } catch (error) { + return Promise.reject(error); + } +} + +export async function asyncListDir(path: string): Promise { + return await readdir(path); +} + +export async function asyncRead(path: string): Promise { + return await readFile(path, { encoding: 'utf-8' }); +} diff --git a/x-pack/plugins/integration_assistant/server/util/es.ts b/x-pack/plugins/integration_assistant/server/util/es.ts index 7483a7b690680..cf21e4ee7f9d2 100644 --- a/x-pack/plugins/integration_assistant/server/util/es.ts +++ b/x-pack/plugins/integration_assistant/server/util/es.ts @@ -26,6 +26,7 @@ function formatSample(sample: string): DocTemplate { return formatted; } +// TODO: Replace with real client from route context. 
function newClient(): Client { const client = new Client({ node: 'http://localhost:9200', @@ -66,9 +67,6 @@ export async function handleValidatePipeline( state: EcsMappingState | CategorizationState | RelatedState ): Promise | Partial | Partial> { const [errors, results] = await testPipeline(state.rawSamples, state.currentPipeline); - console.log('testing validate pipeline'); - console.log('errors', errors); - //console.log("results", results); return { errors, pipelineResults: results, diff --git a/x-pack/plugins/integration_assistant/server/util/index.ts b/x-pack/plugins/integration_assistant/server/util/index.ts new file mode 100644 index 0000000000000..1818fd1915274 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/util/index.ts @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +export { + asyncCreate, + asyncEnsureDir, + asyncCopy, + asyncRead, + asyncExists, + asyncListDir, +} from './async_file'; + +export { generateFields, mergeSamples } from './samples'; +export { deepCopy, generateUniqueId } from './util'; diff --git a/x-pack/plugins/integration_assistant/server/util/pipeline.ts b/x-pack/plugins/integration_assistant/server/util/pipeline.ts index 89f075b839c25..bf5f1addde8bc 100644 --- a/x-pack/plugins/integration_assistant/server/util/pipeline.ts +++ b/x-pack/plugins/integration_assistant/server/util/pipeline.ts @@ -5,21 +5,19 @@ * 2.0. 
*/ import { deepCopy } from './util'; - -interface Pipeline { - processors: any[]; -} +import type { Pipeline } from '../../common'; export function combineProcessors(initialPipeline: Pipeline, processors: any[]): Pipeline { // Create a deep copy of the initialPipeline to avoid modifying the original input const currentPipeline = deepCopy(initialPipeline); - // Access and modify the processors list in the copied pipeline + // Add the new processors right before the last 2 remove processors in the initial pipeline. + // This is so all the processors' if conditions are not accessing possibly removed fields. const currentProcessors = currentPipeline.processors; const combinedProcessors = [ - ...currentProcessors.slice(0, -1), + ...currentProcessors.slice(0, -2), ...processors, - ...currentProcessors.slice(-1), + ...currentProcessors.slice(-2), ]; currentPipeline.processors = combinedProcessors; diff --git a/x-pack/plugins/integration_assistant/server/util/util.ts b/x-pack/plugins/integration_assistant/server/util/util.ts index 92a802149904b..e01063540a750 100644 --- a/x-pack/plugins/integration_assistant/server/util/util.ts +++ b/x-pack/plugins/integration_assistant/server/util/util.ts @@ -4,18 +4,11 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0.
*/ -import { existsSync, mkdirSync } from 'fs'; export function deepCopy(obj: T): T { return JSON.parse(JSON.stringify(obj)); } -export function ensureDir(dir: string): void { - if (!existsSync(dir)) { - mkdirSync(dir, { recursive: true }); - } -} - export function generateUniqueId() { return `${Date.now() + Math.floor(Math.random() * 1e13)}`; } From 58ad9f5a5ad6bcdce106d40511969f9a547e7e2f Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Mon, 27 May 2024 09:27:27 +0200 Subject: [PATCH 11/62] adding some fix to build integration api, starting on tests --- .../__jest__/fixtures.ts | 14 ++++++++++ .../integration_assistant/public/app.tsx | 1 + .../server/graphs/ecs/graph.test.ts | 26 +++++++++++++++++++ .../server/graphs/ecs/pipeline.ts | 4 +-- .../integration_builder/build_integration.ts | 1 - .../server/integration_builder/fields.ts | 2 +- .../server/routes/build_integration_routes.ts | 1 - 7 files changed, 44 insertions(+), 5 deletions(-) create mode 100644 x-pack/plugins/integration_assistant/__jest__/fixtures.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts diff --git a/x-pack/plugins/integration_assistant/__jest__/fixtures.ts b/x-pack/plugins/integration_assistant/__jest__/fixtures.ts new file mode 100644 index 0000000000000..f9df80119cfba --- /dev/null +++ b/x-pack/plugins/integration_assistant/__jest__/fixtures.ts @@ -0,0 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +export const ecsMappingGraphRequest = { + rawSamples: [ + '{ "timestamp": "2020-10-19 19:31:31", "id": 0, "class": "general", "event": "status", "connection_id": 16, "account": { "user": "audit_test_user2", "host": "hades.home" }, "login": { "user": "audit_test_user2", "os": "", "ip": "192.0.2.5", "proxy": "" }, "general_data": { "command": "Query", "sql_command": "create_db", "query": "create database audit_test", "status": 0 } }', + '{ "timestamp": "2020-10-19 19:32:10", "id": 0, "class": "connection", "event": "disconnect", "connection_id": 16, "account": { "user": "audit_test_user2", "host": "hades.home" }, "login": { "user": "audit_test_user2", "os": "", "ip": "192.0.2.5", "proxy": "" }, "connection_data": { "connection_type": "ssl" } }', + ], + packageName: 'mysql_enterprise', + dataStreamName: 'audit', +}; diff --git a/x-pack/plugins/integration_assistant/public/app.tsx b/x-pack/plugins/integration_assistant/public/app.tsx index d60db9c77ac30..e6b1d083d504c 100644 --- a/x-pack/plugins/integration_assistant/public/app.tsx +++ b/x-pack/plugins/integration_assistant/public/app.tsx @@ -41,6 +41,7 @@ function RoutingExplorer({ return 'message' in response; }; + // TODO: This is just a temp test UI. It will be removed once the actual UI is implemented return ( diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts new file mode 100644 index 0000000000000..9e56352a29f4e --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { FakeListLLM } from '@langchain/llms/fake'; +import { getEcsGraph } from './graph'; +import { getModel } from '../../providers/bedrock'; + +jest.mock('../../providers/bedrock'); + +describe('runEcsGraph', () => { + it('Should the whole graph successfully with mocked values', () => { + const llm1 = new FakeListLLM({ + responses: ['Response 1', 'Response 2'], + }); + const llm2 = new FakeListLLM({ + responses: ['Response 3', 'Response 4'], + }); + const llm3 = new FakeListLLM({ + responses: ['Response 5', 'Response 6'], + }); + }); +}); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts index a2e342ce37128..05d54e71fa624 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts @@ -6,7 +6,7 @@ */ import { load } from 'js-yaml'; import { Environment, FileSystemLoader } from 'nunjucks'; -import { join } from 'path'; +import { join as joinPath } from 'path'; import { ECS_TYPES } from './constants'; import { EcsMappingState } from '../../types'; @@ -156,7 +156,6 @@ export function createPipeline(state: EcsMappingState): IngestPipeline { const processors = generateProcessors(state.currentMapping, samples); // Retrieve all source field names from convert processors to populate single remove processor: const fieldsToRemove = processors.filter((p: any) => p.convert).map((p: any) => p.convert.field); - const templatesPath = join(__dirname, '../../templates/pipeline'); const mappedValues = { processors, ecs_version: state.ecsVersion, @@ -166,6 +165,7 @@ export function createPipeline(state: EcsMappingState): IngestPipeline { fields_to_remove: fieldsToRemove, }; try { + const templatesPath = joinPath(__dirname, '../../templates'); const env = new Environment(new FileSystemLoader(templatesPath), { autoescape: false, }); diff --git 
a/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts b/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts index d3f23bcdb88a2..cb92fd97f949e 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts @@ -106,7 +106,6 @@ async function createReadme(packageDir: string, integration: Integration) { async function createZipArchive(tmpPackageDir: string): Promise { const zip = new AdmZip(); - console.log('Zipping package', tmpPackageDir); zip.addLocalFolder(tmpPackageDir); return zip.toBuffer(); } diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts b/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts index b3b5c03c3f3c5..d420548276d5d 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts @@ -34,7 +34,7 @@ async function createBaseFields( dataset: datasetName, }); - await asyncCreate(`${specificDataStreamDir}/fields/base-fields.yml`, baseFields); + await asyncCreate(`${specificDataStreamDir}/base-fields.yml`, baseFields); } async function createCustomFields( diff --git a/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts b/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts index cf03eccf2a262..294c8a88c4caa 100644 --- a/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts @@ -22,7 +22,6 @@ export function registerIntegrationBuilderRoutes(router: IRouter) { name: schema.string(), title: schema.string(), description: schema.string(), - version: schema.string(), dataStreams: schema.arrayOf( schema.object({ name: schema.string(), From 
eeb253d220a689fcb8bbc123030d08bea8faa4d4 Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Mon, 27 May 2024 09:56:46 +0200 Subject: [PATCH 12/62] adding some basic styling and conditions to test UI buttons --- .../integration_assistant/public/app.tsx | 10 ++++++- .../components/build_integration_button.tsx | 29 ++++++++++++++----- .../components/categorization_button.tsx | 12 +++++--- .../public/components/ecs_button.tsx | 12 +++++--- .../public/components/related_button.tsx | 12 +++++--- 5 files changed, 54 insertions(+), 21 deletions(-) diff --git a/x-pack/plugins/integration_assistant/public/app.tsx b/x-pack/plugins/integration_assistant/public/app.tsx index e6b1d083d504c..a1c15bb45e4c7 100644 --- a/x-pack/plugins/integration_assistant/public/app.tsx +++ b/x-pack/plugins/integration_assistant/public/app.tsx @@ -31,6 +31,7 @@ function RoutingExplorer({ ); const [currentPipeline, setCurrentPipeline] = useState({} as object); const [resultDocs, setResultDocs] = useState([] as object[]); + const [currentStep, setCurrentStep] = useState(0); const rawSamples = [ '{"ei":0,"event":"user.login","uid":"b675d102-fc25-4f7a-bf5d-96468cc176ea","code":"T1000I","time":"2024-02-23T18:56:50.628Z","cluster_name":"teleport.ericbeahan.com","user":"teleport-admin","required_private_key_policy":"none","success":true,"method":"local","mfa_device":{"mfa_device_name":"otp-device","mfa_device_uuid":"d07bf388-af49-4ec2-b8a4-c8a9e785b70b","mfa_device_type":"TOTP"},"user_agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36","addr.remote":"136.61.214.196:50332"}', 
'{"ei":0,"event":"cert.create","uid":"efd326fc-dd13-4df8-acef-3102c2d717d3","code":"TC000I","time":"2024-02-23T18:56:50.653Z","cluster_name":"teleport.ericbeahan.com","cert_type":"user","identity":{"user":"teleport-admin","roles":["access","editor"],"logins":["root","ubuntu","ec2-user","-teleport-internal-join"],"expires":"2024-02-24T06:56:50.648137154Z","route_to_cluster":"teleport.ericbeahan.com","traits":{"aws_role_arns":null,"azure_identities":null,"db_names":null,"db_roles":null,"db_users":null,"gcp_service_accounts":null,"host_user_gid":[""],"host_user_uid":[""],"kubernetes_groups":null,"kubernetes_users":null,"logins":["root","ubuntu","ec2-user"],"windows_logins":null},"teleport_cluster":"teleport.ericbeahan.com","client_ip":"136.61.214.196","prev_identity_expires":"0001-01-01T00:00:00Z","private_key_policy":"none"}}', @@ -55,6 +56,8 @@ function RoutingExplorer({ diff --git a/x-pack/plugins/integration_assistant/public/components/build_integration_button.tsx b/x-pack/plugins/integration_assistant/public/components/build_integration_button.tsx index d927edd7a3b5b..ad1dce2e6f485 100644 --- a/x-pack/plugins/integration_assistant/public/components/build_integration_button.tsx +++ b/x-pack/plugins/integration_assistant/public/components/build_integration_button.tsx @@ -15,15 +15,16 @@ interface BuildIntegrationButtonProps { req: BuildIntegrationApiRequest ) => Promise>; rawSamples: any[]; - isFetchError: (response: any) => boolean; + currentStep: number; + setCurrentStep: (step: number) => void; } export const BuildIntegrationButton = ({ runIntegrationBuilder, rawSamples, - isFetchError, + currentStep, + setCurrentStep, }: BuildIntegrationButtonProps) => { const [isLoading, setIsLoading] = useState(false); - const [isDisabled, setIsDisabled] = useState(false); const testdocs = [ { ecs: { @@ -93,6 +94,7 @@ export const BuildIntegrationButton = ({ ], }; async function onBuildIntegrationButtonClick() { + setIsLoading(true); const request = { integration: { name: 
'teleport', @@ -128,17 +130,28 @@ export const BuildIntegrationButton = ({ } as BuildIntegrationApiRequest; try { const builIntegrationResponse = await runIntegrationBuilder(request); - if (!isFetchError(builIntegrationResponse)) { - console.log('finished building integration successfully'); - } + const blob = new Blob([builIntegrationResponse]); + const url = window.URL.createObjectURL(blob); + const a = document.createElement('a'); + document.body.appendChild(a); + a.style = 'display: none'; + a.target = '_self'; + a.href = url; + a.download = 'integration.zip'; + a.click(); + window.URL.revokeObjectURL(url); + setIsLoading(false); + setCurrentStep(4); } catch (e) { + setIsLoading(false); console.log(e); } } return ( Promise>; rawSamples: any[]; currentPipeline: any; + currentStep: number; + setCurrentStep: (step: number) => void; setCurrentPipeline: (pipeline: any) => void; setLastResponse: (response: any) => void; setResultDocs: (docs: any) => void; @@ -25,13 +27,14 @@ export const CategorizationButton = ({ runCategorizationGraph, rawSamples, currentPipeline, + currentStep, + setCurrentStep, setCurrentPipeline, setLastResponse, setResultDocs, isFetchError, }: CategorizationButtonProps) => { const [isLoading, setIsLoading] = useState(false); - const [isDisabled, setIsDisabled] = useState(false); async function onCategorizationButtonClick() { setIsLoading(true); const request = { @@ -52,7 +55,7 @@ export const CategorizationButton = ({ console.log('finished categorization graph without errors, but no results'); } setIsLoading(false); - setIsDisabled(true); + setCurrentStep(2); } } catch (e) { setIsLoading(false); @@ -61,8 +64,9 @@ export const CategorizationButton = ({ } return ( Promise>; rawSamples: any[]; + currentStep: number; + setCurrentStep: (step: number) => void; setCurrentPipeline: (pipeline: any) => void; setLastResponse: (response: any) => void; isFetchError: (response: any) => boolean; @@ -22,12 +24,13 @@ interface EcsButtonProps { export const 
EcsButton = ({ runEcsGraph, rawSamples, + currentStep, + setCurrentStep, setCurrentPipeline, setLastResponse, isFetchError, }: EcsButtonProps) => { const [isLoading, setIsLoading] = useState(false); - const [isDisabled, setIsDisabled] = useState(false); async function onEcsButtonClick() { setIsLoading(true); const request = { @@ -46,7 +49,7 @@ export const EcsButton = ({ console.log('finished running ecs graph without errors, but no results'); } setIsLoading(false); - setIsDisabled(true); + setCurrentStep(1); } } catch (e) { setIsLoading(false); @@ -55,8 +58,9 @@ export const EcsButton = ({ } return ( Promise>; rawSamples: any[]; currentPipeline: any; + currentStep: number; + setCurrentStep: (step: number) => void; setCurrentPipeline: (pipeline: any) => void; setLastResponse: (response: any) => void; setResultDocs: (docs: any) => void; @@ -26,13 +28,14 @@ export const RelatedButton = ({ runRelatedGraph, rawSamples, currentPipeline, + currentStep, + setCurrentStep, setCurrentPipeline, setLastResponse, setResultDocs, isFetchError, }: RelatedButtonProps) => { const [isLoading, setIsLoading] = useState(false); - const [isDisabled, setIsDisabled] = useState(false); async function onRelatedButtonClick() { setIsLoading(true); const request = { @@ -53,7 +56,7 @@ export const RelatedButton = ({ console.log('finished related graph without errors, but no results'); } setIsLoading(false); - setIsDisabled(true); + setCurrentStep(3); } } catch (e) { setIsLoading(false); @@ -62,8 +65,9 @@ export const RelatedButton = ({ } return ( Date: Mon, 27 May 2024 12:00:05 +0200 Subject: [PATCH 13/62] adding some small fixes --- package.json | 2 +- .../server/graphs/ecs/graph.test.ts | 91 ++++++++++++++++--- .../server/graphs/ecs/validate.ts | 2 +- x-pack/yarn.lock | 73 +++++++++++++++ 4 files changed, 154 insertions(+), 14 deletions(-) create mode 100644 x-pack/yarn.lock diff --git a/package.json b/package.json index ebcc2d827c4dd..ca956bfd43dc1 100644 --- a/package.json +++ b/package.json 
@@ -1738,4 +1738,4 @@ "zod-to-json-schema": "^3.22.3" }, "packageManager": "yarn@1.22.21" -} +} \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts index 9e56352a29f4e..0d846030fab0d 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts @@ -5,22 +5,89 @@ * 2.0. */ -import { FakeListLLM } from '@langchain/llms/fake'; +import { FakeListLLM } from 'langchain/llms/fake'; import { getEcsGraph } from './graph'; import { getModel } from '../../providers/bedrock'; +import { ecsMappingGraphRequest } from '../../../__jest__/fixtures'; +import { handleEcsMapping } from './mapping'; +import { handleDuplicates } from './duplicates'; +import { handleMissingKeys } from './missing'; +import { handleInvalidEcs } from './invalid'; +import { handleValidateMappings } from './validate'; -jest.mock('../../providers/bedrock'); +const llm = new FakeListLLM({ + responses: ["I'll callback later.", "You 'console' them!"], +}); + +jest.mock('./mapping'); +jest.mock('./duplicates'); +jest.mock('./missing'); +jest.mock('./invalid'); +jest.mock('./validate'); +jest.mock('../../providers/bedrock', () => ({ + getModel: jest.fn().mockReturnValue(llm), +})); describe('runEcsGraph', () => { - it('Should the whole graph successfully with mocked values', () => { - const llm1 = new FakeListLLM({ - responses: ['Response 1', 'Response 2'], - }); - const llm2 = new FakeListLLM({ - responses: ['Response 3', 'Response 4'], - }); - const llm3 = new FakeListLLM({ - responses: ['Response 5', 'Response 6'], - }); + beforeEach(() => { + // Mocked responses for each node that requires an LLM API call/response. 
+ const mockInvokeMapping = jest.fn().mockResolvedValue('mocked mapping result'); + const mockInvokeDuplicates = jest.fn().mockResolvedValue('mocked duplicates result'); + const mockInvokeMissingKeys = jest.fn().mockResolvedValue('mocked missing keys result'); + const mockInvokeInvalidEcs = jest.fn().mockResolvedValue('mocked invalid ecs result'); + const mockInvokeValidateMappings = jest + .fn() + .mockResolvedValue('mocked validate mappings result'); + + // Mock the internal implementations of the functions + (handleEcsMapping as jest.Mock).mockImplementation(async () => ({ + currentMapping: await mockInvokeMapping(), + lastExecutedChain: 'ecsMapping', + })); + + (handleDuplicates as jest.Mock).mockImplementation(async () => ({ + currentMapping: await mockInvokeDuplicates(), + lastExecutedChain: 'duplicateFields', + })); + + (handleMissingKeys as jest.Mock).mockImplementation(async () => ({ + currentMapping: await mockInvokeMissingKeys(), + lastExecutedChain: 'missingKeys', + })); + + (handleInvalidEcs as jest.Mock).mockImplementation(async () => ({ + currentMapping: await mockInvokeInvalidEcs(), + lastExecutedChain: 'invalidEcs', + })); + + (handleValidateMappings as jest.Mock).mockImplementation(async () => ({ + currentMapping: await mockInvokeValidateMappings(), + lastExecutedChain: 'validateMappings', + })); + }); + it('Ensures that the graph compiles', async () => { + // When a defined langgraph runs graph.compile() it will error if the graph has any issues. + // Common issues for example detecting a node has no next step, or there is a infinite loop between them. + try { + await getEcsGraph(); + } catch (error) { + fail(`getEcsGraph threw an error: ${error}`); + } + }); + it('Runs the whole graph, with mocked outputs from the LLM.', async () => { + // The mocked outputs are specifically crafted to trigger ALL different conditions, allowing us to test the whole graph. + // This is why we have all the expects ensuring each function was called. 
+ + const ecsGraph = await getEcsGraph(); + const results = await ecsGraph.invoke(ecsMappingGraphRequest); + expect(results).toBe('mocked graph'); + + // Check if the functions were called + expect(handleEcsMapping).toHaveBeenCalled(); + expect(handleDuplicates).toHaveBeenCalled(); + expect(handleMissingKeys).toHaveBeenCalled(); + expect(handleInvalidEcs).toHaveBeenCalled(); + expect(handleValidateMappings).toHaveBeenCalled(); + expects(getModel).toHaveBeenCalled(); }); }); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts index 078eae31d36f8..66f923e6c11b7 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts @@ -151,6 +151,6 @@ export function handleValidateMappings(state: EcsMappingState): AnyObject { missingKeys, duplicateFields, invalidEcsFields, - lastExecutedChain: 'validate_mappings', + lastExecutedChain: 'validateMappings', }; } diff --git a/x-pack/yarn.lock b/x-pack/yarn.lock new file mode 100644 index 0000000000000..81a53aa291fe3 --- /dev/null +++ b/x-pack/yarn.lock @@ -0,0 +1,73 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@smithy/is-array-buffer@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/is-array-buffer/-/is-array-buffer-3.0.0.tgz#9a95c2d46b8768946a9eec7f935feaddcffa5e7a" + integrity sha512-+Fsu6Q6C4RSJiy81Y8eApjEB5gVtM+oFKTffg+jSuwtvomJJrhUJBu2zS8wjXSgH/g1MKEWrzyChTBe6clb5FQ== + dependencies: + tslib "^2.6.2" + +"@smithy/signature-v4@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/signature-v4/-/signature-v4-3.0.0.tgz#f536d0abebfeeca8e9aab846a4042658ca07d3b7" + integrity sha512-kXFOkNX+BQHe2qnLxpMEaCRGap9J6tUGLzc3A9jdn+nD4JdMwCKTJ+zFwQ20GkY+mAXGatyTw3HcoUlR39HwmA== + dependencies: + "@smithy/is-array-buffer" "^3.0.0" + "@smithy/types" "^3.0.0" + "@smithy/util-hex-encoding" "^3.0.0" + "@smithy/util-middleware" "^3.0.0" + "@smithy/util-uri-escape" "^3.0.0" + "@smithy/util-utf8" "^3.0.0" + tslib "^2.6.2" + +"@smithy/types@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/types/-/types-3.0.0.tgz#00231052945159c64ffd8b91e8909d8d3006cb7e" + integrity sha512-VvWuQk2RKFuOr98gFhjca7fkBS+xLLURT8bUjk5XQoV0ZLm7WPwWPPY3/AwzTLuUBDeoKDCthfe1AsTUWaSEhw== + dependencies: + tslib "^2.6.2" + +"@smithy/util-buffer-from@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-buffer-from/-/util-buffer-from-3.0.0.tgz#559fc1c86138a89b2edaefc1e6677780c24594e3" + integrity sha512-aEOHCgq5RWFbP+UDPvPot26EJHjOC+bRgse5A8V3FSShqd5E5UN4qc7zkwsvJPPAVsf73QwYcHN1/gt/rtLwQA== + dependencies: + "@smithy/is-array-buffer" "^3.0.0" + tslib "^2.6.2" + +"@smithy/util-hex-encoding@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-hex-encoding/-/util-hex-encoding-3.0.0.tgz#32938b33d5bf2a15796cd3f178a55b4155c535e6" + integrity sha512-eFndh1WEK5YMUYvy3lPlVmYY/fZcQE1D8oSf41Id2vCeIkKJXPcYDCZD+4+xViI6b1XSd7tE+s5AmXzz5ilabQ== + dependencies: + tslib "^2.6.2" + +"@smithy/util-middleware@^3.0.0": + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/@smithy/util-middleware/-/util-middleware-3.0.0.tgz#64d775628b99a495ca83ce982f5c83aa45f1e894" + integrity sha512-q5ITdOnV2pXHSVDnKWrwgSNTDBAMHLptFE07ua/5Ty5WJ11bvr0vk2a7agu7qRhrCFRQlno5u3CneU5EELK+DQ== + dependencies: + "@smithy/types" "^3.0.0" + tslib "^2.6.2" + +"@smithy/util-uri-escape@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-uri-escape/-/util-uri-escape-3.0.0.tgz#e43358a78bf45d50bb736770077f0f09195b6f54" + integrity sha512-LqR7qYLgZTD7nWLBecUi4aqolw8Mhza9ArpNEQ881MJJIU2sE5iHCK6TdyqqzcDLy0OPe10IY4T8ctVdtynubg== + dependencies: + tslib "^2.6.2" + +"@smithy/util-utf8@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-utf8/-/util-utf8-3.0.0.tgz#1a6a823d47cbec1fd6933e5fc87df975286d9d6a" + integrity sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA== + dependencies: + "@smithy/util-buffer-from" "^3.0.0" + tslib "^2.6.2" + +tslib@^2.6.2: + version "2.6.2" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" + integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== From 49f9db3cac9393abef826e05a6616dd9ad56003e Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Mon, 27 May 2024 14:38:56 +0200 Subject: [PATCH 14/62] adding some initial jest tests --- .../__jest__/fake_llm.ts | 104 +++++ .../__jest__/fixtures.ts | 360 +++++++++++++++++- .../integration_assistant/jest.config.js | 3 + .../server/graphs/ecs/graph.test.ts | 53 +-- .../server/graphs/ecs/graph.ts | 1 + .../server/graphs/ecs/validate.ts | 1 - 6 files changed, 492 insertions(+), 30 deletions(-) create mode 100644 x-pack/plugins/integration_assistant/__jest__/fake_llm.ts diff --git a/x-pack/plugins/integration_assistant/__jest__/fake_llm.ts b/x-pack/plugins/integration_assistant/__jest__/fake_llm.ts new file mode 100644 index 0000000000000..7ea027b57eb33 --- /dev/null 
+++ b/x-pack/plugins/integration_assistant/__jest__/fake_llm.ts @@ -0,0 +1,104 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { GenerationChunk } from '@langchain/core/outputs'; +import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager'; +import { LLM, BaseLLMParams } from '@langchain/core/language_models/llms'; + +/** + * Interface for the input parameters specific to the Fake List model. + */ +export interface FakeListInput extends BaseLLMParams { + /** Responses to return */ + responses: string[]; + + /** Time to sleep in milliseconds between responses */ + sleep?: number; +} + +/** + * A fake LLM that returns a predefined list of responses. It can be used for + * testing purposes. + */ +export class FakeListLLM extends LLM { + static lc_name() { + return 'FakeListLLM'; + } + + responses: string[]; + + i = 0; + + sleep?: number; + + constructor({ responses, sleep }: FakeListInput) { + super({}); + this.responses = responses; + this.sleep = sleep; + } + + _llmType() { + return 'fake-list'; + } + + async _call( + _prompt: string, + _options: this['ParsedCallOptions'], + _runManager?: CallbackManagerForLLMRun + ): Promise { + const response = this._currentResponse(); + this._incrementResponse(); + await this._sleepIfRequested(); + + return response; + } + + _currentResponse() { + return this.responses[this.i]; + } + + _incrementResponse() { + if (this.i < this.responses.length - 1) { + this.i += 1; + } else { + this.i = 0; + } + } + + async *_streamResponseChunks( + _input: string, + _options: this['ParsedCallOptions'], + _runManager?: CallbackManagerForLLMRun + ): AsyncGenerator { + const response = this._currentResponse(); + this._incrementResponse(); + + for await (const text of response) { + await 
this._sleepIfRequested(); + yield this._createResponseChunk(text); + } + } + + async _sleepIfRequested() { + if (this.sleep !== undefined) { + await this._sleep(); + } + } + + async _sleep() { + return new Promise((resolve) => { + setTimeout(() => resolve(), this.sleep); + }); + } + + _createResponseChunk(text: string): GenerationChunk { + return new GenerationChunk({ + text, + generationInfo: {}, + }); + } +} diff --git a/x-pack/plugins/integration_assistant/__jest__/fixtures.ts b/x-pack/plugins/integration_assistant/__jest__/fixtures.ts index f9df80119cfba..056cfd2ecef15 100644 --- a/x-pack/plugins/integration_assistant/__jest__/fixtures.ts +++ b/x-pack/plugins/integration_assistant/__jest__/fixtures.ts @@ -4,11 +4,365 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -export const ecsMappingGraphRequest = { +export const ecsMappingMockedRequest = { rawSamples: [ - '{ "timestamp": "2020-10-19 19:31:31", "id": 0, "class": "general", "event": "status", "connection_id": 16, "account": { "user": "audit_test_user2", "host": "hades.home" }, "login": { "user": "audit_test_user2", "os": "", "ip": "192.0.2.5", "proxy": "" }, "general_data": { "command": "Query", "sql_command": "create_db", "query": "create database audit_test", "status": 0 } }', - '{ "timestamp": "2020-10-19 19:32:10", "id": 0, "class": "connection", "event": "disconnect", "connection_id": 16, "account": { "user": "audit_test_user2", "host": "hades.home" }, "login": { "user": "audit_test_user2", "os": "", "ip": "192.0.2.5", "proxy": "" }, "connection_data": { "connection_type": "ssl" } }', + '{ "timestamp": "2020-10-19 19:31:31", "id": 0, "class": "general", "event": "status"}', + '{ "timestamp": "2020-10-19 19:32:10", "id": 0, "class": "connection", "event": "disconnect", "connection_id": 16, "account": { "user": "audit_test_user2", "host": "hades.home" }}', ], packageName: 'mysql_enterprise', dataStreamName: 'audit', }; + +export const ecsMappingExpectedResults 
= { + mapping: { + mysql_enterprise: { + audit: { + timestamp: { + target: '@timestamp', + confidence: 0.99, + type: 'date', + date_formats: ['yyyy-MM-dd HH:mm:ss'], + }, + id: null, + class: null, + connection_id: null, + account: { + user: { + target: 'user.name', + type: 'string', + date_formats: [], + confidence: 1, + }, + host: { + target: 'source.domain', + type: 'string', + date_formats: [], + confidence: 1, + }, + }, + event: { + target: 'event.action', + confidence: 0.8, + type: 'string', + date_formats: [], + }, + }, + }, + }, + pipeline: { + description: 'Pipeline to process mysql_enterprise audit logs', + processors: [ + { + set: { + field: 'ecs.version', + tag: 'set_ecs_version', + value: '8.11.0', + }, + }, + { + rename: { + field: 'message', + target_field: 'event.original', + tag: 'rename_message', + ignore_missing: true, + if: 'ctx.event?.original == null', + }, + }, + { + remove: { + field: 'message', + ignore_missing: true, + tag: 'remove_message', + if: 'ctx.event?.original != null', + }, + }, + { + json: { + field: 'event.original', + tag: 'json_original', + target_field: 'mysql_enterprise.audit', + }, + }, + { + date: { + field: 'mysql_enterprise.audit.timestamp', + target_field: '@timestamp', + formats: ['yyyy-MM-dd HH:mm:ss'], + if: 'ctx.mysql_enterprise?.audit?.timestamp != null', + }, + }, + { + rename: { + field: 'mysql_enterprise.audit.account.user', + target_field: 'user.name', + ignore_missing: true, + }, + }, + { + rename: { + field: 'mysql_enterprise.audit.account.host', + target_field: 'source.domain', + ignore_missing: true, + }, + }, + { + rename: { + field: 'mysql_enterprise.audit.event', + target_field: 'event.action', + ignore_missing: true, + }, + }, + { + script: { + description: 'Drops null/empty values recursively.', + tag: 'script_drop_null_empty_values', + lang: 'painless', + source: + 'boolean dropEmptyFields(Object object) {\n if (object == null || object == "") {\n return true;\n } else if (object instanceof Map) {\n 
((Map) object).values().removeIf(value -> dropEmptyFields(value));\n return (((Map) object).size() == 0);\n } else if (object instanceof List) {\n ((List) object).removeIf(value -> dropEmptyFields(value));\n return (((List) object).length == 0);\n }\n return false;\n}\ndropEmptyFields(ctx);\n', + }, + }, + { + geoip: { + field: 'source.ip', + tag: 'geoip_source_ip', + target_field: 'source.geo', + ignore_missing: true, + }, + }, + { + geoip: { + ignore_missing: true, + database_file: 'GeoLite2-ASN.mmdb', + field: 'source.ip', + tag: 'geoip_source_asn', + target_field: 'source.as', + properties: ['asn', 'organization_name'], + }, + }, + { + rename: { + field: 'source.as.asn', + tag: 'rename_source_as_asn', + target_field: 'source.as.number', + ignore_missing: true, + }, + }, + { + rename: { + field: 'source.as.organization_name', + tag: 'rename_source_as_organization_name', + target_field: 'source.as.organization.name', + ignore_missing: true, + }, + }, + { + geoip: { + field: 'destination.ip', + tag: 'geoip_destination_ip', + target_field: 'destination.geo', + ignore_missing: true, + }, + }, + { + geoip: { + database_file: 'GeoLite2-ASN.mmdb', + field: 'destination.ip', + tag: 'geoip_destination_asn', + target_field: 'destination.as', + properties: ['asn', 'organization_name'], + ignore_missing: true, + }, + }, + { + rename: { + field: 'destination.as.asn', + tag: 'rename_destination_as_asn', + target_field: 'destination.as.number', + ignore_missing: true, + }, + }, + { + rename: { + field: 'destination.as.organization_name', + tag: 'rename_destination_as_organization_name', + target_field: 'destination.as.organization.name', + ignore_missing: true, + }, + }, + { + remove: { + field: null, + ignore_missing: true, + tag: 'remove_fields', + }, + }, + { + remove: { + field: 'event.original', + tag: 'remove_original_event', + if: 'ctx?.tags == null || !(ctx.tags.contains("preserve_original_event"))', + ignore_failure: true, + ignore_missing: true, + }, + }, + ], + 
on_failure: [ + { + append: { + field: 'error.message', + value: + 'Processor {{{_ingest.on_failure_processor_type}}} with tag {{{_ingest.on_failure_processor_tag}}} in pipeline {{{_ingest.on_failure_pipeline}}} failed with message: {{{_ingest.on_failure_message}}}', + }, + }, + { + set: { + field: 'event.kind', + value: 'pipeline_error', + }, + }, + ], + }, +}; + +export const ecsInitialMappingMockedResponse = { + mysql_enterprise: { + audit: { + timestamp: { + target: 'event.action', + confidence: 0.99, + type: 'string', + date_formats: ['yyyy-MM-dd HH:mm:ss'], + }, + id: null, + class: null, + connection_id: null, + account: { + user: { + target: 'user.name', + type: 'string', + date_formats: [], + confidence: 1.0, + }, + host: { + target: 'source.domain', + type: 'string', + date_formats: [], + confidence: 1.0, + }, + }, + event: { + target: 'event.action', + confidence: 0.8, + type: 'string', + date_formats: [], + }, + }, + }, +}; + +export const ecsDuplicateMockedResponse = { + mysql_enterprise: { + audit: { + timestamp: { + target: '@timestamp', + confidence: 0.99, + type: 'date', + date_formats: ['yyyy-MM-dd HH:mm:ss'], + }, + id: null, + connection_id: null, + account: { + user: { + target: 'user.name', + type: 'string', + date_formats: [], + confidence: 1.0, + }, + host: { + target: 'source.domain', + type: 'string', + date_formats: [], + confidence: 1.0, + }, + }, + event: { + target: 'event.action', + confidence: 0.8, + type: 'string', + date_formats: [], + }, + }, + }, +}; + +export const ecsMissingKeysMockedResponse = { + mysql_enterprise: { + audit: { + timestamp: { + target: '@timestamp', + confidence: 0.99, + type: 'date', + date_formats: ['yyyy-MM-dd HH:mm:ss'], + }, + id: null, + class: null, + connection_id: null, + account: { + user: { + target: 'user.name', + type: 'string', + date_formats: [], + confidence: 1.0, + }, + host: { + target: 'source.domain', + type: 'string', + date_formats: [], + confidence: 1.0, + }, + }, + event: { + target: 
'invalid.ecs.field', + confidence: 0.8, + type: 'string', + date_formats: [], + }, + }, + }, +}; + +export const ecsInvalidMappingMockedResponse = { + mysql_enterprise: { + audit: { + timestamp: { + target: '@timestamp', + confidence: 0.99, + type: 'date', + date_formats: ['yyyy-MM-dd HH:mm:ss'], + }, + id: null, + class: null, + connection_id: null, + account: { + user: { + target: 'user.name', + type: 'string', + date_formats: [], + confidence: 1.0, + }, + host: { + target: 'source.domain', + type: 'string', + date_formats: [], + confidence: 1.0, + }, + }, + event: { + target: 'event.action', + confidence: 0.8, + type: 'string', + date_formats: [], + }, + }, + }, +}; diff --git a/x-pack/plugins/integration_assistant/jest.config.js b/x-pack/plugins/integration_assistant/jest.config.js index 3c0a3a0899c95..0125bbac9bac4 100644 --- a/x-pack/plugins/integration_assistant/jest.config.js +++ b/x-pack/plugins/integration_assistant/jest.config.js @@ -13,6 +13,9 @@ module.exports = { coverageReporters: ['text', 'html'], collectCoverageFrom: [ '/x-pack/plugins/integration_assistant/{common,public,server}/**/*.{ts,tsx}', + '!/x-pack/plugins/integration_assistant/{__jest__/**/*', + '!/x-pack/plugins/integration_assistant/*.test.{ts,tsx}', + '!/x-pack/plugins/integration_assistant/*.config.ts', ], setupFiles: ['jest-canvas-mock'], }; diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts index 0d846030fab0d..5f23e7168f12b 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts @@ -5,17 +5,24 @@ * 2.0. 
*/ -import { FakeListLLM } from 'langchain/llms/fake'; +import { FakeListLLM } from '../../../__jest__/fake_llm'; import { getEcsGraph } from './graph'; import { getModel } from '../../providers/bedrock'; -import { ecsMappingGraphRequest } from '../../../__jest__/fixtures'; +import { + ecsMappingMockedRequest, + ecsInitialMappingMockedResponse, + ecsDuplicateMockedResponse, + ecsInvalidMappingMockedResponse, + ecsMissingKeysMockedResponse, + ecsMappingExpectedResults, +} from '../../../__jest__/fixtures'; import { handleEcsMapping } from './mapping'; import { handleDuplicates } from './duplicates'; import { handleMissingKeys } from './missing'; import { handleInvalidEcs } from './invalid'; import { handleValidateMappings } from './validate'; -const llm = new FakeListLLM({ +const mockLlm = new FakeListLLM({ responses: ["I'll callback later.", "You 'console' them!"], }); @@ -23,50 +30,45 @@ jest.mock('./mapping'); jest.mock('./duplicates'); jest.mock('./missing'); jest.mock('./invalid'); -jest.mock('./validate'); -jest.mock('../../providers/bedrock', () => ({ - getModel: jest.fn().mockReturnValue(llm), -})); +jest.mock('../../providers/bedrock'); describe('runEcsGraph', () => { beforeEach(() => { // Mocked responses for each node that requires an LLM API call/response. 
- const mockInvokeMapping = jest.fn().mockResolvedValue('mocked mapping result'); - const mockInvokeDuplicates = jest.fn().mockResolvedValue('mocked duplicates result'); - const mockInvokeMissingKeys = jest.fn().mockResolvedValue('mocked missing keys result'); - const mockInvokeInvalidEcs = jest.fn().mockResolvedValue('mocked invalid ecs result'); - const mockInvokeValidateMappings = jest - .fn() - .mockResolvedValue('mocked validate mappings result'); + const mockGetModel = jest.fn().mockReturnValue(mockLlm); + const mockInvokeMapping = jest.fn().mockResolvedValue(ecsInitialMappingMockedResponse); + const mockInvokeDuplicates = jest.fn().mockResolvedValue(ecsDuplicateMockedResponse); + const mockInvokeMissingKeys = jest.fn().mockResolvedValue(ecsMissingKeysMockedResponse); + const mockInvokeInvalidEcs = jest.fn().mockResolvedValue(ecsInvalidMappingMockedResponse); - // Mock the internal implementations of the functions + // Return a fake LLM to prevent API calls from being made, or require API credentials + (getModel as jest.Mock).mockImplementation(mockGetModel()); + + // Returns the initial response, with one duplicate field, to trigger the next step. (handleEcsMapping as jest.Mock).mockImplementation(async () => ({ currentMapping: await mockInvokeMapping(), lastExecutedChain: 'ecsMapping', })); - + // Returns the response with the duplicate field removed, but missing one to trigger the next step. (handleDuplicates as jest.Mock).mockImplementation(async () => ({ currentMapping: await mockInvokeDuplicates(), lastExecutedChain: 'duplicateFields', })); + // Returns the response with the missing field added, but invalid ECS field to trigger the next step. (handleMissingKeys as jest.Mock).mockImplementation(async () => ({ currentMapping: await mockInvokeMissingKeys(), lastExecutedChain: 'missingKeys', })); + // Returns the response with the invalid ECS field fixed, which finishes the chain. 
(handleInvalidEcs as jest.Mock).mockImplementation(async () => ({ currentMapping: await mockInvokeInvalidEcs(), lastExecutedChain: 'invalidEcs', })); - - (handleValidateMappings as jest.Mock).mockImplementation(async () => ({ - currentMapping: await mockInvokeValidateMappings(), - lastExecutedChain: 'validateMappings', - })); }); it('Ensures that the graph compiles', async () => { - // When a defined langgraph runs graph.compile() it will error if the graph has any issues. + // When getEcsGraph runs, langgraph compiles the graph it will error if the graph has any issues. // Common issues for example detecting a node has no next step, or there is a infinite loop between them. try { await getEcsGraph(); @@ -79,15 +81,14 @@ describe('runEcsGraph', () => { // This is why we have all the expects ensuring each function was called. const ecsGraph = await getEcsGraph(); - const results = await ecsGraph.invoke(ecsMappingGraphRequest); - expect(results).toBe('mocked graph'); + const response = await ecsGraph.invoke(ecsMappingMockedRequest); + console.log(JSON.stringify(response.results, null, 2)); + expect(response.results).toStrictEqual(ecsMappingExpectedResults); // Check if the functions were called expect(handleEcsMapping).toHaveBeenCalled(); expect(handleDuplicates).toHaveBeenCalled(); expect(handleMissingKeys).toHaveBeenCalled(); expect(handleInvalidEcs).toHaveBeenCalled(); - expect(handleValidateMappings).toHaveBeenCalled(); - expects(getModel).toHaveBeenCalled(); }); }); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts index 70ede615e93d5..dd7763fbc2da2 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts @@ -164,5 +164,6 @@ export function getEcsGraph() { }); const compiledEcsGraph = workflow.compile(); + return compiledEcsGraph; } diff --git 
a/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts index 66f923e6c11b7..a40daf6d9c090 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts @@ -146,7 +146,6 @@ export function handleValidateMappings(state: EcsMappingState): AnyObject { const missingKeys = findMissingFields(state?.formattedSamples, state?.currentMapping); const duplicateFields = findDuplicateFields(state?.samples, state?.currentMapping); const invalidEcsFields = findInvalidEcsFields(state?.currentMapping); - return { missingKeys, duplicateFields, From 6659c9f85d264d186ee2ad98206f66c7013744d5 Mon Sep 17 00:00:00 2001 From: Patryk Kopycinski Date: Mon, 27 May 2024 15:03:19 +0200 Subject: [PATCH 15/62] typo --- x-pack/plugins/integration_assistant/jest.config.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugins/integration_assistant/jest.config.js b/x-pack/plugins/integration_assistant/jest.config.js index 0125bbac9bac4..444d4dd315ed3 100644 --- a/x-pack/plugins/integration_assistant/jest.config.js +++ b/x-pack/plugins/integration_assistant/jest.config.js @@ -13,7 +13,7 @@ module.exports = { coverageReporters: ['text', 'html'], collectCoverageFrom: [ '/x-pack/plugins/integration_assistant/{common,public,server}/**/*.{ts,tsx}', - '!/x-pack/plugins/integration_assistant/{__jest__/**/*', + '!/x-pack/plugins/integration_assistant/{__jest__}/**/*', '!/x-pack/plugins/integration_assistant/*.test.{ts,tsx}', '!/x-pack/plugins/integration_assistant/*.config.ts', ], From 83c6ee2df7d8b3aff4ce51a1beef31aca0286c1a Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Mon, 27 May 2024 16:22:28 +0200 Subject: [PATCH 16/62] adding tests for ecs graph --- .../__jest__/fixtures.ts | 125 ++++++++++++++---- .../server/graphs/ecs/graph.test.ts | 2 - 2 files changed, 97 insertions(+), 30 deletions(-) diff --git 
a/x-pack/plugins/integration_assistant/__jest__/fixtures.ts b/x-pack/plugins/integration_assistant/__jest__/fixtures.ts index 056cfd2ecef15..8e2c1ab3f0327 100644 --- a/x-pack/plugins/integration_assistant/__jest__/fixtures.ts +++ b/x-pack/plugins/integration_assistant/__jest__/fixtures.ts @@ -6,8 +6,8 @@ */ export const ecsMappingMockedRequest = { rawSamples: [ - '{ "timestamp": "2020-10-19 19:31:31", "id": 0, "class": "general", "event": "status"}', - '{ "timestamp": "2020-10-19 19:32:10", "id": 0, "class": "connection", "event": "disconnect", "connection_id": 16, "account": { "user": "audit_test_user2", "host": "hades.home" }}', + '{ "timestamp": "2020-10-19 19:31:31", "cpu_usage": 0.1, "class": "general", "event": "status", "test_array": ["test1", "test2"]}', + '{ "timestamp": "2020-10-19 19:32:10", "cpu_usage": 0.2, "class": "connection", "event": "disconnect", "bytes": 16, "account": { "user": "audit_test_user2", "ip": "10.10.10.10" }}', ], packageName: 'mysql_enterprise', dataStreamName: 'audit', @@ -17,6 +17,7 @@ export const ecsMappingExpectedResults = { mapping: { mysql_enterprise: { audit: { + test_array: null, timestamp: { target: '@timestamp', confidence: 0.99, @@ -25,7 +26,18 @@ export const ecsMappingExpectedResults = { }, id: null, class: null, - connection_id: null, + cpu_usage: { + target: 'host.cpu.usage', + confidence: 0.99, + type: 'number', + date_formats: [], + }, + bytes: { + target: 'network.bytes', + confidence: 0.99, + type: 'number', + date_formats: [], + }, account: { user: { target: 'user.name', @@ -33,8 +45,8 @@ export const ecsMappingExpectedResults = { date_formats: [], confidence: 1, }, - host: { - target: 'source.domain', + ip: { + target: 'source.ip', type: 'string', date_formats: [], confidence: 1, @@ -91,6 +103,20 @@ export const ecsMappingExpectedResults = { if: 'ctx.mysql_enterprise?.audit?.timestamp != null', }, }, + { + rename: { + field: 'mysql_enterprise.audit.cpu_usage', + target_field: 'host.cpu.usage', + ignore_missing: 
true, + }, + }, + { + rename: { + field: 'mysql_enterprise.audit.bytes', + target_field: 'network.bytes', + ignore_missing: true, + }, + }, { rename: { field: 'mysql_enterprise.audit.account.user', @@ -99,10 +125,12 @@ export const ecsMappingExpectedResults = { }, }, { - rename: { - field: 'mysql_enterprise.audit.account.host', - target_field: 'source.domain', + convert: { + field: 'mysql_enterprise.audit.account.ip', + target_field: 'source.ip', ignore_missing: true, + ignore_failure: true, + type: 'ip', }, }, { @@ -191,7 +219,7 @@ export const ecsMappingExpectedResults = { }, { remove: { - field: null, + field: ['mysql_enterprise.audit.account.ip'], ignore_missing: true, tag: 'remove_fields', }, @@ -227,15 +255,32 @@ export const ecsMappingExpectedResults = { export const ecsInitialMappingMockedResponse = { mysql_enterprise: { audit: { + test_array: null, timestamp: { target: 'event.action', confidence: 0.99, type: 'string', date_formats: ['yyyy-MM-dd HH:mm:ss'], }, - id: null, class: null, - connection_id: null, + id: { + target: 'file.code_signature.trusted', + confidence: 0.99, + type: 'boolean', + date_formats: [], + }, + cpu_usage: { + target: 'host.cpu.usage', + confidence: 0.99, + type: 'number', + date_formats: [], + }, + bytes: { + target: 'network.bytes', + confidence: 0.99, + type: 'number', + date_formats: [], + }, account: { user: { target: 'user.name', @@ -243,8 +288,8 @@ export const ecsInitialMappingMockedResponse = { date_formats: [], confidence: 1.0, }, - host: { - target: 'source.domain', + ip: { + target: 'source.ip', type: 'string', date_formats: [], confidence: 1.0, @@ -263,6 +308,7 @@ export const ecsInitialMappingMockedResponse = { export const ecsDuplicateMockedResponse = { mysql_enterprise: { audit: { + test_array: null, timestamp: { target: '@timestamp', confidence: 0.99, @@ -270,7 +316,12 @@ export const ecsDuplicateMockedResponse = { date_formats: ['yyyy-MM-dd HH:mm:ss'], }, id: null, - connection_id: null, + bytes: { + target: 
'network.bytes', + confidence: 0.99, + type: 'number', + date_formats: [], + }, account: { user: { target: 'user.name', @@ -278,19 +329,13 @@ export const ecsDuplicateMockedResponse = { date_formats: [], confidence: 1.0, }, - host: { - target: 'source.domain', + ip: { + target: 'source.ip', type: 'string', date_formats: [], confidence: 1.0, }, }, - event: { - target: 'event.action', - confidence: 0.8, - type: 'string', - date_formats: [], - }, }, }, }; @@ -298,6 +343,7 @@ export const ecsDuplicateMockedResponse = { export const ecsMissingKeysMockedResponse = { mysql_enterprise: { audit: { + test_array: null, timestamp: { target: '@timestamp', confidence: 0.99, @@ -306,7 +352,18 @@ export const ecsMissingKeysMockedResponse = { }, id: null, class: null, - connection_id: null, + cpu_usage: { + target: 'host.cpu.usage', + confidence: 0.99, + type: 'number', + date_formats: [], + }, + bytes: { + target: 'network.bytes', + confidence: 0.99, + type: 'number', + date_formats: [], + }, account: { user: { target: 'user.name', @@ -314,8 +371,8 @@ export const ecsMissingKeysMockedResponse = { date_formats: [], confidence: 1.0, }, - host: { - target: 'source.domain', + ip: { + target: 'source.ip', type: 'string', date_formats: [], confidence: 1.0, @@ -334,6 +391,7 @@ export const ecsMissingKeysMockedResponse = { export const ecsInvalidMappingMockedResponse = { mysql_enterprise: { audit: { + test_array: null, timestamp: { target: '@timestamp', confidence: 0.99, @@ -342,7 +400,18 @@ export const ecsInvalidMappingMockedResponse = { }, id: null, class: null, - connection_id: null, + cpu_usage: { + target: 'host.cpu.usage', + confidence: 0.99, + type: 'number', + date_formats: [], + }, + bytes: { + target: 'network.bytes', + confidence: 0.99, + type: 'number', + date_formats: [], + }, account: { user: { target: 'user.name', @@ -350,8 +419,8 @@ export const ecsInvalidMappingMockedResponse = { date_formats: [], confidence: 1.0, }, - host: { - target: 'source.domain', + ip: { + target: 
'source.ip', type: 'string', date_formats: [], confidence: 1.0, diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts index 5f23e7168f12b..4711b46c9a188 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts @@ -20,7 +20,6 @@ import { handleEcsMapping } from './mapping'; import { handleDuplicates } from './duplicates'; import { handleMissingKeys } from './missing'; import { handleInvalidEcs } from './invalid'; -import { handleValidateMappings } from './validate'; const mockLlm = new FakeListLLM({ responses: ["I'll callback later.", "You 'console' them!"], @@ -82,7 +81,6 @@ describe('runEcsGraph', () => { const ecsGraph = await getEcsGraph(); const response = await ecsGraph.invoke(ecsMappingMockedRequest); - console.log(JSON.stringify(response.results, null, 2)); expect(response.results).toStrictEqual(ecsMappingExpectedResults); // Check if the functions were called From 0dd08514a62423a44da159b9995fc5d0e9d53065 Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Wed, 29 May 2024 13:07:10 +0200 Subject: [PATCH 17/62] split handleValidatePipeline to new file for mocks, added initial tests for categorization, moved graph to async --- .../__jest__/fake_llm.ts | 104 ----------- .../__jest__/fixtures/categorization.ts | 171 ++++++++++++++++++ .../{fixtures.ts => fixtures/ecs_mapping.ts} | 29 ++- .../__jest__/fixtures/index.ts | 14 ++ .../integration_assistant/common/types.ts | 1 + .../graphs/categorization/graph.test.ts | 126 +++++++++++++ .../server/graphs/categorization/graph.ts | 8 +- .../server/graphs/categorization/validate.ts | 15 +- .../server/graphs/ecs/duplicates.test.ts | 32 ++++ .../server/graphs/ecs/graph.test.ts | 117 ++++++------ .../server/graphs/ecs/graph.ts | 2 +- .../server/graphs/ecs/invalid.test.ts | 32 ++++ .../server/graphs/ecs/mapping.test.ts | 32 ++++ 
.../server/graphs/ecs/missing.test.ts | 32 ++++ .../server/graphs/related/graph.ts | 4 +- .../integration_assistant/server/types.ts | 1 - .../integration_assistant/server/util/es.ts | 17 +- .../server/util/graph.ts | 19 ++ 18 files changed, 557 insertions(+), 199 deletions(-) delete mode 100644 x-pack/plugins/integration_assistant/__jest__/fake_llm.ts create mode 100644 x-pack/plugins/integration_assistant/__jest__/fixtures/categorization.ts rename x-pack/plugins/integration_assistant/__jest__/{fixtures.ts => fixtures/ecs_mapping.ts} (95%) create mode 100644 x-pack/plugins/integration_assistant/__jest__/fixtures/index.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.test.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts create mode 100644 x-pack/plugins/integration_assistant/server/util/graph.ts diff --git a/x-pack/plugins/integration_assistant/__jest__/fake_llm.ts b/x-pack/plugins/integration_assistant/__jest__/fake_llm.ts deleted file mode 100644 index 7ea027b57eb33..0000000000000 --- a/x-pack/plugins/integration_assistant/__jest__/fake_llm.ts +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import { GenerationChunk } from '@langchain/core/outputs'; -import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager'; -import { LLM, BaseLLMParams } from '@langchain/core/language_models/llms'; - -/** - * Interface for the input parameters specific to the Fake List model. - */ -export interface FakeListInput extends BaseLLMParams { - /** Responses to return */ - responses: string[]; - - /** Time to sleep in milliseconds between responses */ - sleep?: number; -} - -/** - * A fake LLM that returns a predefined list of responses. It can be used for - * testing purposes. - */ -export class FakeListLLM extends LLM { - static lc_name() { - return 'FakeListLLM'; - } - - responses: string[]; - - i = 0; - - sleep?: number; - - constructor({ responses, sleep }: FakeListInput) { - super({}); - this.responses = responses; - this.sleep = sleep; - } - - _llmType() { - return 'fake-list'; - } - - async _call( - _prompt: string, - _options: this['ParsedCallOptions'], - _runManager?: CallbackManagerForLLMRun - ): Promise { - const response = this._currentResponse(); - this._incrementResponse(); - await this._sleepIfRequested(); - - return response; - } - - _currentResponse() { - return this.responses[this.i]; - } - - _incrementResponse() { - if (this.i < this.responses.length - 1) { - this.i += 1; - } else { - this.i = 0; - } - } - - async *_streamResponseChunks( - _input: string, - _options: this['ParsedCallOptions'], - _runManager?: CallbackManagerForLLMRun - ): AsyncGenerator { - const response = this._currentResponse(); - this._incrementResponse(); - - for await (const text of response) { - await this._sleepIfRequested(); - yield this._createResponseChunk(text); - } - } - - async _sleepIfRequested() { - if (this.sleep !== undefined) { - await this._sleep(); - } - } - - async _sleep() { - return new Promise((resolve) => { - setTimeout(() => resolve(), this.sleep); - }); - } - - _createResponseChunk(text: string): GenerationChunk { - return new 
GenerationChunk({ - text, - generationInfo: {}, - }); - } -} diff --git a/x-pack/plugins/integration_assistant/__jest__/fixtures/categorization.ts b/x-pack/plugins/integration_assistant/__jest__/fixtures/categorization.ts new file mode 100644 index 0000000000000..833f05dac21d4 --- /dev/null +++ b/x-pack/plugins/integration_assistant/__jest__/fixtures/categorization.ts @@ -0,0 +1,171 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import type { Pipeline } from '../../common'; + +export const categorizationInitialPipeline: Pipeline = { + description: 'Pipeline to process mysql_enterprise audit logs', + processors: [ + { + set: { + field: 'ecs.version', + value: '8.11.0', + }, + }, + { + rename: { + field: 'message', + target_field: 'event.original', + ignore_missing: true, + if: 'ctx.event?.original == null', + }, + }, + { + remove: { + field: 'event.original', + tag: 'remove_original_event', + if: 'ctx?.tags == null || !(ctx.tags.contains("preserve_original_event"))', + ignore_failure: true, + ignore_missing: true, + }, + }, + ], +}; + +export const categorizationExpectedResults = { + docs: [ + { + key: 'value', + anotherKey: 'anotherValue', + }, + ], + pipeline: { + description: 'Pipeline to process mysql_enterprise audit logs', + processors: [ + { + set: { + field: 'ecs.version', + value: '8.11.0', + }, + }, + { + append: { + field: 'event.type', + value: ['change'], + if: "ctx.mysql_enterprise?.audit?.general_data?.sql_command == 'create_db'", + }, + }, + { + append: { + field: 'event.category', + value: ['database'], + if: "ctx.mysql_enterprise?.audit?.general_data?.sql_command == 'create_db'", + }, + }, + { + rename: { + field: 'message', + target_field: 'event.original', + ignore_missing: true, + if: 'ctx.event?.original == null', + }, + }, + { + 
remove: { + field: 'event.original', + tag: 'remove_original_event', + if: 'ctx?.tags == null || !(ctx.tags.contains("preserve_original_event"))', + ignore_failure: true, + ignore_missing: true, + }, + }, + ], + }, +}; + +export const categorizationInitialMockedResponse = [ + { + append: { + field: 'event.type', + value: ['creation'], + if: "ctx.mysql_enterprise?.audit?.general_data?.sql_command == 'create_db'", + }, + }, + { + append: { + field: 'event.category', + value: ['database'], + if: "ctx.mysql_enterprise.audit.general_data.sql_command == 'create_db'", + }, + }, +]; + +export const categorizationErrorMockedResponse = [ + { + append: { + field: 'event.type', + value: ['creation'], + if: "ctx.mysql_enterprise?.audit?.general_data?.sql_command == 'create_db'", + }, + }, + { + append: { + field: 'event.category', + value: ['database'], + if: "ctx.mysql_enterprise?.audit?.general_data?.sql_command == 'create_db'", + }, + }, +]; + +export const categorizationInvalidMockedResponse = [ + { + append: { + field: 'event.type', + value: ['change'], + if: "ctx.mysql_enterprise?.audit?.general_data?.sql_command == 'create_db'", + }, + }, + { + append: { + field: 'event.category', + value: ['database'], + if: "ctx.mysql_enterprise?.audit?.general_data?.sql_command == 'create_db'", + }, + }, +]; + +export const categorizationReviewMockedResponse = [ + { + append: { + field: 'event.type', + value: ['change'], + if: "ctx.mysql_enterprise?.audit?.general_data?.sql_command == 'create_db'", + }, + }, + { + append: { + field: 'event.category', + value: ['database'], + if: "ctx.mysql_enterprise?.audit?.general_data?.sql_command == 'create_db'", + }, + }, +]; + +export const testPipelineError: [object[], object[]] = [ + [{ error: 'Sample error message 1' }, { error: 'Sample error message 2' }], + [], +]; + +export const testPipelineValidResult: [object[], object[]] = [ + [], + [{ key: 'value', anotherKey: 'anotherValue' }], +]; + +export const testPipelineInvalidEcs: [object[], 
object[]] = [ + [], + [{ event: { type: ['database'], category: ['creation'] }, anotherKey: 'anotherValue' }], +]; diff --git a/x-pack/plugins/integration_assistant/__jest__/fixtures.ts b/x-pack/plugins/integration_assistant/__jest__/fixtures/ecs_mapping.ts similarity index 95% rename from x-pack/plugins/integration_assistant/__jest__/fixtures.ts rename to x-pack/plugins/integration_assistant/__jest__/fixtures/ecs_mapping.ts index 8e2c1ab3f0327..f0ae923def0ef 100644 --- a/x-pack/plugins/integration_assistant/__jest__/fixtures.ts +++ b/x-pack/plugins/integration_assistant/__jest__/fixtures/ecs_mapping.ts @@ -4,15 +4,6 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -export const ecsMappingMockedRequest = { - rawSamples: [ - '{ "timestamp": "2020-10-19 19:31:31", "cpu_usage": 0.1, "class": "general", "event": "status", "test_array": ["test1", "test2"]}', - '{ "timestamp": "2020-10-19 19:32:10", "cpu_usage": 0.2, "class": "connection", "event": "disconnect", "bytes": 16, "account": { "user": "audit_test_user2", "ip": "10.10.10.10" }}', - ], - packageName: 'mysql_enterprise', - dataStreamName: 'audit', -}; - export const ecsMappingExpectedResults = { mapping: { mysql_enterprise: { @@ -435,3 +426,23 @@ export const ecsInvalidMappingMockedResponse = { }, }, }; + +export const ecsTestState = { + ecs: 'teststring', + exAnswer: 'testanswer', + finalized: false, + currentPipeline: { test: 'testpipeline' }, + duplicateFields: [], + missingKeys: [], + invalidEcsFields: [], + results: { test: 'testresults' }, + logFormat: 'testlogformat', + ecsVersion: 'testversion', + currentMapping: { test1: 'test1' }, + lastExecutedChain: 'testchain', + rawSamples: ['{"test1": "test1"}'], + samples: ['{ "test1": "test1" }'], + packageName: 'testpackage', + dataStreamName: 'testdatastream', + formattedSamples: '{"test1": "test1"}', +}; diff --git a/x-pack/plugins/integration_assistant/__jest__/fixtures/index.ts 
b/x-pack/plugins/integration_assistant/__jest__/fixtures/index.ts new file mode 100644 index 0000000000000..76271e0f955a4 --- /dev/null +++ b/x-pack/plugins/integration_assistant/__jest__/fixtures/index.ts @@ -0,0 +1,14 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +export const mockedRequest = { + rawSamples: [ + '{ "timestamp": "2020-10-19 19:31:31", "cpu_usage": 0.1, "class": "general", "event": "status", "test_array": ["test1", "test2"]}', + '{ "timestamp": "2020-10-19 19:32:10", "cpu_usage": 0.2, "class": "connection", "event": "disconnect", "bytes": 16, "account": { "user": "audit_test_user2", "ip": "10.10.10.10" }}', + ], + packageName: 'mysql_enterprise', + dataStreamName: 'audit', +}; diff --git a/x-pack/plugins/integration_assistant/common/types.ts b/x-pack/plugins/integration_assistant/common/types.ts index 86dd55cab9c2e..d4b685a14d249 100644 --- a/x-pack/plugins/integration_assistant/common/types.ts +++ b/x-pack/plugins/integration_assistant/common/types.ts @@ -8,6 +8,7 @@ export interface ESProcessorOptions { on_failure?: ESProcessorItem[]; ignore_failure?: boolean; + ignore_missing?: boolean; if?: string; tag?: string; [key: string]: any; diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts new file mode 100644 index 0000000000000..bb0f4feba5f06 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts @@ -0,0 +1,126 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { FakeLLM } from '@langchain/core/utils/testing'; +import { getCategorizationGraph } from './graph'; +import { getModel } from '../../providers/bedrock'; +import { + categorizationExpectedResults, + categorizationErrorMockedResponse, + categorizationInitialMockedResponse, + categorizationInvalidMockedResponse, + categorizationReviewMockedResponse, + categorizationInitialPipeline, + testPipelineError, + testPipelineValidResult, + testPipelineInvalidEcs, +} from '../../../__jest__/fixtures/categorization'; +import { mockedRequest } from '../../../__jest__/fixtures'; +import { handleReview } from './review'; +import { handleCategorization } from './categorization'; +import { handleErrors } from './errors'; +import { handleInvalidCategorization } from './invalid'; +import { testPipeline } from '../../util/es'; +import { combineProcessors } from '../../util/pipeline'; + +const mockLlm = new FakeLLM({ + response: "I'll callback later.", +}); + +jest.mock('./errors'); +jest.mock('./review'); +jest.mock('./categorization'); +jest.mock('./invalid'); +jest.mock('../../providers/bedrock', () => ({ + getModel: jest.fn(), +})); + +jest.mock('../../util/es', () => ({ + testPipeline: jest.fn(), +})); + +describe('runCategorizationGraph', () => { + beforeEach(() => { + // Mocked responses for each node that requires an LLM API call/response. 
+ const mockInvokeCategorization = jest + .fn() + .mockResolvedValue(categorizationInitialMockedResponse); + const mockInvokeError = jest.fn().mockResolvedValue(categorizationErrorMockedResponse); + const mockInvokeInvalid = jest.fn().mockResolvedValue(categorizationInvalidMockedResponse); + const mockInvokeReview = jest.fn().mockResolvedValue(categorizationReviewMockedResponse); + + // Return a fake LLM to prevent API calls from being made, or require API credentials + (getModel as jest.Mock).mockReturnValue(mockLlm); + + // After this is triggered, the mock of TestPipeline will trigger the expected error, to route to error handler + (handleCategorization as jest.Mock).mockImplementation(async () => ({ + currentPipeline: categorizationInitialPipeline, + currentProcessors: await mockInvokeCategorization(), + reviewed: false, + lastExecutedChain: 'categorization', + })); + // Error pipeline resolves it, though the responce includes an invalid categorization + (handleErrors as jest.Mock).mockImplementation(async () => ({ + currentPipeline: categorizationInitialPipeline, + currentProcessors: await mockInvokeError(), + reviewed: false, + lastExecutedChain: 'error', + })); + // Invalid categorization is resolved and returned correctly, which routes it to a review + (handleInvalidCategorization as jest.Mock).mockImplementation(async () => ({ + currentPipeline: categorizationInitialPipeline, + currentProcessors: await mockInvokeInvalid(), + reviewed: false, + lastExecutedChain: 'invalidCategorization', + })); + // After the review it should route to modelOutput and finish. 
+ (handleReview as jest.Mock).mockImplementation(async () => { + const currentProcessors = await mockInvokeReview(); + const currentPipeline = combineProcessors(categorizationInitialPipeline, currentProcessors); + return { + currentProcessors, + currentPipeline, + reviewed: true, + lastExecutedChain: 'review', + }; + }); + }); + + it('Ensures that the graph compiles', async () => { + try { + await getCategorizationGraph(); + } catch (error) { + // noop + } + }); + + it('Runs the whole graph, with mocked outputs from the LLM.', async () => { + const categorizationGraph = await getCategorizationGraph(); + + (testPipeline as jest.Mock) + .mockResolvedValueOnce(testPipelineValidResult) + .mockResolvedValueOnce(testPipelineError) + .mockResolvedValueOnce(testPipelineInvalidEcs) + .mockResolvedValueOnce(testPipelineValidResult) + .mockResolvedValueOnce(testPipelineValidResult) + .mockResolvedValueOnce(testPipelineValidResult); + + let response; + try { + response = await categorizationGraph.invoke(mockedRequest); + } catch (e) { + // noop + } + expect(response.results).toStrictEqual(categorizationExpectedResults); + + // Check if the functions were called + expect(handleCategorization).toHaveBeenCalled(); + expect(handleErrors).toHaveBeenCalled(); + expect(handleInvalidCategorization).toHaveBeenCalled(); + expect(handleReview).toHaveBeenCalled(); + }); +}); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts index 056dc48cf7c62..e83162b37abac 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts @@ -8,7 +8,7 @@ import { StateGraph, StateGraphArgs, END, START } from '@langchain/langgraph'; import { CategorizationState } from '../../types'; import { modifySamples, formatSamples } from '../../util/samples'; import { handleCategorization } from 
'./categorization'; -import { handleValidatePipeline } from '../../util/es'; +import { handleValidatePipeline } from '../../util/graph'; import { handleCategorizationValidation } from './validate'; import { handleInvalidCategorization } from './invalid'; import { handleErrors } from './errors'; @@ -68,10 +68,6 @@ const graphState: StateGraphArgs['channels'] = { value: (x: object[], y?: object[]) => y ?? x, default: () => [{}], }, - currentMapping: { - value: (x: object, y?: object) => y ?? x, - default: () => ({}), - }, currentPipeline: { value: (x: object, y?: object) => y ?? x, default: () => ({}), @@ -145,7 +141,7 @@ function chainRouter(state: CategorizationState): string { return END; } -export function getCategorizationGraph() { +export async function getCategorizationGraph() { const workflow = new StateGraph({ channels: graphState, }) diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/validate.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/validate.ts index 5bd8eb1012dba..916b2fea25e96 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/validate.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/validate.ts @@ -16,15 +16,15 @@ interface PipelineResult { event?: Event; } -interface ErrorMessage { +interface CategorizationError { error: string; } export function handleCategorizationValidation(state: CategorizationState): { - invalidCategorization: ErrorMessage[]; + invalidCategorization: CategorizationError[]; lastExecutedChain: string; } { - const errors: ErrorMessage[] = []; + const errors: CategorizationError[] = []; const pipelineResults = state.pipelineResults as PipelineResult[]; // Loops through the pipeline results to find invalid categories and types @@ -69,7 +69,7 @@ function createErrorMessage( field: string, errorList: string[], allowedValues: string[] -): ErrorMessage { +): CategorizationError { return { error: `field ${field}'s values 
(${errorList.join( ', ' @@ -119,8 +119,11 @@ type EventCategories = | 'vulnerability' | 'web'; -function getTypeCategoryIncompatibleError(categories: string[], types: string[]): ErrorMessage[] { - const errors: ErrorMessage[] = []; +function getTypeCategoryIncompatibleError( + categories: string[], + types: string[] +): CategorizationError[] { + const errors: CategorizationError[] = []; let unmatchedTypes = new Set(types); const matchCategories = new Set(categories); let categoryExists = false; diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.test.ts new file mode 100644 index 0000000000000..a89a68581a611 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.test.ts @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { FakeLLM } from '@langchain/core/utils/testing'; +import { getModel } from '../../providers/bedrock'; +import { handleDuplicates } from './duplicates'; +import { EcsMappingState } from '../../types'; +import { ecsTestState } from '../../../__jest__/fixtures/ecs_mapping'; + +const mockLlm = new FakeLLM({ + response: '{ "message": "ll callback later."}', +}); + +jest.mock('../../providers/bedrock', () => ({ + getModel: jest.fn(), +})); +const testState: EcsMappingState = ecsTestState; + +describe('Testing duplicates handler', () => { + beforeEach(() => { + (getModel as jest.Mock).mockReturnValue(mockLlm); + }); + it('handleDuplicates()', async () => { + const response = await handleDuplicates(testState); + expect(response.currentMapping).toStrictEqual({ message: 'll callback later.' 
}); + expect(response.lastExecutedChain).toBe('duplicateFields'); + }); +}); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts index 4711b46c9a188..ddeb78d54c3f7 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts @@ -5,88 +5,91 @@ * 2.0. */ -import { FakeListLLM } from '../../../__jest__/fake_llm'; +import { FakeLLM } from '@langchain/core/utils/testing'; import { getEcsGraph } from './graph'; import { getModel } from '../../providers/bedrock'; import { - ecsMappingMockedRequest, ecsInitialMappingMockedResponse, ecsDuplicateMockedResponse, ecsInvalidMappingMockedResponse, ecsMissingKeysMockedResponse, ecsMappingExpectedResults, -} from '../../../__jest__/fixtures'; +} from '../../../__jest__/fixtures/ecs_mapping'; +import { mockedRequest } from '../../../__jest__/fixtures'; import { handleEcsMapping } from './mapping'; import { handleDuplicates } from './duplicates'; import { handleMissingKeys } from './missing'; import { handleInvalidEcs } from './invalid'; -const mockLlm = new FakeListLLM({ - responses: ["I'll callback later.", "You 'console' them!"], +const mockLlm = new FakeLLM({ + response: "I'll callback later.", }); +jest.mock('../../providers/bedrock', () => ({ + getModel: jest.fn(), +})); jest.mock('./mapping'); jest.mock('./duplicates'); jest.mock('./missing'); jest.mock('./invalid'); -jest.mock('../../providers/bedrock'); -describe('runEcsGraph', () => { - beforeEach(() => { - // Mocked responses for each node that requires an LLM API call/response. 
- const mockGetModel = jest.fn().mockReturnValue(mockLlm); - const mockInvokeMapping = jest.fn().mockResolvedValue(ecsInitialMappingMockedResponse); - const mockInvokeDuplicates = jest.fn().mockResolvedValue(ecsDuplicateMockedResponse); - const mockInvokeMissingKeys = jest.fn().mockResolvedValue(ecsMissingKeysMockedResponse); - const mockInvokeInvalidEcs = jest.fn().mockResolvedValue(ecsInvalidMappingMockedResponse); +describe('EcsGraph', () => { + describe('Compiling and Running', () => { + beforeEach(() => { + // Mocked responses for each node that requires an LLM API call/response. + const mockInvokeMapping = jest.fn().mockResolvedValue(ecsInitialMappingMockedResponse); + const mockInvokeDuplicates = jest.fn().mockResolvedValue(ecsDuplicateMockedResponse); + const mockInvokeMissingKeys = jest.fn().mockResolvedValue(ecsMissingKeysMockedResponse); + const mockInvokeInvalidEcs = jest.fn().mockResolvedValue(ecsInvalidMappingMockedResponse); - // Return a fake LLM to prevent API calls from being made, or require API credentials - (getModel as jest.Mock).mockImplementation(mockGetModel()); + // Return a fake LLM to prevent API calls from being made, or require API credentials + (getModel as jest.Mock).mockReturnValue(mockLlm); - // Returns the initial response, with one duplicate field, to trigger the next step. - (handleEcsMapping as jest.Mock).mockImplementation(async () => ({ - currentMapping: await mockInvokeMapping(), - lastExecutedChain: 'ecsMapping', - })); - // Returns the response with the duplicate field removed, but missing one to trigger the next step. - (handleDuplicates as jest.Mock).mockImplementation(async () => ({ - currentMapping: await mockInvokeDuplicates(), - lastExecutedChain: 'duplicateFields', - })); + // Returns the initial response, with one duplicate field, to trigger the next step. 
+ (handleEcsMapping as jest.Mock).mockImplementation(async () => ({ + currentMapping: await mockInvokeMapping(), + lastExecutedChain: 'ecsMapping', + })); + // Returns the response with the duplicate field removed, but missing one to trigger the next step. + (handleDuplicates as jest.Mock).mockImplementation(async () => ({ + currentMapping: await mockInvokeDuplicates(), + lastExecutedChain: 'duplicateFields', + })); - // Returns the response with the missing field added, but invalid ECS field to trigger the next step. - (handleMissingKeys as jest.Mock).mockImplementation(async () => ({ - currentMapping: await mockInvokeMissingKeys(), - lastExecutedChain: 'missingKeys', - })); + // Returns the response with the missing field added, but invalid ECS field to trigger the next step. + (handleMissingKeys as jest.Mock).mockImplementation(async () => ({ + currentMapping: await mockInvokeMissingKeys(), + lastExecutedChain: 'missingKeys', + })); - // Returns the response with the invalid ECS field fixed, which finishes the chain. - (handleInvalidEcs as jest.Mock).mockImplementation(async () => ({ - currentMapping: await mockInvokeInvalidEcs(), - lastExecutedChain: 'invalidEcs', - })); - }); - it('Ensures that the graph compiles', async () => { - // When getEcsGraph runs, langgraph compiles the graph it will error if the graph has any issues. - // Common issues for example detecting a node has no next step, or there is a infinite loop between them. - try { - await getEcsGraph(); - } catch (error) { - fail(`getEcsGraph threw an error: ${error}`); - } - }); - it('Runs the whole graph, with mocked outputs from the LLM.', async () => { - // The mocked outputs are specifically crafted to trigger ALL different conditions, allowing us to test the whole graph. - // This is why we have all the expects ensuring each function was called. + // Returns the response with the invalid ECS field fixed, which finishes the chain. 
+ (handleInvalidEcs as jest.Mock).mockImplementation(async () => ({ + currentMapping: await mockInvokeInvalidEcs(), + lastExecutedChain: 'invalidEcs', + })); + }); + it('Ensures that the graph compiles', async () => { + // When getEcsGraph runs, langgraph compiles the graph; it will error if the graph has any issues. + // Common issues for example detecting a node has no next step, or there is an infinite loop between them. + try { + await getEcsGraph(); + } catch (error) { + fail(`getEcsGraph threw an error: ${error}`); + } + }); + it('Runs the whole graph, with mocked outputs from the LLM.', async () => { + // The mocked outputs are specifically crafted to trigger ALL different conditions, allowing us to test the whole graph. + // This is why we have all the expects ensuring each function was called. - const ecsGraph = await getEcsGraph(); - const response = await ecsGraph.invoke(ecsMappingMockedRequest); - expect(response.results).toStrictEqual(ecsMappingExpectedResults); + const ecsGraph = await getEcsGraph(); + const response = await ecsGraph.invoke(mockedRequest); + expect(response.results).toStrictEqual(ecsMappingExpectedResults); - // Check if the functions were called - expect(handleEcsMapping).toHaveBeenCalled(); - expect(handleDuplicates).toHaveBeenCalled(); - expect(handleMissingKeys).toHaveBeenCalled(); - expect(handleInvalidEcs).toHaveBeenCalled(); + // Check if the functions were called + expect(handleEcsMapping).toHaveBeenCalled(); + expect(handleDuplicates).toHaveBeenCalled(); + expect(handleMissingKeys).toHaveBeenCalled(); + expect(handleInvalidEcs).toHaveBeenCalled(); + }); }); }); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts index dd7763fbc2da2..11ebccec9f83f 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts @@ -135,7 +135,7 @@ function chainRouter(state:
EcsMappingState): string { return END; } -export function getEcsGraph() { +export async function getEcsGraph() { const workflow = new StateGraph({ channels: graphState, }) diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts new file mode 100644 index 0000000000000..08a02772becbb --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { FakeLLM } from '@langchain/core/utils/testing'; +import { getModel } from '../../providers/bedrock'; +import { handleInvalidEcs } from './invalid'; +import { EcsMappingState } from '../../types'; +import { ecsTestState } from '../../../__jest__/fixtures/ecs_mapping'; + +const mockLlm = new FakeLLM({ + response: '{ "message": "ll callback later."}', +}); + +jest.mock('../../providers/bedrock', () => ({ + getModel: jest.fn(), +})); +const testState: EcsMappingState = ecsTestState; + +describe('Testing invalid handlers', () => { + beforeEach(() => { + (getModel as jest.Mock).mockReturnValue(mockLlm); + }); + it('handleInvalidEcs()', async () => { + const response = await handleInvalidEcs(testState); + expect(response.currentMapping).toStrictEqual({ message: 'll callback later.' }); + expect(response.lastExecutedChain).toBe('invalidEcs'); + }); +}); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts new file mode 100644 index 0000000000000..57a690e2dec99 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { FakeLLM } from '@langchain/core/utils/testing'; +import { getModel } from '../../providers/bedrock'; +import { handleEcsMapping } from './mapping'; +import { EcsMappingState } from '../../types'; +import { ecsTestState } from '../../../__jest__/fixtures/ecs_mapping'; + +const mockLlm = new FakeLLM({ + response: '{ "message": "ll callback later."}', +}); + +jest.mock('../../providers/bedrock', () => ({ + getModel: jest.fn(), +})); +const testState: EcsMappingState = ecsTestState; + +describe('Testing ECS mapping handler', () => { + beforeEach(() => { + (getModel as jest.Mock).mockReturnValue(mockLlm); + }); + it('handleEcsMapping()', async () => { + const response = await handleEcsMapping(testState); + expect(response.currentMapping).toStrictEqual({ message: 'll callback later.' }); + expect(response.lastExecutedChain).toBe('ecsMapping'); + }); +}); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts new file mode 100644 index 0000000000000..e85b459562bde --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { FakeLLM } from '@langchain/core/utils/testing'; +import { getModel } from '../../providers/bedrock'; +import { handleMissingKeys } from './missing'; +import { EcsMappingState } from '../../types'; +import { ecsTestState } from '../../../__jest__/fixtures/ecs_mapping'; + +const mockLlm = new FakeLLM({ + response: '{ "message": "ll callback later."}', +}); + +jest.mock('../../providers/bedrock', () => ({ + getModel: jest.fn(), +})); +const testState: EcsMappingState = ecsTestState; + +describe('Testing missing keys handler', () => { + beforeEach(() => { + (getModel as jest.Mock).mockReturnValue(mockLlm); + }); + it('handleMissingKeys()', async () => { + const response = await handleMissingKeys(testState); + expect(response.currentMapping).toStrictEqual({ message: 'll callback later.' }); + expect(response.lastExecutedChain).toBe('missingKeys'); + }); +}); diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts index 15d47106e7581..96d1a01e9fb1f 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts @@ -7,7 +7,7 @@ import { StateGraph, StateGraphArgs, END, START } from '@langchain/langgraph'; import { RelatedState } from '../../types'; import { modifySamples, formatSamples } from '../../util/samples'; -import { handleValidatePipeline } from '../../util/es'; +import { handleValidatePipeline } from '../../util/graph'; import { handleRelated } from './related'; import { handleErrors } from './errors'; import { handleReview } from './review'; @@ -134,7 +134,7 @@ function chainRouter(state: RelatedState): string { return END; } -export function getRelatedGraph() { +export async function getRelatedGraph() { const workflow = new StateGraph({ channels: graphState }) .addNode('modelInput', modelInput) .addNode('modelOutput', modelOutput) diff --git 
a/x-pack/plugins/integration_assistant/server/types.ts b/x-pack/plugins/integration_assistant/server/types.ts index 990b3c121bb7f..29f6134cd8c18 100644 --- a/x-pack/plugins/integration_assistant/server/types.ts +++ b/x-pack/plugins/integration_assistant/server/types.ts @@ -24,7 +24,6 @@ export interface CategorizationState { pipelineResults: object[]; finalized: boolean; reviewed: boolean; - currentMapping: object; currentPipeline: object; currentProcessors: object[]; invalidCategorization: object; diff --git a/x-pack/plugins/integration_assistant/server/util/es.ts b/x-pack/plugins/integration_assistant/server/util/es.ts index cf21e4ee7f9d2..c33edb585dfab 100644 --- a/x-pack/plugins/integration_assistant/server/util/es.ts +++ b/x-pack/plugins/integration_assistant/server/util/es.ts @@ -5,7 +5,6 @@ * 2.0. */ import { Client } from '@elastic/elasticsearch'; -import { EcsMappingState, CategorizationState, RelatedState } from '../types'; interface DocTemplate { _index: string; @@ -41,7 +40,10 @@ function newClient(): Client { return client; } -async function testPipeline(samples: string[], pipeline: object): Promise<[any[], any[]]> { +export async function testPipeline( + samples: string[], + pipeline: object +): Promise<[object[], object[]]> { const docs = samples.map((sample) => formatSample(sample)); const results: object[] = []; const errors: object[] = []; @@ -62,14 +64,3 @@ async function testPipeline(samples: string[], pipeline: object): Promise<[any[] return [errors, results]; } - -export async function handleValidatePipeline( - state: EcsMappingState | CategorizationState | RelatedState -): Promise<Partial<EcsMappingState> | Partial<CategorizationState> | Partial<RelatedState>> { - const [errors, results] = await testPipeline(state.rawSamples, state.currentPipeline); - return { - errors, - pipelineResults: results, - lastExecutedChain: 'validate_pipeline', - }; -} diff --git a/x-pack/plugins/integration_assistant/server/util/graph.ts b/x-pack/plugins/integration_assistant/server/util/graph.ts new file mode 100644 index
0000000000000..2955ca364d3ad --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/util/graph.ts @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +import { EcsMappingState, CategorizationState, RelatedState } from '../types'; +import { testPipeline } from './es'; + +export async function handleValidatePipeline( + state: EcsMappingState | CategorizationState | RelatedState +): Promise<Partial<EcsMappingState> | Partial<CategorizationState> | Partial<RelatedState>> { + const [errors, results] = await testPipeline(state.rawSamples, state.currentPipeline); + return { + errors, + pipelineResults: results, + lastExecutedChain: 'validate_pipeline', + }; +} From 9e5fa1d39a34a1ec1a4374ab2be16dca6ac6a821 Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Wed, 29 May 2024 18:52:46 +0200 Subject: [PATCH 18/62] updating existing and adding new tests for backend, add esclient from context --- .../__jest__/fixtures/categorization.ts | 115 ++++++++ .../__jest__/fixtures/index.ts | 40 +++ .../__jest__/fixtures/related.ts | 277 ++++++++++++++++++ .../categorization/categorization.test.ts | 38 +++ .../graphs/categorization/categorization.ts | 2 +- .../server/graphs/categorization/constants.ts | 2 +- .../graphs/categorization/errors.test.ts | 38 +++ .../server/graphs/categorization/errors.ts | 2 +- .../graphs/categorization/graph.test.ts | 36 ++- .../server/graphs/categorization/graph.ts | 6 +- .../graphs/categorization/invalid.test.ts | 38 +++ .../server/graphs/categorization/invalid.ts | 2 +- .../graphs/categorization/review.test.ts | 39 +++ .../server/graphs/categorization/review.ts | 2 +- .../server/graphs/categorization/validate.ts | 24 +- .../server/graphs/ecs/duplicates.test.ts | 2 +- .../server/graphs/ecs/invalid.test.ts | 2 +- .../server/graphs/ecs/mapping.test.ts | 2 +-
.../server/graphs/ecs/missing.test.ts | 2 +- .../server/graphs/related/errors.test.ts | 36 +++ .../server/graphs/related/errors.ts | 4 +- .../server/graphs/related/graph.test.ts | 130 ++++++++ .../server/graphs/related/graph.ts | 9 +- .../server/graphs/related/related.test.ts | 36 +++ .../server/graphs/related/related.ts | 4 +- .../server/graphs/related/review.test.ts | 36 +++ .../server/graphs/related/review.ts | 2 +- .../server/routes/categorization_routes.ts | 6 +- .../integration_assistant/server/types.ts | 1 - .../integration_assistant/server/util/es.ts | 68 +---- .../server/util/graph.ts | 2 +- .../server/util/index.ts | 3 + .../server/util/pipeline.ts | 56 +++- .../server/util/processors.ts | 25 ++ 34 files changed, 964 insertions(+), 123 deletions(-) create mode 100644 x-pack/plugins/integration_assistant/__jest__/fixtures/related.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.test.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/categorization/errors.test.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.test.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/categorization/review.test.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/related/errors.test.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/related/graph.test.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/related/related.test.ts create mode 100644 x-pack/plugins/integration_assistant/server/graphs/related/review.test.ts create mode 100644 x-pack/plugins/integration_assistant/server/util/processors.ts diff --git a/x-pack/plugins/integration_assistant/__jest__/fixtures/categorization.ts b/x-pack/plugins/integration_assistant/__jest__/fixtures/categorization.ts index 833f05dac21d4..16b4d8f5196af 100644 --- a/x-pack/plugins/integration_assistant/__jest__/fixtures/categorization.ts 
+++ b/x-pack/plugins/integration_assistant/__jest__/fixtures/categorization.ts @@ -169,3 +169,118 @@ export const testPipelineInvalidEcs: [object[], object[]] = [ [], [{ event: { type: ['database'], category: ['creation'] }, anotherKey: 'anotherValue' }], ]; +export const categorizationTestState = { + rawSamples: ['{"test1": "test1"}'], + samples: ['{ "test1": "test1" }'], + formattedSamples: '{"test1": "test1"}', + ecsTypes: 'testtypes', + ecsCategories: 'testcategories', + exAnswer: 'testanswer', + lastExecutedChain: 'testchain', + packageName: 'testpackage', + dataStreamName: 'testdatastream', + errors: { test: 'testerror' }, + pipelineResults: [{ test: 'testresult' }], + finalized: false, + reviewed: false, + currentPipeline: { test: 'testpipeline' }, + currentProcessors: [ + { + append: { + field: 'event.type', + value: ['creation'], + if: "ctx.mysql_enterprise?.audit?.general_data?.sql_command == 'create_db'", + }, + }, + { + append: { + field: 'event.category', + value: ['database'], + if: "ctx.mysql_enterprise.audit.general_data.sql_command == 'create_db'", + }, + }, + ], + invalidCategorization: { test: 'testinvalid' }, + initialPipeline: categorizationInitialPipeline, + results: { test: 'testresults' }, +}; + +export const categorizationMockProcessors = [ + { + append: { + field: 'event.type', + value: ['creation'], + if: "ctx.mysql_enterprise?.audit?.general_data?.sql_command == 'create_db'", + }, + }, + { + append: { + field: 'event.category', + value: ['database'], + if: "ctx.mysql_enterprise.audit.general_data.sql_command == 'create_db'", + }, + }, +]; + +export const categorizationExpectedHandlerResponse = { + currentPipeline: { + description: 'Pipeline to process mysql_enterprise audit logs', + processors: [ + { + set: { + field: 'ecs.version', + value: '8.11.0', + }, + }, + { + append: { + field: 'event.type', + value: ['creation'], + if: "ctx.mysql_enterprise?.audit?.general_data?.sql_command == 'create_db'", + }, + }, + { + append: { + field: 
'event.category', + value: ['database'], + if: "ctx.mysql_enterprise.audit.general_data.sql_command == 'create_db'", + }, + }, + { + rename: { + field: 'message', + target_field: 'event.original', + ignore_missing: true, + if: 'ctx.event?.original == null', + }, + }, + { + remove: { + field: 'event.original', + tag: 'remove_original_event', + if: 'ctx?.tags == null || !(ctx.tags.contains("preserve_original_event"))', + ignore_failure: true, + ignore_missing: true, + }, + }, + ], + }, + currentProcessors: [ + { + append: { + field: 'event.type', + value: ['creation'], + if: "ctx.mysql_enterprise?.audit?.general_data?.sql_command == 'create_db'", + }, + }, + { + append: { + field: 'event.category', + value: ['database'], + if: "ctx.mysql_enterprise.audit.general_data.sql_command == 'create_db'", + }, + }, + ], + reviewed: false, + lastExecutedChain: 'error', +}; diff --git a/x-pack/plugins/integration_assistant/__jest__/fixtures/index.ts b/x-pack/plugins/integration_assistant/__jest__/fixtures/index.ts index 76271e0f955a4..7e3e155e67b8a 100644 --- a/x-pack/plugins/integration_assistant/__jest__/fixtures/index.ts +++ b/x-pack/plugins/integration_assistant/__jest__/fixtures/index.ts @@ -4,6 +4,36 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ +import type { Pipeline } from '../../common'; +const currentPipelineMock: Pipeline = { + description: 'Pipeline to process mysql_enterprise audit logs', + processors: [ + { + set: { + field: 'ecs.version', + value: '8.11.0', + }, + }, + { + rename: { + field: 'message', + target_field: 'event.original', + ignore_missing: true, + if: 'ctx.event?.original == null', + }, + }, + { + remove: { + field: 'event.original', + tag: 'remove_original_event', + if: 'ctx?.tags == null || !(ctx.tags.contains("preserve_original_event"))', + ignore_failure: true, + ignore_missing: true, + }, + }, + ], +}; + export const mockedRequest = { rawSamples: [ '{ "timestamp": "2020-10-19 19:31:31", "cpu_usage": 0.1, "class": "general", "event": "status", "test_array": ["test1", "test2"]}', @@ -12,3 +42,13 @@ export const mockedRequest = { packageName: 'mysql_enterprise', dataStreamName: 'audit', }; + +export const mockedRequestWithPipeline = { + rawSamples: [ + '{ "timestamp": "2020-10-19 19:31:31", "cpu_usage": 0.1, "class": "general", "event": "status", "test_array": ["test1", "test2"]}', + '{ "timestamp": "2020-10-19 19:32:10", "cpu_usage": 0.2, "class": "connection", "event": "disconnect", "bytes": 16, "account": { "user": "audit_test_user2", "ip": "10.10.10.10" }}', + ], + packageName: 'mysql_enterprise', + dataStreamName: 'audit', + currentPipeline: currentPipelineMock, +}; diff --git a/x-pack/plugins/integration_assistant/__jest__/fixtures/related.ts b/x-pack/plugins/integration_assistant/__jest__/fixtures/related.ts new file mode 100644 index 0000000000000..490d343092fbf --- /dev/null +++ b/x-pack/plugins/integration_assistant/__jest__/fixtures/related.ts @@ -0,0 +1,277 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import type { Pipeline } from '../../common'; + +export const relatedInitialPipeline: Pipeline = { + description: 'Pipeline to process mysql_enterprise audit logs', + processors: [ + { + set: { + field: 'ecs.version', + value: '8.11.0', + }, + }, + { + rename: { + field: 'message', + target_field: 'event.original', + ignore_missing: true, + if: 'ctx.event?.original == null', + }, + }, + { + remove: { + field: 'event.original', + tag: 'remove_original_event', + if: 'ctx?.tags == null || !(ctx.tags.contains("preserve_original_event"))', + ignore_failure: true, + ignore_missing: true, + }, + }, + ], +}; + +export const relatedExpectedResults = { + docs: [ + { + key: 'value', + anotherKey: 'anotherValue', + }, + ], + pipeline: { + description: 'Pipeline to process mysql_enterprise audit logs', + processors: [ + { + set: { + field: 'ecs.version', + value: '8.11.0', + }, + }, + { + append: { + field: 'related.ip', + value: ['{{{source.ip}}}'], + allow_duplicates: false, + if: 'ctx.source?.ip != null', + }, + }, + { + append: { + field: 'related.ip', + value: ['{{{destination.ip}}}'], + allow_duplicates: false, + if: 'ctx.destination?.ip != null', + }, + }, + { + rename: { + field: 'message', + target_field: 'event.original', + ignore_missing: true, + if: 'ctx.event?.original == null', + }, + }, + { + remove: { + field: 'event.original', + tag: 'remove_original_event', + if: 'ctx?.tags == null || !(ctx.tags.contains("preserve_original_event"))', + ignore_failure: true, + ignore_missing: true, + }, + }, + ], + }, +}; + +export const relatedInitialMockedResponse = [ + { + append: { + field: 'related.ip', + value: ['{{{source.ip}?.split(":")[0]}}'], + allow_duplicates: false, + if: 'ctx.source?.ip != null', + }, + }, + { + append: { + field: 'related.ip', + value: ['{{{destination.ip}}}'], + allow_duplicates: false, + if: 'ctx.destination?.ip != null', + }, + }, +]; + +export const relatedErrorMockedResponse = [ + { + append: { + field: 'related.ip', + value: 
['{{{source.ip}}}'], + allow_duplicates: false, + if: 'ctx.source?.ip != null', + }, + }, + { + append: { + field: 'related.ip', + value: ['{{{destination.ip}}}'], + allow_duplicates: false, + if: 'ctx.destination?.ip != null', + }, + }, +]; + +export const relatedReviewMockedResponse = [ + { + append: { + field: 'related.ip', + value: ['{{{source.ip}}}'], + allow_duplicates: false, + if: 'ctx.source?.ip != null', + }, + }, + { + append: { + field: 'related.ip', + value: ['{{{destination.ip}}}'], + allow_duplicates: false, + if: 'ctx.destination?.ip != null', + }, + }, +]; + +export const testPipelineError: [object[], object[]] = [ + [{ error: 'Sample error message 1' }, { error: 'Sample error message 2' }], + [], +]; + +export const testPipelineValidResult: [object[], object[]] = [ + [], + [{ key: 'value', anotherKey: 'anotherValue' }], +]; + +export const relatedTestState = { + rawSamples: ['{"test1": "test1"}'], + samples: ['{ "test1": "test1" }'], + formattedSamples: '{"test1": "test1"}', + ecs: 'testtypes', + exAnswer: 'testanswer', + packageName: 'testpackage', + dataStreamName: 'testdatastream', + errors: { test: 'testerror' }, + pipelineResults: [{ test: 'testresult' }], + finalized: false, + reviewed: false, + currentPipeline: { test: 'testpipeline' }, + currentProcessors: [ + { + append: { + field: 'related.ip', + value: ['{{{source.ip}?.split(":")[0]}}'], + allow_duplicates: false, + if: 'ctx.source?.ip != null', + }, + }, + { + append: { + field: 'related.ip', + value: ['{{{destination.ip}}}'], + allow_duplicates: false, + if: 'ctx.destination?.ip != null', + }, + }, + ], + initialPipeline: relatedInitialPipeline, + results: { test: 'testresults' }, + lastExecutedChain: 'testchain', +}; + +export const relatedMockProcessors = [ + { + append: { + field: 'related.ip', + value: ['{{{source.ip}?.split(":")[0]}}'], + allow_duplicates: false, + if: 'ctx.source?.ip != null', + }, + }, + { + append: { + field: 'related.ip', + value: ['{{{destination.ip}}}'], + 
allow_duplicates: false, + if: 'ctx.destination?.ip != null', + }, + }, +]; + +export const relatedExpectedHandlerResponse = { + currentPipeline: { + description: 'Pipeline to process mysql_enterprise audit logs', + processors: [ + { + set: { + field: 'ecs.version', + value: '8.11.0', + }, + }, + { + append: { + field: 'related.ip', + value: ['{{{source.ip}?.split(":")[0]}}'], + allow_duplicates: false, + if: 'ctx.source?.ip != null', + }, + }, + { + append: { + field: 'related.ip', + value: ['{{{destination.ip}}}'], + allow_duplicates: false, + if: 'ctx.destination?.ip != null', + }, + }, + { + rename: { + field: 'message', + target_field: 'event.original', + ignore_missing: true, + if: 'ctx.event?.original == null', + }, + }, + { + remove: { + field: 'event.original', + tag: 'remove_original_event', + if: 'ctx?.tags == null || !(ctx.tags.contains("preserve_original_event"))', + ignore_failure: true, + ignore_missing: true, + }, + }, + ], + }, + currentProcessors: [ + { + append: { + field: 'event.type', + value: ['creation'], + if: "ctx.mysql_enterprise?.audit?.general_data?.sql_command == 'create_db'", + }, + }, + { + append: { + field: 'event.category', + value: ['database'], + if: "ctx.mysql_enterprise.audit.general_data.sql_command == 'create_db'", + }, + }, + ], + reviewed: false, + lastExecutedChain: 'error', +}; diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.test.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.test.ts new file mode 100644 index 0000000000000..fcb5c49f60fb6 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.test.ts @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { FakeLLM } from '@langchain/core/utils/testing'; +import { getModel } from '../../providers/bedrock'; +import { handleCategorization } from './categorization'; +import { CategorizationState } from '../../types'; +import { + categorizationTestState, + categorizationMockProcessors, + categorizationExpectedHandlerResponse, +} from '../../../__jest__/fixtures/categorization'; + +const mockLlm = new FakeLLM({ + response: JSON.stringify(categorizationMockProcessors, null, 2), +}); + +jest.mock('../../providers/bedrock', () => ({ + getModel: jest.fn(), +})); +const testState: CategorizationState = categorizationTestState; + +describe('Testing categorization handler', () => { + beforeEach(() => { + (getModel as jest.Mock).mockReturnValue(mockLlm); + }); + it('handleCategorization()', async () => { + const response = await handleCategorization(testState); + expect(response.currentPipeline).toStrictEqual( + categorizationExpectedHandlerResponse.currentPipeline + ); + expect(response.lastExecutedChain).toBe('categorization'); + }); +}); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts index 130de2094d166..c147f6ed2f037 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts @@ -7,7 +7,7 @@ import { JsonOutputParser } from '@langchain/core/output_parsers'; import { getModel } from '../../providers/bedrock'; import { CategorizationState } from '../../types'; -import { combineProcessors } from '../../util/pipeline'; +import { combineProcessors } from '../../util/processors'; import { Pipeline } from '../../../common'; import { CATEGORIZATION_MAIN_PROMPT } from './prompts'; diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/constants.ts 
b/x-pack/plugins/integration_assistant/server/graphs/categorization/constants.ts index 1b6f268965ac5..ca875c15f026d 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/constants.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/constants.ts @@ -111,7 +111,7 @@ export const EVENT_CATEGORIES = [ 'web', ]; -type EventCategories = +export type EventCategories = | 'api' | 'authentication' | 'configuration' diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.test.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.test.ts new file mode 100644 index 0000000000000..2c86001a18f3d --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.test.ts @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { FakeLLM } from '@langchain/core/utils/testing'; +import { getModel } from '../../providers/bedrock'; +import { handleErrors } from './errors'; +import { CategorizationState } from '../../types'; +import { + categorizationTestState, + categorizationMockProcessors, + categorizationExpectedHandlerResponse, +} from '../../../__jest__/fixtures/categorization'; + +const mockLlm = new FakeLLM({ + response: JSON.stringify(categorizationMockProcessors, null, 2), +}); + +jest.mock('../../providers/bedrock', () => ({ + getModel: jest.fn(), +})); +const testState: CategorizationState = categorizationTestState; + +describe('Testing categorization handler', () => { + beforeEach(() => { + (getModel as jest.Mock).mockReturnValue(mockLlm); + }); + it('handleErrors()', async () => { + const response = await handleErrors(testState); + expect(response.currentPipeline).toStrictEqual( + categorizationExpectedHandlerResponse.currentPipeline + ); + expect(response.lastExecutedChain).toBe('error'); + }); +}); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts index 695ce727ed58b..a6bd1cd4c23f3 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts @@ -8,7 +8,7 @@ import { JsonOutputParser } from '@langchain/core/output_parsers'; import { CATEGORIZATION_ERROR_PROMPT } from './prompts'; import { getModel } from '../../providers/bedrock'; import { CategorizationState } from '../../types'; -import { combineProcessors } from '../../util/pipeline'; +import { combineProcessors } from '../../util/processors'; import { Pipeline } from '../../../common'; export async function handleErrors(state: CategorizationState) { diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts 
b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts index bb0f4feba5f06..3be3bcaf790b9 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts @@ -5,6 +5,7 @@ * 2.0. */ +import { IScopedClusterClient } from '@kbn/core/server'; import { FakeLLM } from '@langchain/core/utils/testing'; import { getCategorizationGraph } from './graph'; import { getModel } from '../../providers/bedrock'; @@ -19,13 +20,12 @@ import { testPipelineValidResult, testPipelineInvalidEcs, } from '../../../__jest__/fixtures/categorization'; -import { mockedRequest } from '../../../__jest__/fixtures'; +import { mockedRequestWithPipeline } from '../../../__jest__/fixtures'; import { handleReview } from './review'; import { handleCategorization } from './categorization'; import { handleErrors } from './errors'; import { handleInvalidCategorization } from './invalid'; -import { testPipeline } from '../../util/es'; -import { combineProcessors } from '../../util/pipeline'; +import { testPipeline, combineProcessors } from '../../util'; const mockLlm = new FakeLLM({ response: "I'll callback later.", @@ -39,11 +39,27 @@ jest.mock('../../providers/bedrock', () => ({ getModel: jest.fn(), })); -jest.mock('../../util/es', () => ({ +jest.mock('../../util/pipeline', () => ({ testPipeline: jest.fn(), })); +jest.mock('../../util/es', () => { + return { + ESClient: { + setClient: jest.fn(), + getClient: jest.fn(), + }, + }; +}); + describe('runCategorizationGraph', () => { + const mockClient = { + asCurrentUser: { + indices: { + getMapping: jest.fn(), + }, + }, + } as unknown as IScopedClusterClient; beforeEach(() => { // Mocked responses for each node that requires an LLM API call/response. 
const mockInvokeCategorization = jest @@ -56,11 +72,14 @@ describe('runCategorizationGraph', () => { // Return a fake LLM to prevent API calls from being made, or require API credentials (getModel as jest.Mock).mockReturnValue(mockLlm); + // We do not care about ES in these tests, the mock is just to prevent errors. + // After this is triggered, the mock of TestPipeline will trigger the expected error, to route to error handler (handleCategorization as jest.Mock).mockImplementation(async () => ({ currentPipeline: categorizationInitialPipeline, currentProcessors: await mockInvokeCategorization(), reviewed: false, + finalized: false, lastExecutedChain: 'categorization', })); // Error pipeline resolves it, though the responce includes an invalid categorization @@ -68,6 +87,7 @@ describe('runCategorizationGraph', () => { currentPipeline: categorizationInitialPipeline, currentProcessors: await mockInvokeError(), reviewed: false, + finalized: false, lastExecutedChain: 'error', })); // Invalid categorization is resolved and returned correctly, which routes it to a review @@ -75,6 +95,7 @@ describe('runCategorizationGraph', () => { currentPipeline: categorizationInitialPipeline, currentProcessors: await mockInvokeInvalid(), reviewed: false, + finalized: false, lastExecutedChain: 'invalidCategorization', })); // After the review it should route to modelOutput and finish. 
@@ -85,6 +106,7 @@ describe('runCategorizationGraph', () => { currentProcessors, currentPipeline, reviewed: true, + finalized: false, lastExecutedChain: 'review', }; }); @@ -92,14 +114,14 @@ describe('runCategorizationGraph', () => { it('Ensures that the graph compiles', async () => { try { - await getCategorizationGraph(); + await getCategorizationGraph(mockClient); } catch (error) { // noop } }); it('Runs the whole graph, with mocked outputs from the LLM.', async () => { - const categorizationGraph = await getCategorizationGraph(); + const categorizationGraph = await getCategorizationGraph(mockClient); (testPipeline as jest.Mock) .mockResolvedValueOnce(testPipelineValidResult) @@ -111,7 +133,7 @@ describe('runCategorizationGraph', () => { let response; try { - response = await categorizationGraph.invoke(mockedRequest); + response = await categorizationGraph.invoke(mockedRequestWithPipeline); } catch (e) { // noop } diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts index e83162b37abac..6ef95fc8a89c9 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts @@ -4,6 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ +import type { IScopedClusterClient } from '@kbn/core-elasticsearch-server'; import { StateGraph, StateGraphArgs, END, START } from '@langchain/langgraph'; import { CategorizationState } from '../../types'; import { modifySamples, formatSamples } from '../../util/samples'; @@ -14,6 +15,7 @@ import { handleInvalidCategorization } from './invalid'; import { handleErrors } from './errors'; import { handleReview } from './review'; import { CATEGORIZATION_EXAMPLE_ANSWER, ECS_CATEGORIES, ECS_TYPES } from './constants'; +import { ESClient } from '../../util/es'; const graphState: StateGraphArgs['channels'] = { lastExecutedChain: { @@ -138,10 +140,12 @@ function chainRouter(state: CategorizationState): string { if (!state.finalized) { return 'modelOutput'; } + return END; } -export async function getCategorizationGraph() { +export async function getCategorizationGraph(client: IScopedClusterClient) { + ESClient.setClient(client); const workflow = new StateGraph({ channels: graphState, }) diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.test.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.test.ts new file mode 100644 index 0000000000000..6d9b109dba7ec --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.test.ts @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { FakeLLM } from '@langchain/core/utils/testing'; +import { getModel } from '../../providers/bedrock'; +import { handleInvalidCategorization } from './invalid'; +import { CategorizationState } from '../../types'; +import { + categorizationTestState, + categorizationMockProcessors, + categorizationExpectedHandlerResponse, +} from '../../../__jest__/fixtures/categorization'; + +const mockLlm = new FakeLLM({ + response: JSON.stringify(categorizationMockProcessors, null, 2), +}); + +jest.mock('../../providers/bedrock', () => ({ + getModel: jest.fn(), +})); +const testState: CategorizationState = categorizationTestState; + +describe('Testing categorization handler', () => { + beforeEach(() => { + (getModel as jest.Mock).mockReturnValue(mockLlm); + }); + it('handleInvalidCategorization()', async () => { + const response = await handleInvalidCategorization(testState); + expect(response.currentPipeline).toStrictEqual( + categorizationExpectedHandlerResponse.currentPipeline + ); + expect(response.lastExecutedChain).toBe('invalidCategorization'); + }); +}); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts index 14063eb7064e4..e6c563abc4479 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts @@ -8,7 +8,7 @@ import { JsonOutputParser } from '@langchain/core/output_parsers'; import { CATEGORIZATION_VALIDATION_PROMPT } from './prompts'; import { getModel } from '../../providers/bedrock'; import { CategorizationState } from '../../types'; -import { combineProcessors } from '../../util/pipeline'; +import { combineProcessors } from '../../util/processors'; import { ECS_EVENT_TYPES_PER_CATEGORY } from './constants'; import { Pipeline } from '../../../common'; diff --git 
a/x-pack/plugins/integration_assistant/server/graphs/categorization/review.test.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/review.test.ts new file mode 100644 index 0000000000000..e2b78b7f80375 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/review.test.ts @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { FakeLLM } from '@langchain/core/utils/testing'; +import { getModel } from '../../providers/bedrock'; +import { handleReview } from './review'; +import { CategorizationState } from '../../types'; +import { + categorizationTestState, + categorizationMockProcessors, + categorizationExpectedHandlerResponse, +} from '../../../__jest__/fixtures/categorization'; + +const mockLlm = new FakeLLM({ + response: JSON.stringify(categorizationMockProcessors, null, 2), +}); + +jest.mock('../../providers/bedrock', () => ({ + getModel: jest.fn(), +})); + +const testState: CategorizationState = categorizationTestState; + +describe('Testing categorization handler', () => { + beforeEach(() => { + (getModel as jest.Mock).mockReturnValue(mockLlm); + }); + it('handleReview()', async () => { + const response = await handleReview(testState); + expect(response.currentPipeline).toStrictEqual( + categorizationExpectedHandlerResponse.currentPipeline + ); + expect(response.lastExecutedChain).toBe('review'); + }); +}); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts index 9c6150dfcb381..25c3ead23992a 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts @@ -8,7 +8,7 
@@ import { JsonOutputParser } from '@langchain/core/output_parsers'; import { CATEGORIZATION_REVIEW_PROMPT } from './prompts'; import { getModel } from '../../providers/bedrock'; import { CategorizationState } from '../../types'; -import { combineProcessors } from '../../util/pipeline'; +import { combineProcessors } from '../../util/processors'; import { ECS_EVENT_TYPES_PER_CATEGORY } from './constants'; import { Pipeline } from '../../../common'; diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/validate.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/validate.ts index 916b2fea25e96..8d00bdc31f1fd 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/validate.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/validate.ts @@ -7,6 +7,8 @@ import { CategorizationState } from '../../types'; import { ECS_EVENT_TYPES_PER_CATEGORY, EVENT_CATEGORIES, EVENT_TYPES } from './constants'; +import type { EventCategories } from './constants'; + interface Event { type?: string[]; category?: string[]; @@ -97,28 +99,6 @@ function findInvalidTypes(types: string[]): string[] { return invalidTypes; } -type EventCategories = - | 'api' - | 'authentication' - | 'configuration' - | 'database' - | 'driver' - | 'email' - | 'file' - | 'host' - | 'iam' - | 'intrusion_detection' - | 'library' - | 'network' - | 'package' - | 'process' - | 'registry' - | 'session' - | 'threat' - | 'user' - | 'vulnerability' - | 'web'; - function getTypeCategoryIncompatibleError( categories: string[], types: string[] diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.test.ts index a89a68581a611..7ec2386c3d8bf 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.test.ts @@ -20,7 +20,7 @@ 
jest.mock('../../providers/bedrock', () => ({ })); const testState: EcsMappingState = ecsTestState; -describe('Testing duplicates handler', () => { +describe('Testing ecs handler', () => { beforeEach(() => { (getModel as jest.Mock).mockReturnValue(mockLlm); }); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts index 08a02772becbb..6e623ef8ffc2f 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts @@ -20,7 +20,7 @@ jest.mock('../../providers/bedrock', () => ({ })); const testState: EcsMappingState = ecsTestState; -describe('Testing invalid handlers', () => { +describe('Testing ecs handlers', () => { beforeEach(() => { (getModel as jest.Mock).mockReturnValue(mockLlm); }); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts index 57a690e2dec99..1c10c9c1a10b5 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts @@ -20,7 +20,7 @@ jest.mock('../../providers/bedrock', () => ({ })); const testState: EcsMappingState = ecsTestState; -describe('Testing ECS mapping handler', () => { +describe('Testing ecs handler', () => { beforeEach(() => { (getModel as jest.Mock).mockReturnValue(mockLlm); }); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts index e85b459562bde..f7b6ef592b483 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts @@ -20,7 +20,7 @@ jest.mock('../../providers/bedrock', () => ({ })); const testState: EcsMappingState = ecsTestState; 
-describe('Testing missing keys handler', () => { +describe('Testing ecs handler', () => { beforeEach(() => { (getModel as jest.Mock).mockReturnValue(mockLlm); }); diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/errors.test.ts b/x-pack/plugins/integration_assistant/server/graphs/related/errors.test.ts new file mode 100644 index 0000000000000..e2d53f7368df2 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/related/errors.test.ts @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { FakeLLM } from '@langchain/core/utils/testing'; +import { getModel } from '../../providers/bedrock'; +import { handleErrors } from './errors'; +import { RelatedState } from '../../types'; +import { + relatedTestState, + relatedMockProcessors, + relatedExpectedHandlerResponse, +} from '../../../__jest__/fixtures/related'; + +const mockLlm = new FakeLLM({ + response: JSON.stringify(relatedMockProcessors, null, 2), +}); + +jest.mock('../../providers/bedrock', () => ({ + getModel: jest.fn(), +})); +const testState: RelatedState = relatedTestState; + +describe('Testing related handler', () => { + beforeEach(() => { + (getModel as jest.Mock).mockReturnValue(mockLlm); + }); + it('handleErrors()', async () => { + const response = await handleErrors(testState); + expect(response.currentPipeline).toStrictEqual(relatedExpectedHandlerResponse.currentPipeline); + expect(response.lastExecutedChain).toBe('error'); + }); +}); diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts b/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts index 097df08ba8387..c15c1d75888b1 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts +++ 
b/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts @@ -8,13 +8,12 @@ import { JsonOutputParser } from '@langchain/core/output_parsers'; import { RELATED_ERROR_PROMPT } from './prompts'; import { getModel } from '../../providers/bedrock'; import { RelatedState } from '../../types'; -import { combineProcessors } from '../../util/pipeline'; +import { combineProcessors } from '../../util/processors'; import { Pipeline } from '../../../common'; export async function handleErrors(state: RelatedState) { const relatedErrorPrompt = RELATED_ERROR_PROMPT; const model = getModel(); - const outputParser = new JsonOutputParser(); const relatedErrorGraph = relatedErrorPrompt.pipe(model).pipe(outputParser); @@ -27,7 +26,6 @@ export async function handleErrors(state: RelatedState) { })) as any[]; const currentPipeline = combineProcessors(state.initialPipeline as Pipeline, currentProcessors); - return { currentPipeline, currentProcessors, diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/graph.test.ts b/x-pack/plugins/integration_assistant/server/graphs/related/graph.test.ts new file mode 100644 index 0000000000000..416b4d8ff23d0 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/related/graph.test.ts @@ -0,0 +1,130 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { IScopedClusterClient } from '@kbn/core/server'; +import { FakeLLM } from '@langchain/core/utils/testing'; +import { getRelatedGraph } from './graph'; +import { getModel } from '../../providers/bedrock'; +import { + relatedExpectedResults, + relatedErrorMockedResponse, + relatedInitialMockedResponse, + relatedReviewMockedResponse, + relatedInitialPipeline, + testPipelineError, + testPipelineValidResult, +} from '../../../__jest__/fixtures/related'; +import { mockedRequestWithPipeline } from '../../../__jest__/fixtures'; +import { handleReview } from './review'; +import { handleRelated } from './related'; +import { handleErrors } from './errors'; +import { testPipeline, combineProcessors } from '../../util'; + +const mockLlm = new FakeLLM({ + response: "I'll callback later.", +}); + +jest.mock('./errors'); +jest.mock('./review'); +jest.mock('./related'); +jest.mock('../../providers/bedrock', () => ({ + getModel: jest.fn(), +})); + +jest.mock('../../util/pipeline', () => ({ + testPipeline: jest.fn(), +})); + +jest.mock('../../util/es', () => { + return { + ESClient: { + setClient: jest.fn(), + getClient: jest.fn(), + }, + }; +}); + +describe('runRelatedGraph', () => { + const mockClient = { + asCurrentUser: { + indices: { + getMapping: jest.fn(), + }, + }, + } as unknown as IScopedClusterClient; + beforeEach(() => { + // Mocked responses for each node that requires an LLM API call/response. 
+ const mockInvokeRelated = jest.fn().mockResolvedValue(relatedInitialMockedResponse); + const mockInvokeError = jest.fn().mockResolvedValue(relatedErrorMockedResponse); + const mockInvokeReview = jest.fn().mockResolvedValue(relatedReviewMockedResponse); + + // Return a fake LLM to prevent API calls from being made, or require API credentials + (getModel as jest.Mock).mockReturnValue(mockLlm); + + // After this is triggered, the mock of TestPipeline will trigger the expected error, to route to error handler + (handleRelated as jest.Mock).mockImplementation(async () => ({ + currentPipeline: relatedInitialPipeline, + currentProcessors: await mockInvokeRelated(), + reviewed: false, + finalized: false, + lastExecutedChain: 'related', + })); + // Error pipeline returns the correct response to trigger a review. + (handleErrors as jest.Mock).mockImplementation(async () => ({ + currentPipeline: relatedInitialPipeline, + currentProcessors: await mockInvokeError(), + reviewed: false, + finalized: false, + lastExecutedChain: 'error', + })); + // After the review it should route to modelOutput and finish. 
+ (handleReview as jest.Mock).mockImplementation(async () => { + const currentProcessors = await mockInvokeReview(); + const currentPipeline = combineProcessors(relatedInitialPipeline, currentProcessors); + return { + currentProcessors, + currentPipeline, + reviewed: true, + finalized: false, + lastExecutedChain: 'review', + }; + }); + }); + + it('Ensures that the graph compiles', async () => { + try { + await getRelatedGraph(mockClient); + } catch (error) { + // noop + } + }); + + it('Runs the whole graph, with mocked outputs from the LLM.', async () => { + const relatedGraph = await getRelatedGraph(mockClient); + + (testPipeline as jest.Mock) + .mockResolvedValueOnce(testPipelineValidResult) + .mockResolvedValueOnce(testPipelineError) + .mockResolvedValueOnce(testPipelineValidResult) + .mockResolvedValueOnce(testPipelineValidResult) + .mockResolvedValueOnce(testPipelineValidResult); + + let response; + try { + response = await relatedGraph.invoke(mockedRequestWithPipeline); + } catch (e) { + // noop + } + + expect(response.results).toStrictEqual(relatedExpectedResults); + + // Check if the functions were called + expect(handleRelated).toHaveBeenCalled(); + expect(handleErrors).toHaveBeenCalled(); + expect(handleReview).toHaveBeenCalled(); + }); +}); diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts index 96d1a01e9fb1f..277fa39662be3 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts @@ -4,6 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ +import type { IScopedClusterClient } from '@kbn/core-elasticsearch-server'; import { StateGraph, StateGraphArgs, END, START } from '@langchain/langgraph'; import { RelatedState } from '../../types'; import { modifySamples, formatSamples } from '../../util/samples'; @@ -12,6 +13,7 @@ import { handleRelated } from './related'; import { handleErrors } from './errors'; import { handleReview } from './review'; import { RELATED_ECS_FIELDS, RELATED_EXAMPLE_ANSWER } from './constants'; +import { ESClient } from '../../util/es'; const graphState: StateGraphArgs['channels'] = { lastExecutedChain: { @@ -62,10 +64,6 @@ const graphState: StateGraphArgs['channels'] = { value: (x: object[], y?: object[]) => y ?? x, default: () => [], }, - currentMapping: { - value: (x: object, y?: object) => y ?? x, - default: () => ({}), - }, currentPipeline: { value: (x: object, y?: object) => y ?? x, default: () => ({}), @@ -134,7 +132,8 @@ function chainRouter(state: RelatedState): string { return END; } -export async function getRelatedGraph() { +export async function getRelatedGraph(client: IScopedClusterClient) { + ESClient.setClient(client); const workflow = new StateGraph({ channels: graphState }) .addNode('modelInput', modelInput) .addNode('modelOutput', modelOutput) diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/related.test.ts b/x-pack/plugins/integration_assistant/server/graphs/related/related.test.ts new file mode 100644 index 0000000000000..bc5968514bd5c --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/related/related.test.ts @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { FakeLLM } from '@langchain/core/utils/testing'; +import { getModel } from '../../providers/bedrock'; +import { handleRelated } from './related'; +import { RelatedState } from '../../types'; +import { + relatedTestState, + relatedMockProcessors, + relatedExpectedHandlerResponse, +} from '../../../__jest__/fixtures/related'; + +const mockLlm = new FakeLLM({ + response: JSON.stringify(relatedMockProcessors, null, 2), +}); + +jest.mock('../../providers/bedrock', () => ({ + getModel: jest.fn(), +})); +const testState: RelatedState = relatedTestState; + +describe('Testing related handler', () => { + beforeEach(() => { + (getModel as jest.Mock).mockReturnValue(mockLlm); + }); + it('handleRelated()', async () => { + const response = await handleRelated(testState); + expect(response.currentPipeline).toStrictEqual(relatedExpectedHandlerResponse.currentPipeline); + expect(response.lastExecutedChain).toBe('related'); + }); +}); diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/related.ts b/x-pack/plugins/integration_assistant/server/graphs/related/related.ts index b93597064bbd9..13264a6ce7431 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/related.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/related.ts @@ -8,7 +8,7 @@ import { JsonOutputParser } from '@langchain/core/output_parsers'; import { RELATED_MAIN_PROMPT } from './prompts'; import { getModel } from '../../providers/bedrock'; import { RelatedState } from '../../types'; -import { combineProcessors } from '../../util/pipeline'; +import { combineProcessors } from '../../util/processors'; import { Pipeline } from '../../../common'; export async function handleRelated(state: RelatedState) { @@ -30,6 +30,6 @@ export async function handleRelated(state: RelatedState) { currentPipeline, currentProcessors, reviewed: false, - lastExecutedChain: 'main', + lastExecutedChain: 'related', }; } diff --git 
a/x-pack/plugins/integration_assistant/server/graphs/related/review.test.ts b/x-pack/plugins/integration_assistant/server/graphs/related/review.test.ts new file mode 100644 index 0000000000000..00debae37fa4e --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/graphs/related/review.test.ts @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { FakeLLM } from '@langchain/core/utils/testing'; +import { getModel } from '../../providers/bedrock'; +import { handleReview } from './review'; +import { RelatedState } from '../../types'; +import { + relatedTestState, + relatedMockProcessors, + relatedExpectedHandlerResponse, +} from '../../../__jest__/fixtures/related'; + +const mockLlm = new FakeLLM({ + response: JSON.stringify(relatedMockProcessors, null, 2), +}); + +jest.mock('../../providers/bedrock', () => ({ + getModel: jest.fn(), +})); +const testState: RelatedState = relatedTestState; + +describe('Testing related handler', () => { + beforeEach(() => { + (getModel as jest.Mock).mockReturnValue(mockLlm); + }); + it('handleReview()', async () => { + const response = await handleReview(testState); + expect(response.currentPipeline).toStrictEqual(relatedExpectedHandlerResponse.currentPipeline); + expect(response.lastExecutedChain).toBe('review'); + }); +}); diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/review.ts b/x-pack/plugins/integration_assistant/server/graphs/related/review.ts index d50acefa694fa..87eaed0b1a683 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/review.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/review.ts @@ -8,7 +8,7 @@ import { JsonOutputParser } from '@langchain/core/output_parsers'; import { RELATED_REVIEW_PROMPT } from './prompts'; 
import { getModel } from '../../providers/bedrock'; import { RelatedState } from '../../types'; -import { combineProcessors } from '../../util/pipeline'; +import { combineProcessors } from '../../util/processors'; import { Pipeline } from '../../../common'; export async function handleReview(state: RelatedState) { diff --git a/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts b/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts index a469355c9dc56..12a172126a550 100644 --- a/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts @@ -30,10 +30,12 @@ export function registerCategorizationRoutes(router: IRouter) { }), }, }, - async (_, req, res) => { + async (context, req, res) => { const { packageName, dataStreamName, rawSamples, currentPipeline } = req.body as CategorizationApiRequest; - const graph = await getCategorizationGraph(); + const services = await context.resolve(['core']); + const { client } = services.core.elasticsearch; + const graph = await getCategorizationGraph(client); let results = { results: { docs: {}, pipeline: {} } }; try { results = (await graph.invoke({ diff --git a/x-pack/plugins/integration_assistant/server/types.ts b/x-pack/plugins/integration_assistant/server/types.ts index 29f6134cd8c18..54713df18b0d8 100644 --- a/x-pack/plugins/integration_assistant/server/types.ts +++ b/x-pack/plugins/integration_assistant/server/types.ts @@ -63,7 +63,6 @@ export interface RelatedState { pipelineResults: object[]; finalized: boolean; reviewed: boolean; - currentMapping: object; currentPipeline: object; currentProcessors: object[]; initialPipeline: object; diff --git a/x-pack/plugins/integration_assistant/server/util/es.ts b/x-pack/plugins/integration_assistant/server/util/es.ts index c33edb585dfab..089658e1171e7 100644 --- a/x-pack/plugins/integration_assistant/server/util/es.ts +++ 
b/x-pack/plugins/integration_assistant/server/util/es.ts @@ -4,63 +4,23 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -import { Client } from '@elastic/elasticsearch'; +import type { IScopedClusterClient } from '@kbn/core-elasticsearch-server'; -interface DocTemplate { - _index: string; - _id: string; - _source: { - message: string; - }; -} - -function formatSample(sample: string): DocTemplate { - const docsTemplate: DocTemplate = { - _index: 'index', - _id: 'id', - _source: { message: '' }, - }; - const formatted: DocTemplate = { ...docsTemplate }; - formatted._source.message = sample; - return formatted; -} - -// TODO: Replace with real client from route context. -function newClient(): Client { - const client = new Client({ - node: 'http://localhost:9200', - auth: { - username: 'elastic', - password: 'changeme', - }, - tls: { - rejectUnauthorized: false, - }, - }); - return client; -} - -export async function testPipeline( - samples: string[], - pipeline: object -): Promise<[object[], object[]]> { - const docs = samples.map((sample) => formatSample(sample)); - const results: object[] = []; - const errors: object[] = []; +// Allows the initialization of the langgraph to set a reference to the context ES client. +// When handleValidatePipeline runs later, it will fetch the reference again. 
+export class ESClient { + private static client: IScopedClusterClient | null = null; - const client = newClient(); - try { - const output = await client.ingest.simulate({ docs, pipeline }); - for (const doc of output.docs) { - if (doc.doc?._source?.error) { - errors.push(doc.doc._source.error); - } else if (doc.doc?._source) { - results.push(doc.doc._source); - } + public static setClient(client: IScopedClusterClient): void { + if (!this.client) { + this.client = client; } - } catch (e) { - errors.push({ error: (e as Error).message }); } - return [errors, results]; + public static getClient(): IScopedClusterClient { + if (!this.client) { + throw new Error('Elasticsearch client has not been instantiated.'); + } + return this.client; + } } diff --git a/x-pack/plugins/integration_assistant/server/util/graph.ts b/x-pack/plugins/integration_assistant/server/util/graph.ts index 2955ca364d3ad..35106debb844e 100644 --- a/x-pack/plugins/integration_assistant/server/util/graph.ts +++ b/x-pack/plugins/integration_assistant/server/util/graph.ts @@ -5,7 +5,7 @@ * 2.0. 
*/ import { EcsMappingState, CategorizationState, RelatedState } from '../types'; -import { testPipeline } from './es'; +import { testPipeline } from './pipeline'; export async function handleValidatePipeline( state: EcsMappingState | CategorizationState | RelatedState diff --git a/x-pack/plugins/integration_assistant/server/util/index.ts b/x-pack/plugins/integration_assistant/server/util/index.ts index 1818fd1915274..a32871c5f3bdf 100644 --- a/x-pack/plugins/integration_assistant/server/util/index.ts +++ b/x-pack/plugins/integration_assistant/server/util/index.ts @@ -16,3 +16,6 @@ export { export { generateFields, mergeSamples } from './samples'; export { deepCopy, generateUniqueId } from './util'; +export { testPipeline } from './pipeline'; +export { combineProcessors } from './processors'; +export { ESClient } from './es'; diff --git a/x-pack/plugins/integration_assistant/server/util/pipeline.ts b/x-pack/plugins/integration_assistant/server/util/pipeline.ts index bf5f1addde8bc..3f3b5abc43e49 100644 --- a/x-pack/plugins/integration_assistant/server/util/pipeline.ts +++ b/x-pack/plugins/integration_assistant/server/util/pipeline.ts @@ -4,22 +4,48 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -import { deepCopy } from './util'; -import type { Pipeline } from '../../common'; +import { ESClient } from './es'; -export function combineProcessors(initialPipeline: Pipeline, processors: any[]): Pipeline { - // Create a deep copy of the initialPipeline to avoid modifying the original input - const currentPipeline = deepCopy(initialPipeline); +interface DocTemplate { + _index: string; + _id: string; + _source: { + message: string; + }; +} + +function formatSample(sample: string): DocTemplate { + const docsTemplate: DocTemplate = { + _index: 'index', + _id: 'id', + _source: { message: '' }, + }; + const formatted: DocTemplate = { ...docsTemplate }; + formatted._source.message = sample; + return formatted; +} + +export async function testPipeline( + samples: string[], + pipeline: object +): Promise<[object[], object[]]> { + const docs = samples.map((sample) => formatSample(sample)); + const results: object[] = []; + const errors: object[] = []; - // Add the new processors right before the last 2 removeprocessor in the initial pipeline. - // This is so all the processors if conditions are not accessing possibly removed fields. 
- const currentProcessors = currentPipeline.processors; - const combinedProcessors = [ - ...currentProcessors.slice(0, -2), - ...processors, - ...currentProcessors.slice(-2), - ]; - currentPipeline.processors = combinedProcessors; + const client = ESClient.getClient(); + try { + const output = await client.asCurrentUser.ingest.simulate({ docs, pipeline }); + for (const doc of output.docs) { + if (doc.doc?._source?.error) { + errors.push(doc.doc._source.error); + } else if (doc.doc?._source) { + results.push(doc.doc._source); + } + } + } catch (e) { + errors.push({ error: (e as Error).message }); + } - return currentPipeline; + return [errors, results]; } diff --git a/x-pack/plugins/integration_assistant/server/util/processors.ts b/x-pack/plugins/integration_assistant/server/util/processors.ts new file mode 100644 index 0000000000000..dd87c8d25a68a --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/util/processors.ts @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { deepCopy } from './util'; +import type { Pipeline } from '../../common'; + +export function combineProcessors(initialPipeline: Pipeline, processors: any[]): Pipeline { + // Create a deep copy of the initialPipeline to avoid modifying the original input + const currentPipeline = deepCopy(initialPipeline); + + // Add the new processors right before the last 2 removeprocessor in the initial pipeline. + // This is so all the processors if conditions are not accessing possibly removed fields. 
+ const currentProcessors = currentPipeline.processors; + const combinedProcessors = [ + ...currentProcessors.slice(0, -2), + ...processors, + ...currentProcessors.slice(-2), + ]; + currentPipeline.processors = combinedProcessors; + return currentPipeline; +} From 2c54358a4a444236c03acd06349da4bec37adbe5 Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Wed, 29 May 2024 19:18:22 +0200 Subject: [PATCH 19/62] added context to related routes --- .../integration_assistant/server/routes/related_routes.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/x-pack/plugins/integration_assistant/server/routes/related_routes.ts b/x-pack/plugins/integration_assistant/server/routes/related_routes.ts index b439478f23a3c..118894ee5ef74 100644 --- a/x-pack/plugins/integration_assistant/server/routes/related_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/related_routes.ts @@ -31,10 +31,12 @@ export function registerRelatedRoutes(router: IRouter) { }), }, }, - async (_, req, res) => { + async (context, req, res) => { const { packageName, dataStreamName, rawSamples, currentPipeline } = req.body as RelatedApiRequest; - const graph = await getRelatedGraph(); + const services = await context.resolve(['core']); + const { client } = services.core.elasticsearch; + const graph = await getRelatedGraph(client); let results = { results: { docs: {}, pipeline: {} } }; try { results = (await graph.invoke({ From 6aefd4ff7be57d88936e71fbd6c22ed094d13676 Mon Sep 17 00:00:00 2001 From: Sergi Massaneda Date: Thu, 30 May 2024 10:42:02 +0200 Subject: [PATCH 20/62] disable config by default --- .../integration_assistant/server/config.ts | 18 ++++++++++++++++++ .../integration_assistant/server/constants.ts | 7 +++++++ .../integration_assistant/server/index.ts | 2 ++ .../integration_assistant/server/plugin.ts | 4 ++-- 4 files changed, 29 insertions(+), 2 deletions(-) create mode 100644 x-pack/plugins/integration_assistant/server/config.ts diff --git 
a/x-pack/plugins/integration_assistant/server/config.ts b/x-pack/plugins/integration_assistant/server/config.ts new file mode 100644 index 0000000000000..c8c81b9f63743 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/config.ts @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { schema, type TypeOf } from '@kbn/config-schema'; +import type { PluginConfigDescriptor } from '@kbn/core/server'; + +export const configSchema = schema.object({ + enabled: schema.boolean({ defaultValue: false }), +}); +export type ServerlessSecuritySchema = TypeOf; + +export const config: PluginConfigDescriptor = { + schema: configSchema, +}; diff --git a/x-pack/plugins/integration_assistant/server/constants.ts b/x-pack/plugins/integration_assistant/server/constants.ts index c40d0e02a2ba5..1c4ed1918d310 100644 --- a/x-pack/plugins/integration_assistant/server/constants.ts +++ b/x-pack/plugins/integration_assistant/server/constants.ts @@ -1,3 +1,10 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + export const ROUTE_HANDLER_TIMEOUT = 10 * 60 * 1000; // 10 * 60 seconds = 10 minutes export const LANG_CHAIN_TIMEOUT = ROUTE_HANDLER_TIMEOUT - 10_000; // 9 minutes 50 seconds export const CONNECTOR_TIMEOUT = LANG_CHAIN_TIMEOUT - 10_000; // 9 minutes 40 seconds diff --git a/x-pack/plugins/integration_assistant/server/index.ts b/x-pack/plugins/integration_assistant/server/index.ts index d47e4470ccb8b..71c9c10ecdd6e 100644 --- a/x-pack/plugins/integration_assistant/server/index.ts +++ b/x-pack/plugins/integration_assistant/server/index.ts @@ -7,6 +7,8 @@ import { PluginInitializerContext } from '@kbn/core/server'; +export { config } from './config'; + export async function plugin(initializerContext: PluginInitializerContext) { const { IntegrationAssistantPlugin } = await import('./plugin'); return new IntegrationAssistantPlugin(initializerContext); diff --git a/x-pack/plugins/integration_assistant/server/plugin.ts b/x-pack/plugins/integration_assistant/server/plugin.ts index 7e36f759ff141..8463bc4a915f3 100644 --- a/x-pack/plugins/integration_assistant/server/plugin.ts +++ b/x-pack/plugins/integration_assistant/server/plugin.ts @@ -19,14 +19,14 @@ export class IntegrationAssistantPlugin } public setup(core: CoreSetup) { const router = core.http.createRouter(); - this.logger.debug('integrationAssistant api: Setup'); + this.logger.info('integrationAssistant api: Setup'); registerRoutes(router); return {}; } public start(core: CoreStart) { - this.logger.debug('integrationAssistant api: Started'); + this.logger.info('integrationAssistant api: Started'); return {}; } From 8e641b4364f9ababeda9e8655cfa1cf869570ffe Mon Sep 17 00:00:00 2001 From: Sergi Massaneda Date: Thu, 30 May 2024 11:35:58 +0200 Subject: [PATCH 21/62] log debug --- x-pack/plugins/integration_assistant/server/plugin.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugins/integration_assistant/server/plugin.ts b/x-pack/plugins/integration_assistant/server/plugin.ts 
index 8463bc4a915f3..7e36f759ff141 100644 --- a/x-pack/plugins/integration_assistant/server/plugin.ts +++ b/x-pack/plugins/integration_assistant/server/plugin.ts @@ -19,14 +19,14 @@ export class IntegrationAssistantPlugin } public setup(core: CoreSetup) { const router = core.http.createRouter(); - this.logger.info('integrationAssistant api: Setup'); + this.logger.debug('integrationAssistant api: Setup'); registerRoutes(router); return {}; } public start(core: CoreStart) { - this.logger.info('integrationAssistant api: Started'); + this.logger.debug('integrationAssistant api: Started'); return {}; } From 819374b4de4a94276c0971d39ff57906c675e02d Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Thu, 30 May 2024 14:33:04 +0200 Subject: [PATCH 22/62] Add testpipeline API, update testPipeline to allow passing client as argument, update tests --- .../__jest__/fixtures/categorization.ts | 27 ++++++------ .../__jest__/fixtures/related.ts | 16 +++---- .../integration_assistant/common/constants.ts | 1 + .../integration_assistant/common/index.ts | 3 ++ .../integration_assistant/common/types.ts | 17 +++++--- .../components/build_integration_button.tsx | 3 -- .../graphs/categorization/graph.test.ts | 5 ++- .../integration_builder/build_integration.ts | 26 +++++++----- .../server/integration_builder/dev_folders.ts | 4 +- .../server/routes/build_integration_routes.ts | 8 +--- .../server/routes/categorization_routes.ts | 3 +- .../server/routes/ecs_routes.ts | 3 +- .../server/routes/pipeline_routes.ts | 42 +++++++++++++++++++ .../server/routes/related_routes.ts | 3 +- .../server/util/async_file.ts | 2 +- .../server/util/graph.ts | 8 ++-- .../server/util/pipeline.ts | 14 +++---- 17 files changed, 118 insertions(+), 67 deletions(-) create mode 100644 x-pack/plugins/integration_assistant/server/routes/pipeline_routes.ts diff --git a/x-pack/plugins/integration_assistant/__jest__/fixtures/categorization.ts b/x-pack/plugins/integration_assistant/__jest__/fixtures/categorization.ts index 
16b4d8f5196af..62fa18f5523c1 100644 --- a/x-pack/plugins/integration_assistant/__jest__/fixtures/categorization.ts +++ b/x-pack/plugins/integration_assistant/__jest__/fixtures/categorization.ts @@ -155,20 +155,23 @@ export const categorizationReviewMockedResponse = [ }, ]; -export const testPipelineError: [object[], object[]] = [ - [{ error: 'Sample error message 1' }, { error: 'Sample error message 2' }], - [], -]; +export const testPipelineError: { pipelineResults: object[]; errors: object[] } = { + pipelineResults: [], + errors: [{ error: 'Sample error message 1' }, { error: 'Sample error message 2' }], +}; -export const testPipelineValidResult: [object[], object[]] = [ - [], - [{ key: 'value', anotherKey: 'anotherValue' }], -]; +export const testPipelineValidResult: { pipelineResults: object[]; errors: object[] } = { + pipelineResults: [{ key: 'value', anotherKey: 'anotherValue' }], + errors: [], +}; + +export const testPipelineInvalidEcs: { pipelineResults: object[]; errors: object[] } = { + pipelineResults: [ + { event: { type: ['database'], category: ['creation'] }, anotherKey: 'anotherValue' }, + ], + errors: [], +}; -export const testPipelineInvalidEcs: [object[], object[]] = [ - [], - [{ event: { type: ['database'], category: ['creation'] }, anotherKey: 'anotherValue' }], -]; export const categorizationTestState = { rawSamples: ['{"test1": "test1"}'], samples: ['{ "test1": "test1" }'], diff --git a/x-pack/plugins/integration_assistant/__jest__/fixtures/related.ts b/x-pack/plugins/integration_assistant/__jest__/fixtures/related.ts index 490d343092fbf..f34133a4f520f 100644 --- a/x-pack/plugins/integration_assistant/__jest__/fixtures/related.ts +++ b/x-pack/plugins/integration_assistant/__jest__/fixtures/related.ts @@ -146,15 +146,15 @@ export const relatedReviewMockedResponse = [ }, ]; -export const testPipelineError: [object[], object[]] = [ - [{ error: 'Sample error message 1' }, { error: 'Sample error message 2' }], - [], -]; +export const 
testPipelineError: { pipelineResults: object[]; errors: object[] } = { + pipelineResults: [], + errors: [{ error: 'Sample error message 1' }, { error: 'Sample error message 2' }], +}; -export const testPipelineValidResult: [object[], object[]] = [ - [], - [{ key: 'value', anotherKey: 'anotherValue' }], -]; +export const testPipelineValidResult: { pipelineResults: object[]; errors: object[] } = { + pipelineResults: [{ key: 'value', anotherKey: 'anotherValue' }], + errors: [], +}; export const relatedTestState = { rawSamples: ['{"test1": "test1"}'], diff --git a/x-pack/plugins/integration_assistant/common/constants.ts b/x-pack/plugins/integration_assistant/common/constants.ts index 27b10b45cc0f3..59b0a2cd9b094 100644 --- a/x-pack/plugins/integration_assistant/common/constants.ts +++ b/x-pack/plugins/integration_assistant/common/constants.ts @@ -17,3 +17,4 @@ export const ECS_GRAPH_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/ecs`; export const CATEGORIZATION_GRAPH_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/categorization`; export const RELATED_GRAPH_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/related`; export const INTEGRATION_BUILDER_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/build`; +export const TEST_PIPELINE_PATH = `${INTEGRATION_ASSISTANT_BASE_PATH}/pipeline`; diff --git a/x-pack/plugins/integration_assistant/common/index.ts b/x-pack/plugins/integration_assistant/common/index.ts index 6398a2c495779..99b6cb8793a48 100644 --- a/x-pack/plugins/integration_assistant/common/index.ts +++ b/x-pack/plugins/integration_assistant/common/index.ts @@ -19,6 +19,8 @@ export type { DataStream, Integration, InputTypes, + TestPipelineApiRequest, + TestPipelineApiResponse, } from './types'; export { @@ -27,6 +29,7 @@ export { ECS_GRAPH_PATH, CATEGORIZATION_GRAPH_PATH, RELATED_GRAPH_PATH, + TEST_PIPELINE_PATH, INTEGRATION_BUILDER_PATH, INTEGRATION_ASSISTANT_BASE_PATH, } from './constants'; diff --git a/x-pack/plugins/integration_assistant/common/types.ts 
b/x-pack/plugins/integration_assistant/common/types.ts index d4b685a14d249..9f43d0b4ff5d2 100644 --- a/x-pack/plugins/integration_assistant/common/types.ts +++ b/x-pack/plugins/integration_assistant/common/types.ts @@ -48,7 +48,6 @@ export interface DataStream { description: string; inputTypes: InputTypes[]; rawSamples: string[]; - // TODO: figure out why changing this to `Pipeline` makes the frontend test objects complain about types. pipeline: object; docs: object[]; } @@ -57,14 +56,10 @@ export interface Integration { name: string; title: string; description: string; - version: string; dataStreams: DataStream[]; - streamVersion?: string; - dockerComposeVersion?: string; - initialVersion?: string; - formatVersion?: string; owner: string; minKibanaVersion: string; + logo?: string; } // Server Request Schemas @@ -93,6 +88,11 @@ export interface RelatedApiRequest { currentPipeline: object; } +export interface TestPipelineApiRequest { + rawSamples: string[]; + pipeline: Pipeline; +} + // Server Response Schemas export interface CategorizationApiResponse { results: { @@ -114,3 +114,8 @@ export interface EcsMappingApiResponse { pipeline: object; }; } + +export interface TestPipelineApiResponse { + pipelineResults: object[]; + errors?: object[]; +} diff --git a/x-pack/plugins/integration_assistant/public/components/build_integration_button.tsx b/x-pack/plugins/integration_assistant/public/components/build_integration_button.tsx index ad1dce2e6f485..af34c7d2ea564 100644 --- a/x-pack/plugins/integration_assistant/public/components/build_integration_button.tsx +++ b/x-pack/plugins/integration_assistant/public/components/build_integration_button.tsx @@ -121,9 +121,6 @@ export const BuildIntegrationButton = ({ rawSamples, }, ], - streamVersion: '0.13.0', - dockerComposeVersion: '2.3', - formatVersion: '3.11.0', owner: '@elastic/test-team', minKibanaVersion: '8.13.0', }, diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts 
b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts index 3be3bcaf790b9..227f56d184f5b 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts @@ -55,8 +55,8 @@ jest.mock('../../util/es', () => { describe('runCategorizationGraph', () => { const mockClient = { asCurrentUser: { - indices: { - getMapping: jest.fn(), + ingest: { + simulate: jest.fn(), }, }, } as unknown as IScopedClusterClient; @@ -137,6 +137,7 @@ describe('runCategorizationGraph', () => { } catch (e) { // noop } + expect(response.results).toStrictEqual(categorizationExpectedResults); // Check if the functions were called diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts b/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts index cb92fd97f949e..4545213e1c293 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts @@ -22,7 +22,6 @@ export async function buildPackage(integration: Integration): Promise { const agentTemplates = joinPath(templateDir, 'agent'); const manifestTemplates = joinPath(templateDir, 'manifest'); const systemTestTemplates = joinPath(templateDir, 'system_tests'); - // TODO: A bit unsure how we are going to translate this autoescape false or not, needs to be investigated before merging. 
nunjucks.configure([templateDir, agentTemplates, manifestTemplates, systemTestTemplates], { autoescape: false, }); @@ -46,14 +45,14 @@ export async function buildPackage(integration: Integration): Promise { ); } - const tmpPackageDir = joinPath(tmpDir, `${integration.name}-${integration.initialVersion}`); + const tmpPackageDir = joinPath(tmpDir, `${integration.name}-0.1.0`); const zipBuffer = await createZipArchive(tmpPackageDir); return zipBuffer; } async function createDirectories(tmpDir: string, integration: Integration): Promise { - const packageDir = joinPath(tmpDir, `${integration.name}-${integration.initialVersion}`); + const packageDir = joinPath(tmpDir, `${integration.name}-0.1.0`); await asyncEnsureDir(tmpDir); await asyncEnsureDir(packageDir); await createPackage(packageDir, integration); @@ -66,15 +65,20 @@ async function createPackage(packageDir: string, integration: Integration): Prom await createBuildFile(packageDir); await createPackageManifest(packageDir, integration); await createPackageSystemTests(packageDir, integration); - await createDefaultLogo(packageDir); + await createLogo(packageDir, integration); } -async function createDefaultLogo(packageDir: string): Promise { +async function createLogo(packageDir: string, integration: Integration): Promise { const logoDir = joinPath(packageDir, 'img'); - const imgTemplateDir = joinPath(__dirname, '../templates/img'); - await asyncEnsureDir(logoDir); - await asyncCopy(joinPath(imgTemplateDir, 'logo.svg'), joinPath(logoDir, 'logo.svg')); + + if (integration?.logo !== undefined) { + const buffer = Buffer.from(integration.logo, 'base64'); + await asyncCreate(joinPath(logoDir, 'logo.svg'), buffer); + } else { + const imgTemplateDir = joinPath(__dirname, '../templates/img'); + await asyncCopy(joinPath(imgTemplateDir, 'logo.svg'), joinPath(logoDir, 'logo.svg')); + } } async function createBuildFile(packageDir: string): Promise { @@ -87,7 +91,7 @@ async function createBuildFile(packageDir: string): Promise { 
async function createChangelog(packageDir: string, integration: Integration): Promise { const changelogTemplate = nunjucks.render('changelog.yml.njk', { - initial_version: integration.initialVersion, + initial_version: '0.1.0', }); await asyncCreate(joinPath(packageDir, 'changelog.yml'), changelogTemplate); @@ -128,10 +132,10 @@ async function createPackageManifest(packageDir: string, integration: Integratio const uniqueInputsList = Object.values(uniqueInputs); const packageManifest = nunjucks.render('package_manifest.yml.njk', { - format_version: integration.formatVersion, + format_version: '3.1.4', package_title: integration.title, package_name: integration.name, - package_version: integration.initialVersion, + package_version: '0.1.0', package_description: integration.description, package_owner: integration.owner, min_version: integration.minKibanaVersion, diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/dev_folders.ts b/x-pack/plugins/integration_assistant/server/integration_builder/dev_folders.ts index 64b9a0ef61d48..3277cbc590035 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/dev_folders.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/dev_folders.ts @@ -15,8 +15,8 @@ export async function createPackageSystemTests(integrationDir: string, integrati const systemTestsSamplesDir = joinPath(systemTestsDockerDir, 'sample_logs'); await asyncEnsureDir(systemTestsSamplesDir); - const streamVersion = integration.streamVersion || '0.13.0'; - const dockerComposeVersion = integration.dockerComposeVersion || '2.3'; + const streamVersion = '0.13.0'; + const dockerComposeVersion = '2.3'; const dockerServices: string[] = []; for (const stream of integration.dataStreams) { const packageName = integration.name.replace(/_/g, '-'); diff --git a/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts 
b/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts index 294c8a88c4caa..94da2519c15ac 100644 --- a/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts @@ -22,6 +22,7 @@ export function registerIntegrationBuilderRoutes(router: IRouter) { name: schema.string(), title: schema.string(), description: schema.string(), + logo: schema.maybe(schema.string()), dataStreams: schema.arrayOf( schema.object({ name: schema.string(), @@ -45,12 +46,7 @@ export function registerIntegrationBuilderRoutes(router: IRouter) { docs: schema.arrayOf(schema.object({}, { unknowns: 'allow' })), }) ), - streamVersion: schema.maybe(schema.string()), - dockerComposeVersion: schema.maybe(schema.string()), - initialVersion: schema.string(), - formatVersion: schema.string(), - owner: schema.string(), - minKibanaVersion: schema.string(), + initialVersion: schema.maybe(schema.string()), }), }), }, diff --git a/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts b/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts index 12a172126a550..4adbce3b7453c 100644 --- a/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts @@ -45,8 +45,7 @@ export function registerCategorizationRoutes(router: IRouter) { currentPipeline, })) as CategorizationApiResponse; } catch (e) { - // TODO: Better error responses? 
- return e; + return res.badRequest({ body: e }); } return res.ok({ body: results }); diff --git a/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts b/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts index 6d289e5e53397..68bdaf0c39160 100644 --- a/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts @@ -50,8 +50,7 @@ export function registerEcsRoutes(router: IRouter) { rawSamples, })) as EcsMappingApiResponse; } catch (e) { - // TODO: Better error responses? - return e; + return res.badRequest({ body: e }); } return res.ok({ body: results }); diff --git a/x-pack/plugins/integration_assistant/server/routes/pipeline_routes.ts b/x-pack/plugins/integration_assistant/server/routes/pipeline_routes.ts new file mode 100644 index 0000000000000..dca2c71e27ba9 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/routes/pipeline_routes.ts @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { IRouter } from '@kbn/core/server'; +import { schema } from '@kbn/config-schema'; +import { TEST_PIPELINE_PATH } from '../../common'; +import { testPipeline } from '../util/pipeline'; +import { TestPipelineApiRequest, TestPipelineApiResponse } from '../../common/types'; + +export function registerEcsRoutes(router: IRouter) { + router.post( + { + path: `${TEST_PIPELINE_PATH}`, + validate: { + body: schema.object({ + pipeline: schema.any(), + rawSamples: schema.arrayOf(schema.string()), + }), + }, + }, + async (context, req, res) => { + const { rawSamples, pipeline } = req.body as TestPipelineApiRequest; + const services = await context.resolve(['core']); + const { client } = services.core.elasticsearch; + let results: TestPipelineApiResponse = { pipelineResults: [], errors: [] }; + try { + results = (await testPipeline(rawSamples, pipeline, client)) as TestPipelineApiResponse; + if (results?.errors && results.errors.length > 0) { + return res.badRequest({ body: JSON.stringify(results.errors) }); + } + } catch (e) { + return res.badRequest({ body: e }); + } + + return res.ok({ body: results }); + } + ); +} diff --git a/x-pack/plugins/integration_assistant/server/routes/related_routes.ts b/x-pack/plugins/integration_assistant/server/routes/related_routes.ts index 118894ee5ef74..a60e7f10f1867 100644 --- a/x-pack/plugins/integration_assistant/server/routes/related_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/related_routes.ts @@ -46,8 +46,7 @@ export function registerRelatedRoutes(router: IRouter) { currentPipeline, })) as RelatedApiResponse; } catch (e) { - // TODO: Better error responses? 
- return e; + return res.badRequest({ body: e }); } return res.ok({ body: results }); diff --git a/x-pack/plugins/integration_assistant/server/util/async_file.ts b/x-pack/plugins/integration_assistant/server/util/async_file.ts index 40713fb208be5..b9449ada32362 100644 --- a/x-pack/plugins/integration_assistant/server/util/async_file.ts +++ b/x-pack/plugins/integration_assistant/server/util/async_file.ts @@ -29,7 +29,7 @@ export async function asyncEnsureDir(dirPath: string): Promise { } } -export async function asyncCreate(path: string, content: string): Promise { +export async function asyncCreate(path: string, content: string | Buffer): Promise { return await writeFile(path, content, { encoding: 'utf-8' }); } diff --git a/x-pack/plugins/integration_assistant/server/util/graph.ts b/x-pack/plugins/integration_assistant/server/util/graph.ts index 35106debb844e..325f30b5027a9 100644 --- a/x-pack/plugins/integration_assistant/server/util/graph.ts +++ b/x-pack/plugins/integration_assistant/server/util/graph.ts @@ -6,14 +6,16 @@ */ import { EcsMappingState, CategorizationState, RelatedState } from '../types'; import { testPipeline } from './pipeline'; +import { ESClient } from './es'; export async function handleValidatePipeline( state: EcsMappingState | CategorizationState | RelatedState ): Promise | Partial | Partial> { - const [errors, results] = await testPipeline(state.rawSamples, state.currentPipeline); + const client = ESClient.getClient(); + const results = await testPipeline(state.rawSamples, state.currentPipeline, client); return { - errors, - pipelineResults: results, + errors: results.errors, + pipelineResults: results.pipelineResults, lastExecutedChain: 'validate_pipeline', }; } diff --git a/x-pack/plugins/integration_assistant/server/util/pipeline.ts b/x-pack/plugins/integration_assistant/server/util/pipeline.ts index 3f3b5abc43e49..c9c58c78b6c9a 100644 --- a/x-pack/plugins/integration_assistant/server/util/pipeline.ts +++ 
b/x-pack/plugins/integration_assistant/server/util/pipeline.ts @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -import { ESClient } from './es'; +import type { IScopedClusterClient } from '@kbn/core-elasticsearch-server'; interface DocTemplate { _index: string; @@ -27,25 +27,25 @@ function formatSample(sample: string): DocTemplate { export async function testPipeline( samples: string[], - pipeline: object -): Promise<[object[], object[]]> { + pipeline: object, + client: IScopedClusterClient +): Promise<{ pipelineResults: object[]; errors: object[] }> { const docs = samples.map((sample) => formatSample(sample)); - const results: object[] = []; + const pipelineResults: object[] = []; const errors: object[] = []; - const client = ESClient.getClient(); try { const output = await client.asCurrentUser.ingest.simulate({ docs, pipeline }); for (const doc of output.docs) { if (doc.doc?._source?.error) { errors.push(doc.doc._source.error); } else if (doc.doc?._source) { - results.push(doc.doc._source); + pipelineResults.push(doc.doc._source); } } } catch (e) { errors.push({ error: (e as Error).message }); } - return [errors, results]; + return { pipelineResults, errors }; } From 2bc589d68b4660992dae26862b06f30bca608779 Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Thu, 30 May 2024 14:57:09 +0200 Subject: [PATCH 23/62] remove yarn.lock created by accident, add support for testing/updating pipeline as API route --- .../integration_assistant/common/types.ts | 2 +- .../integration_assistant/public/services.ts | 16 +++- .../server/routes/pipeline_routes.ts | 8 +- x-pack/yarn.lock | 73 ------------------- 4 files changed, 22 insertions(+), 77 deletions(-) delete mode 100644 x-pack/yarn.lock diff --git a/x-pack/plugins/integration_assistant/common/types.ts b/x-pack/plugins/integration_assistant/common/types.ts index 9f43d0b4ff5d2..7ac3d6d06020a 100644 --- a/x-pack/plugins/integration_assistant/common/types.ts +++ 
b/x-pack/plugins/integration_assistant/common/types.ts @@ -90,7 +90,7 @@ export interface RelatedApiRequest { export interface TestPipelineApiRequest { rawSamples: string[]; - pipeline: Pipeline; + currentPipeline: Pipeline; } // Server Response Schemas diff --git a/x-pack/plugins/integration_assistant/public/services.ts b/x-pack/plugins/integration_assistant/public/services.ts index d35bece2b8d31..cbc40577706dd 100644 --- a/x-pack/plugins/integration_assistant/public/services.ts +++ b/x-pack/plugins/integration_assistant/public/services.ts @@ -7,7 +7,7 @@ import type { CoreStart } from '@kbn/core/public'; import type { IHttpFetchError } from '@kbn/core-http-browser'; -import type { +import { EcsMappingApiRequest, EcsMappingApiResponse, CategorizationApiRequest, @@ -15,12 +15,15 @@ import type { RelatedApiRequest, RelatedApiResponse, BuildIntegrationApiRequest, + TestPipelineApiRequest, + TestPipelineApiResponse, } from '../common'; import { ECS_GRAPH_PATH, CATEGORIZATION_GRAPH_PATH, RELATED_GRAPH_PATH, INTEGRATION_BUILDER_PATH, + TEST_PIPELINE_PATH, } from '../common'; export interface Services { @@ -30,6 +33,7 @@ export interface Services { ) => Promise; runRelatedGraph: (req: RelatedApiRequest) => Promise; runIntegrationBuilder: (req: BuildIntegrationApiRequest) => Promise; + testPipeline: (req: TestPipelineApiRequest) => Promise; } export function getServices(core: CoreStart): Services { @@ -79,5 +83,15 @@ export function getServices(core: CoreStart): Services { return e; } }, + testPipeline: async (req: TestPipelineApiRequest): Promise => { + try { + const response = await core.http.post(TEST_PIPELINE_PATH, { + body: JSON.stringify({ ...req }), + }); + return response; + } catch (e) { + return e; + } + }, }; } diff --git a/x-pack/plugins/integration_assistant/server/routes/pipeline_routes.ts b/x-pack/plugins/integration_assistant/server/routes/pipeline_routes.ts index dca2c71e27ba9..37d595bc85d89 100644 --- 
a/x-pack/plugins/integration_assistant/server/routes/pipeline_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/pipeline_routes.ts @@ -23,12 +23,16 @@ export function registerEcsRoutes(router: IRouter) { }, }, async (context, req, res) => { - const { rawSamples, pipeline } = req.body as TestPipelineApiRequest; + const { rawSamples, currentPipeline } = req.body as TestPipelineApiRequest; const services = await context.resolve(['core']); const { client } = services.core.elasticsearch; let results: TestPipelineApiResponse = { pipelineResults: [], errors: [] }; try { - results = (await testPipeline(rawSamples, pipeline, client)) as TestPipelineApiResponse; + results = (await testPipeline( + rawSamples, + currentPipeline, + client + )) as TestPipelineApiResponse; if (results?.errors && results.errors.length > 0) { return res.badRequest({ body: JSON.stringify(results.errors) }); } diff --git a/x-pack/yarn.lock b/x-pack/yarn.lock deleted file mode 100644 index 81a53aa291fe3..0000000000000 --- a/x-pack/yarn.lock +++ /dev/null @@ -1,73 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
-# yarn lockfile v1 - - -"@smithy/is-array-buffer@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/is-array-buffer/-/is-array-buffer-3.0.0.tgz#9a95c2d46b8768946a9eec7f935feaddcffa5e7a" - integrity sha512-+Fsu6Q6C4RSJiy81Y8eApjEB5gVtM+oFKTffg+jSuwtvomJJrhUJBu2zS8wjXSgH/g1MKEWrzyChTBe6clb5FQ== - dependencies: - tslib "^2.6.2" - -"@smithy/signature-v4@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/signature-v4/-/signature-v4-3.0.0.tgz#f536d0abebfeeca8e9aab846a4042658ca07d3b7" - integrity sha512-kXFOkNX+BQHe2qnLxpMEaCRGap9J6tUGLzc3A9jdn+nD4JdMwCKTJ+zFwQ20GkY+mAXGatyTw3HcoUlR39HwmA== - dependencies: - "@smithy/is-array-buffer" "^3.0.0" - "@smithy/types" "^3.0.0" - "@smithy/util-hex-encoding" "^3.0.0" - "@smithy/util-middleware" "^3.0.0" - "@smithy/util-uri-escape" "^3.0.0" - "@smithy/util-utf8" "^3.0.0" - tslib "^2.6.2" - -"@smithy/types@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/types/-/types-3.0.0.tgz#00231052945159c64ffd8b91e8909d8d3006cb7e" - integrity sha512-VvWuQk2RKFuOr98gFhjca7fkBS+xLLURT8bUjk5XQoV0ZLm7WPwWPPY3/AwzTLuUBDeoKDCthfe1AsTUWaSEhw== - dependencies: - tslib "^2.6.2" - -"@smithy/util-buffer-from@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-buffer-from/-/util-buffer-from-3.0.0.tgz#559fc1c86138a89b2edaefc1e6677780c24594e3" - integrity sha512-aEOHCgq5RWFbP+UDPvPot26EJHjOC+bRgse5A8V3FSShqd5E5UN4qc7zkwsvJPPAVsf73QwYcHN1/gt/rtLwQA== - dependencies: - "@smithy/is-array-buffer" "^3.0.0" - tslib "^2.6.2" - -"@smithy/util-hex-encoding@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-hex-encoding/-/util-hex-encoding-3.0.0.tgz#32938b33d5bf2a15796cd3f178a55b4155c535e6" - integrity sha512-eFndh1WEK5YMUYvy3lPlVmYY/fZcQE1D8oSf41Id2vCeIkKJXPcYDCZD+4+xViI6b1XSd7tE+s5AmXzz5ilabQ== - dependencies: - tslib "^2.6.2" - -"@smithy/util-middleware@^3.0.0": - version "3.0.0" - resolved 
"https://registry.yarnpkg.com/@smithy/util-middleware/-/util-middleware-3.0.0.tgz#64d775628b99a495ca83ce982f5c83aa45f1e894" - integrity sha512-q5ITdOnV2pXHSVDnKWrwgSNTDBAMHLptFE07ua/5Ty5WJ11bvr0vk2a7agu7qRhrCFRQlno5u3CneU5EELK+DQ== - dependencies: - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@smithy/util-uri-escape@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-uri-escape/-/util-uri-escape-3.0.0.tgz#e43358a78bf45d50bb736770077f0f09195b6f54" - integrity sha512-LqR7qYLgZTD7nWLBecUi4aqolw8Mhza9ArpNEQ881MJJIU2sE5iHCK6TdyqqzcDLy0OPe10IY4T8ctVdtynubg== - dependencies: - tslib "^2.6.2" - -"@smithy/util-utf8@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-utf8/-/util-utf8-3.0.0.tgz#1a6a823d47cbec1fd6933e5fc87df975286d9d6a" - integrity sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA== - dependencies: - "@smithy/util-buffer-from" "^3.0.0" - tslib "^2.6.2" - -tslib@^2.6.2: - version "2.6.2" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" - integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== From 88a61bcae9a12de048e9a5fa6c4e5c6266dc1bf2 Mon Sep 17 00:00:00 2001 From: Patryk Kopycinski Date: Thu, 30 May 2024 18:31:52 +0200 Subject: [PATCH 24/62] use actionsclient --- package.json | 18 +- .../language_models/bedrock_chat_model.ts | 132 +++++ .../server/language_models/index.ts | 1 + .../server/language_models/utils_bedrock.ts | 491 ++++++++++++++++++ .../sub_action_connector.ts | 2 +- .../routes/post_actions_connector_execute.ts | 2 + .../integration_assistant/kibana.jsonc | 1 + .../graphs/categorization/categorization.ts | 6 +- .../server/graphs/categorization/errors.ts | 5 +- .../server/graphs/categorization/graph.ts | 6 +- .../server/graphs/categorization/invalid.ts | 4 +- .../server/graphs/categorization/review.ts | 7 +- 
.../server/graphs/ecs/duplicates.ts | 6 +- .../server/graphs/ecs/graph.ts | 11 +- .../server/graphs/ecs/invalid.ts | 6 +- .../server/graphs/ecs/mapping.ts | 6 +- .../server/graphs/ecs/missing.ts | 6 +- .../server/graphs/related/errors.ts | 6 +- .../server/graphs/related/graph.ts | 9 +- .../server/graphs/related/related.ts | 6 +- .../server/graphs/related/review.ts | 6 +- .../integration_assistant/server/plugin.ts | 31 +- .../server/providers/bedrock.ts | 28 - .../server/routes/categorization_routes.ts | 43 +- .../server/routes/ecs_routes.ts | 41 +- .../server/routes/related_routes.ts | 41 +- .../common/bedrock/constants.ts | 1 + .../stack_connectors/common/bedrock/schema.ts | 11 + .../server/connector_types/bedrock/bedrock.ts | 56 ++ x-pack/yarn.lock | 73 --- yarn.lock | 112 ++-- 31 files changed, 920 insertions(+), 254 deletions(-) create mode 100644 x-pack/packages/kbn-langchain/server/language_models/bedrock_chat_model.ts create mode 100644 x-pack/packages/kbn-langchain/server/language_models/utils_bedrock.ts delete mode 100644 x-pack/plugins/integration_assistant/server/providers/bedrock.ts delete mode 100644 x-pack/yarn.lock diff --git a/package.json b/package.json index ca956bfd43dc1..fc5410850aa7b 100644 --- a/package.json +++ b/package.json @@ -80,7 +80,7 @@ "resolutions": { "**/@bazel/typescript/protobufjs": "6.11.4", "**/@hello-pangea/dnd": "16.6.0", - "**/@langchain/core": "0.2.0", + "**/@langchain/core": "0.2.3", "**/@types/node": "20.10.5", "**/@typescript-eslint/utils": "5.62.0", "**/chokidar": "^3.5.3", @@ -916,10 +916,10 @@ "@kbn/watcher-plugin": "link:x-pack/plugins/watcher", "@kbn/xstate-utils": "link:packages/kbn-xstate-utils", "@kbn/zod-helpers": "link:packages/kbn-zod-helpers", - "@langchain/community": "^0.2.2", - "@langchain/core": "^0.2.0", - "@langchain/langgraph": "^0.0.20", - "@langchain/openai": "^0.0.33", + "@langchain/community": "^0.2.4", + "@langchain/core": "0.2.3", + "@langchain/langgraph": "^0.0.21", + "@langchain/openai": 
"^0.0.34", "@loaders.gl/core": "^3.4.7", "@loaders.gl/json": "^3.4.7", "@loaders.gl/shapefile": "^3.4.7", @@ -939,10 +939,10 @@ "@reduxjs/toolkit": "1.9.7", "@slack/webhook": "^7.0.1", "@smithy/eventstream-codec": "^3.0.0", - "@smithy/eventstream-serde-node": "^2.1.1", + "@smithy/eventstream-serde-node": "^3.0.0", "@smithy/protocol-http": "^4.0.0", "@smithy/signature-v4": "^3.0.0", - "@smithy/types": "^2.9.1", + "@smithy/types": "^3.0.0", "@smithy/util-utf8": "^3.0.0", "@tanstack/react-query": "^4.29.12", "@tanstack/react-query-devtools": "^4.29.12", @@ -1058,8 +1058,8 @@ "jsonwebtoken": "^9.0.2", "jsts": "^1.6.2", "kea": "^2.6.0", - "langchain": "^0.2.2", - "langsmith": "^0.1.28", + "langchain": "0.2.3", + "langsmith": "^0.1.30", "launchdarkly-js-client-sdk": "^3.1.4", "launchdarkly-node-server-sdk": "^7.0.3", "load-json-file": "^6.2.0", diff --git a/x-pack/packages/kbn-langchain/server/language_models/bedrock_chat_model.ts b/x-pack/packages/kbn-langchain/server/language_models/bedrock_chat_model.ts new file mode 100644 index 0000000000000..74582271fda9e --- /dev/null +++ b/x-pack/packages/kbn-langchain/server/language_models/bedrock_chat_model.ts @@ -0,0 +1,132 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import { + BedrockChat as _BedrockChat, + convertMessagesToPromptAnthropic, +} from '@langchain/community/chat_models/bedrock/web'; +import { AIMessage, BaseMessage } from '@langchain/core/messages'; +import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager'; +import { ChatResult, ChatGenerationChunk } from '@langchain/core/outputs'; +import { PluginStartContract } from '@kbn/actions-plugin/server/plugin'; +import { BaseChatModelParams } from '@langchain/core/language_models/chat_models'; +import { BaseBedrockInput, BedrockLLMInputOutputAdapter } from './utils_bedrock'; + +export class BedrockChat extends _BedrockChat { + actionsClient: Awaited>; + connectorId: string; + + constructor({ + actionsClient, + connectorId, + ...params + }: { + actionsClient: Awaited>; + connectorId: string; + } & Partial & + BaseChatModelParams) { + // Just to make Langchain BedrockChat happy + super({ ...params, credentials: { accessKeyId: '', secretAccessKey: '' } }); + + this.actionsClient = actionsClient; + this.connectorId = connectorId; + } + + async _generate( + messages: BaseMessage[], + options: this['ParsedCallOptions'], + runManager?: CallbackManagerForLLMRun + ): Promise { + const service = 'bedrock-runtime'; + const endpointHost = this.endpointHost ?? 
`${service}.${this.region}.amazonaws.com`; + const provider = this.model.split('.')[0]; + if (this.streaming) { + const stream = this._streamResponseChunks(messages, options, runManager); + let finalResult: ChatGenerationChunk | undefined; + for await (const chunk of stream) { + if (finalResult === undefined) { + finalResult = chunk; + } else { + finalResult = finalResult.concat(chunk); + } + } + if (finalResult === undefined) { + throw new Error('Could not parse final output from Bedrock streaming call.'); + } + return { + generations: [finalResult], + llmOutput: finalResult.generationInfo, + }; + } + + const response = await this._signedFetch(messages, options, { + bedrockMethod: 'invoke', + endpointHost, + provider, + }); + const json = await response.data.json(); + if (response.status !== 'ok') { + throw new Error(`Error ${response.status}: ${json.message ?? JSON.stringify(json)}`); + } + if (this.usesMessagesApi) { + const outputGeneration = BedrockLLMInputOutputAdapter.prepareMessagesOutput(provider, json); + if (outputGeneration === undefined) { + throw new Error('Failed to parse output generation.'); + } + return { + generations: [outputGeneration], + llmOutput: outputGeneration.generationInfo, + }; + } else { + const text = BedrockLLMInputOutputAdapter.prepareOutput(provider, json); + return { generations: [{ text, message: new AIMessage(text) }] }; + } + } + + async _signedFetch( + messages: BaseMessage[], + options: this['ParsedCallOptions'], + fields: { + bedrockMethod: 'invoke' | 'invoke-with-response-stream'; + endpointHost: string; + provider: string; + } + ) { + const { bedrockMethod, endpointHost, provider } = fields; + const inputBody = this.usesMessagesApi + ? BedrockLLMInputOutputAdapter.prepareMessagesInput( + provider, + messages, + this.maxTokens, + this.temperature, + options.stop ?? 
this.stopSequences, + this.modelKwargs + ) + : BedrockLLMInputOutputAdapter.prepareInput( + provider, + convertMessagesToPromptAnthropic(messages), + this.maxTokens, + this.temperature, + options.stop ?? this.stopSequences, + this.modelKwargs, + fields.bedrockMethod + ); + + return this.actionsClient.execute({ + actionId: this.connectorId, + params: { + subAction: 'runApiRaw', + subActionParams: { + bedrockMethod, + model: this.model, + endpointHost, + body: JSON.stringify(inputBody), + }, + }, + }) as unknown as Promise; + } +} diff --git a/x-pack/packages/kbn-langchain/server/language_models/index.ts b/x-pack/packages/kbn-langchain/server/language_models/index.ts index fcde4156e0d02..ac42bd2f81c2c 100644 --- a/x-pack/packages/kbn-langchain/server/language_models/index.ts +++ b/x-pack/packages/kbn-langchain/server/language_models/index.ts @@ -8,3 +8,4 @@ export { ActionsClientChatOpenAI } from './chat_openai'; export { ActionsClientLlm } from './llm'; export { ActionsClientSimpleChatModel } from './simple_chat_model'; +export { BedrockChat } from './bedrock_chat_model'; diff --git a/x-pack/packages/kbn-langchain/server/language_models/utils_bedrock.ts b/x-pack/packages/kbn-langchain/server/language_models/utils_bedrock.ts new file mode 100644 index 0000000000000..c14b94809931f --- /dev/null +++ b/x-pack/packages/kbn-langchain/server/language_models/utils_bedrock.ts @@ -0,0 +1,491 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +import type { AwsCredentialIdentity, Provider } from '@aws-sdk/types'; +import { AIMessage, AIMessageChunk, BaseMessage } from '@langchain/core/messages'; +import { ChatGeneration, ChatGenerationChunk } from '@langchain/core/outputs'; + +export type CredentialType = AwsCredentialIdentity | Provider; + +function _formatImage(imageUrl: string) { + const regex = /^data:(image\/.+);base64,(.+)$/; + const match = imageUrl.match(regex); + if (match === null) { + throw new Error( + [ + 'Anthropic only supports base64-encoded images currently.', + 'Example: data:image/png;base64,/9j/4AAQSk...', + ].join('\n\n') + ); + } + return { + type: 'base64', + media_type: match[1] ?? '', + data: match[2] ?? '', + // eslint-disable-next-line @typescript-eslint/no-explicit-any + } as any; +} + +function formatMessagesForAnthropic(messages: BaseMessage[]): { + system?: string; + messages: Array>; +} { + let system: string | undefined; + if (messages.length > 0 && messages[0]._getType() === 'system') { + if (typeof messages[0].content !== 'string') { + throw new Error('System message content must be a string.'); + } + system = messages[0].content; + } + const conversationMessages = system !== undefined ? 
messages.slice(1) : messages; + const formattedMessages = conversationMessages.map((message) => { + let role; + if (message._getType() === 'human') { + role = 'user' as const; + } else if (message._getType() === 'ai') { + role = 'assistant' as const; + } else if (message._getType() === 'system') { + throw new Error('System messages are only permitted as the first passed message.'); + } else { + throw new Error(`Message type "${message._getType()}" is not supported.`); + } + if (typeof message.content === 'string') { + return { + role, + content: message.content, + }; + } else { + return { + role, + content: message.content.map((contentPart) => { + if (contentPart.type === 'image_url') { + let source; + if (typeof contentPart.image_url === 'string') { + source = _formatImage(contentPart.image_url); + } else { + source = _formatImage(contentPart.image_url.url); + } + return { + type: 'image' as const, + source, + }; + } else { + return contentPart; + } + }), + }; + } + }); + return { + messages: formattedMessages, + system, + }; +} + +/** + * format messages for Cohere Command-R and CommandR+ via AWS Bedrock. + * + * @param messages messages The base messages to format as a prompt. + * + * @returns The formatted prompt for Cohere. + * + * `system`: user system prompts. Overrides the default preamble for search query generation. Has no effect on tool use generations.\ + * `message`: (Required) Text input for the model to respond to.\ + * `chatHistory`: A list of previous messages between the user and the model, meant to give the model conversational context for responding to the user's message.\ + * The following are required fields. + * - `role` - The role for the message. 
Valid values are USER or CHATBOT.\ + * - `message` – Text contents of the message.\ + * + * The following is example JSON for the chat_history field.\ + * "chat_history": [ + * {"role": "USER", "message": "Who discovered gravity?"}, + * {"role": "CHATBOT", "message": "The man who is widely credited with discovering gravity is Sir Isaac Newton"}]\ + * + * docs: https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-cohere-command-r-plus.html + */ +function formatMessagesForCohere(messages: BaseMessage[]): { + system?: string; + message: string; + chatHistory: Array>; +} { + const systemMessages = messages.filter((system) => system._getType() === 'system'); + + const system = systemMessages + .filter((m) => typeof m.content === 'string') + .map((m) => m.content) + .join('\n\n'); + + const conversationMessages = messages.filter((message) => message._getType() !== 'system'); + + const questionContent = conversationMessages.slice(-1); + + if (!questionContent.length || questionContent[0]._getType() !== 'human') { + throw new Error('question message content must be a human message.'); + } + + if (typeof questionContent[0].content !== 'string') { + throw new Error('question message content must be a string.'); + } + + const formattedMessage = questionContent[0].content; + + const formattedChatHistories = conversationMessages.slice(0, -1).map((message) => { + let role; + switch (message._getType()) { + case 'human': + role = 'USER' as const; + break; + case 'ai': + role = 'CHATBOT' as const; + break; + case 'system': + throw new Error('chat_history can not include system prompts.'); + default: + throw new Error(`Message type "${message._getType()}" is not supported.`); + } + + if (typeof message.content !== 'string') { + throw new Error('message content must be a string.'); + } + return { + role, + message: message.content, + }; + }); + + return { + chatHistory: formattedChatHistories, + message: formattedMessage, + system, + }; +} + +/** Bedrock models. 
+ To authenticate, the AWS client uses the following methods to automatically load credentials: + https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html + If a specific credential profile should be used, you must pass the name of the profile from the ~/.aws/credentials file that is to be used. + Make sure the credentials / roles used have the required policies to access the Bedrock service. +*/ +export interface BaseBedrockInput { + /** Model to use. + For example, "amazon.titan-tg1-large", this is equivalent to the modelId property in the list-foundation-models api. + */ + model: string; + + /** The AWS region e.g. `us-west-2`. + Fallback to AWS_DEFAULT_REGION env variable or region specified in ~/.aws/config in case it is not provided here. + */ + region?: string; + + /** AWS Credentials. + If no credentials are provided, the default credentials from `@aws-sdk/credential-provider-node` will be used. + */ + credentials?: CredentialType; + + /** Temperature. */ + temperature?: number; + + /** Max tokens. */ + maxTokens?: number; + + /** A custom fetch function for low-level access to AWS API. Defaults to fetch(). */ + fetchFn?: typeof fetch; + + /** @deprecated Use endpointHost instead Override the default endpoint url. */ + endpointUrl?: string; + + /** Override the default endpoint hostname. */ + endpointHost?: string; + + /** + * Optional additional stop sequences to pass to the model. Currently only supported for Anthropic and AI21. + * @deprecated Use .bind({ "stop": [...] }) instead + * */ + stopSequences?: string[]; + + /** Additional kwargs to pass to the model. */ + modelKwargs?: Record; + + /** Whether or not to stream responses */ + streaming: boolean; +} + +interface Dict { + [key: string]: unknown; +} + +/** + * A helper class used within the `Bedrock` class. It is responsible for + * preparing the input and output for the Bedrock service. 
It formats the + * input prompt based on the provider (e.g., "anthropic", "ai21", + * "amazon") and extracts the generated text from the service response. + */ +export class BedrockLLMInputOutputAdapter { + /** Adapter class to prepare the inputs from Langchain to a format + that LLM model expects. Also, provides a helper function to extract + the generated text from the model response. */ + + static prepareInput( + provider: string, + prompt: string, + maxTokens = 50, + temperature = 0, + stopSequences: string[] | undefined = undefined, + modelKwargs: Record = {}, + bedrockMethod: 'invoke' | 'invoke-with-response-stream' = 'invoke' + ): Dict { + const inputBody: Dict = {}; + + if (provider === 'anthropic') { + inputBody.prompt = prompt; + inputBody.max_tokens_to_sample = maxTokens; + inputBody.temperature = temperature; + inputBody.stop_sequences = stopSequences; + } else if (provider === 'ai21') { + inputBody.prompt = prompt; + inputBody.maxTokens = maxTokens; + inputBody.temperature = temperature; + inputBody.stopSequences = stopSequences; + } else if (provider === 'meta') { + inputBody.prompt = prompt; + inputBody.max_gen_len = maxTokens; + inputBody.temperature = temperature; + } else if (provider === 'amazon') { + inputBody.inputText = prompt; + inputBody.textGenerationConfig = { + maxTokenCount: maxTokens, + temperature, + }; + } else if (provider === 'cohere') { + inputBody.prompt = prompt; + inputBody.max_tokens = maxTokens; + inputBody.temperature = temperature; + inputBody.stop_sequences = stopSequences; + if (bedrockMethod === 'invoke-with-response-stream') { + inputBody.stream = true; + } + } else if (provider === 'mistral') { + inputBody.prompt = prompt; + inputBody.max_tokens = maxTokens; + inputBody.temperature = temperature; + inputBody.stop = stopSequences; + } + return { ...inputBody, ...modelKwargs }; + } + + static prepareMessagesInput( + provider: string, + messages: BaseMessage[], + maxTokens = 1024, + temperature = 0, + stopSequences: 
string[] | undefined = undefined, + modelKwargs: Record = {} + ): Dict { + const inputBody: Dict = {}; + + if (provider === 'anthropic') { + const { system, messages: formattedMessages } = formatMessagesForAnthropic(messages); + if (system !== undefined) { + inputBody.system = system; + } + inputBody.anthropic_version = 'bedrock-2023-05-31'; + inputBody.messages = formattedMessages; + inputBody.max_tokens = maxTokens; + inputBody.temperature = temperature; + inputBody.stop_sequences = stopSequences; + return { ...inputBody, ...modelKwargs }; + } else if (provider === 'cohere') { + const { + system, + message: formattedMessage, + chatHistory: formattedChatHistories, + } = formatMessagesForCohere(messages); + + if (system !== undefined && system.length > 0) { + inputBody.preamble = system; + } + inputBody.message = formattedMessage; + inputBody.chat_history = formattedChatHistories; + inputBody.max_tokens = maxTokens; + inputBody.temperature = temperature; + inputBody.stop_sequences = stopSequences; + return { ...inputBody, ...modelKwargs }; + } else { + throw new Error('The messages API is currently only supported by Anthropic or Cohere'); + } + } + + /** + * Extracts the generated text from the service response. + * @param provider The provider name. + * @param responseBody The response body from the service. + * @returns The generated text. + */ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + static prepareOutput(provider: string, responseBody: any): string { + if (provider === 'anthropic') { + return responseBody.completion; + } else if (provider === 'ai21') { + return responseBody?.completions?.[0]?.data?.text ?? ''; + } else if (provider === 'cohere') { + return responseBody?.generations?.[0]?.text ?? responseBody?.text ?? 
''; + } else if (provider === 'meta') { + return responseBody.generation; + } else if (provider === 'mistral') { + return responseBody?.outputs?.[0]?.text; + } + + // I haven't been able to get a response with more than one result in it. + return responseBody.results?.[0]?.outputText; + } + + static prepareMessagesOutput( + provider: string, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + response: any + ): ChatGeneration | undefined { + const responseBody = response ?? {}; + if (provider === 'anthropic') { + if (responseBody.type === 'message_start') { + return parseMessage(responseBody.message, true); + } else if ( + responseBody.type === 'content_block_delta' && + responseBody.delta?.type === 'text_delta' && + typeof responseBody.delta?.text === 'string' + ) { + return new ChatGenerationChunk({ + message: new AIMessageChunk({ + content: responseBody.delta.text, + }), + text: responseBody.delta.text, + }); + } else if (responseBody.type === 'message_delta') { + return new ChatGenerationChunk({ + message: new AIMessageChunk({ content: '' }), + text: '', + generationInfo: { + ...responseBody.delta, + usage: responseBody.usage, + }, + }); + } else if ( + responseBody.type === 'message_stop' && + responseBody['amazon-bedrock-invocationMetrics'] !== undefined + ) { + return new ChatGenerationChunk({ + message: new AIMessageChunk({ content: '' }), + text: '', + generationInfo: { + 'amazon-bedrock-invocationMetrics': responseBody['amazon-bedrock-invocationMetrics'], + }, + }); + } else if (responseBody.type === 'message') { + return parseMessage(responseBody); + } else { + return undefined; + } + } else if (provider === 'cohere') { + if (responseBody.event_type === 'stream-start') { + return parseMessageCohere(responseBody.message, true); + } else if ( + responseBody.event_type === 'text-generation' && + typeof responseBody?.text === 'string' + ) { + return new ChatGenerationChunk({ + message: new AIMessageChunk({ + content: responseBody.text, + }), + 
text: responseBody.text, + }); + } else if (responseBody.event_type === 'search-queries-generation') { + return parseMessageCohere(responseBody); + } else if ( + responseBody.event_type === 'stream-end' && + responseBody.response !== undefined && + responseBody['amazon-bedrock-invocationMetrics'] !== undefined + ) { + return new ChatGenerationChunk({ + message: new AIMessageChunk({ content: '' }), + text: '', + generationInfo: { + response: responseBody.response, + 'amazon-bedrock-invocationMetrics': responseBody['amazon-bedrock-invocationMetrics'], + }, + }); + } else { + if ( + responseBody.finish_reason === 'COMPLETE' || + responseBody.finish_reason === 'MAX_TOKENS' + ) { + return parseMessageCohere(responseBody); + } else { + return undefined; + } + } + } else { + throw new Error('The messages API is currently only supported by Anthropic or Cohere.'); + } + } +} + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function parseMessage(responseBody: any, asChunk?: boolean): ChatGeneration { + const { content, id, ...generationInfo } = responseBody; + let parsedContent; + if (Array.isArray(content) && content.length === 1 && content[0].type === 'text') { + parsedContent = content[0].text; + } else if (Array.isArray(content) && content.length === 0) { + parsedContent = ''; + } else { + parsedContent = content; + } + if (asChunk) { + return new ChatGenerationChunk({ + message: new AIMessageChunk({ + content: parsedContent, + additional_kwargs: { id }, + }), + text: typeof parsedContent === 'string' ? parsedContent : '', + generationInfo, + }); + } else { + return { + message: new AIMessage({ + content: parsedContent, + additional_kwargs: { id }, + }), + text: typeof parsedContent === 'string' ? 
parsedContent : '', + generationInfo, + }; + } +} + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function parseMessageCohere(responseBody: any, asChunk?: boolean): ChatGeneration { + const { text, ...generationInfo } = responseBody; + let parsedContent = text; + if (typeof text !== 'string') { + parsedContent = ''; + } + if (asChunk) { + return new ChatGenerationChunk({ + message: new AIMessageChunk({ + content: parsedContent, + }), + text: parsedContent, + generationInfo, + }); + } else { + return { + message: new AIMessage({ + content: parsedContent, + }), + text: parsedContent, + generationInfo, + }; + } +} diff --git a/x-pack/plugins/actions/server/sub_action_framework/sub_action_connector.ts b/x-pack/plugins/actions/server/sub_action_framework/sub_action_connector.ts index 19cc7e90d6254..c5a6477d03e80 100644 --- a/x-pack/plugins/actions/server/sub_action_framework/sub_action_connector.ts +++ b/x-pack/plugins/actions/server/sub_action_framework/sub_action_connector.ts @@ -162,7 +162,7 @@ export abstract class SubActionConnector { timeout, }); - this.validateResponse(responseSchema, res.data); + // this.validateResponse(responseSchema, res.data); return res; } catch (error) { diff --git a/x-pack/plugins/elastic_assistant/server/routes/post_actions_connector_execute.ts b/x-pack/plugins/elastic_assistant/server/routes/post_actions_connector_execute.ts index 2d53106bacf13..981e0cb61de81 100644 --- a/x-pack/plugins/elastic_assistant/server/routes/post_actions_connector_execute.ts +++ b/x-pack/plugins/elastic_assistant/server/routes/post_actions_connector_execute.ts @@ -5,6 +5,8 @@ * 2.0. 
*/ +/* eslint-disable complexity */ + import { IRouter, Logger } from '@kbn/core/server'; import { transformError } from '@kbn/securitysolution-es-utils'; import { getRequestAbortedSignal } from '@kbn/data-plugin/server'; diff --git a/x-pack/plugins/integration_assistant/kibana.jsonc b/x-pack/plugins/integration_assistant/kibana.jsonc index 55443d71b7f09..619448acaac8d 100644 --- a/x-pack/plugins/integration_assistant/kibana.jsonc +++ b/x-pack/plugins/integration_assistant/kibana.jsonc @@ -12,6 +12,7 @@ "integration_assistant" ], "requiredPlugins": [ + "actions", "licensing", "management", "features", diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts index 130de2094d166..e153bafb7358a 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts @@ -5,16 +5,14 @@ * 2.0. 
*/ import { JsonOutputParser } from '@langchain/core/output_parsers'; -import { getModel } from '../../providers/bedrock'; +import { BedrockChat } from '@kbn/langchain/server/language_models'; import { CategorizationState } from '../../types'; import { combineProcessors } from '../../util/pipeline'; import { Pipeline } from '../../../common'; import { CATEGORIZATION_MAIN_PROMPT } from './prompts'; -export async function handleCategorization(state: CategorizationState) { +export async function handleCategorization(state: CategorizationState, model: BedrockChat) { const categorizationMainPrompt = CATEGORIZATION_MAIN_PROMPT; - const model = getModel(); - const outputParser = new JsonOutputParser(); const categorizationMainGraph = categorizationMainPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts index 695ce727ed58b..5a5114d63d3d0 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts @@ -5,15 +5,14 @@ * 2.0. 
*/ import { JsonOutputParser } from '@langchain/core/output_parsers'; +import { BedrockChat } from '@kbn/langchain/server/language_models'; import { CATEGORIZATION_ERROR_PROMPT } from './prompts'; -import { getModel } from '../../providers/bedrock'; import { CategorizationState } from '../../types'; import { combineProcessors } from '../../util/pipeline'; import { Pipeline } from '../../../common'; -export async function handleErrors(state: CategorizationState) { +export async function handleErrors(state: CategorizationState, model: BedrockChat) { const categorizationErrorPrompt = CATEGORIZATION_ERROR_PROMPT; - const model = getModel(); const outputParser = new JsonOutputParser(); const categorizationErrorGraph = categorizationErrorPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts index e83162b37abac..429eb3f387d28 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts @@ -141,16 +141,16 @@ function chainRouter(state: CategorizationState): string { return END; } -export async function getCategorizationGraph() { +export async function getCategorizationGraph(model) { const workflow = new StateGraph({ channels: graphState, }) .addNode('modelInput', modelInput) .addNode('modelOutput', modelOutput) - .addNode('handleCategorization', handleCategorization) + .addNode('handleCategorization', (state) => handleCategorization(state, model)) .addNode('handleValidatePipeline', handleValidatePipeline) .addNode('handleCategorizationValidation', handleCategorizationValidation) - .addNode('handleInvalidCategorization', handleInvalidCategorization) + .addNode('handleInvalidCategorization', (state) => handleInvalidCategorization(state, model)) .addNode('handleErrors', handleErrors) .addNode('handleReview', handleReview) 
.addEdge(START, 'modelInput') diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts index 14063eb7064e4..f8ca982ac3ae9 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts @@ -6,15 +6,13 @@ */ import { JsonOutputParser } from '@langchain/core/output_parsers'; import { CATEGORIZATION_VALIDATION_PROMPT } from './prompts'; -import { getModel } from '../../providers/bedrock'; import { CategorizationState } from '../../types'; import { combineProcessors } from '../../util/pipeline'; import { ECS_EVENT_TYPES_PER_CATEGORY } from './constants'; import { Pipeline } from '../../../common'; -export async function handleInvalidCategorization(state: CategorizationState) { +export async function handleInvalidCategorization(state: CategorizationState, model) { const categorizationInvalidPrompt = CATEGORIZATION_VALIDATION_PROMPT; - const model = getModel(); const outputParser = new JsonOutputParser(); const categorizationInvalidGraph = categorizationInvalidPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts index 9c6150dfcb381..77c73f882867c 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts @@ -5,17 +5,16 @@ * 2.0. 
*/ import { JsonOutputParser } from '@langchain/core/output_parsers'; +import { BedrockChat } from '@kbn/langchain/server/language_models'; import { CATEGORIZATION_REVIEW_PROMPT } from './prompts'; -import { getModel } from '../../providers/bedrock'; + import { CategorizationState } from '../../types'; import { combineProcessors } from '../../util/pipeline'; import { ECS_EVENT_TYPES_PER_CATEGORY } from './constants'; import { Pipeline } from '../../../common'; -export async function handleReview(state: CategorizationState) { +export async function handleReview(state: CategorizationState, model: BedrockChat) { const categorizationReviewPrompt = CATEGORIZATION_REVIEW_PROMPT; - const model = getModel(); - const outputParser = new JsonOutputParser(); const categorizationReview = categorizationReviewPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts index 73393114f043c..ce4dbb6322462 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts @@ -5,14 +5,12 @@ * 2.0. 
*/ import { JsonOutputParser } from '@langchain/core/output_parsers'; +import { BedrockChat } from '@kbn/langchain/server/language_models'; import { ECS_DUPLICATES_PROMPT } from './prompts'; -import { getModel } from '../../providers/bedrock'; import { EcsMappingState } from '../../types'; -export async function handleDuplicates(state: EcsMappingState) { +export async function handleDuplicates(state: EcsMappingState, model: BedrockChat) { const ecsDuplicatesPrompt = ECS_DUPLICATES_PROMPT; - const model = getModel(); - const outputParser = new JsonOutputParser(); const ecsDuplicatesGraph = ecsDuplicatesPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts index 11ebccec9f83f..8bbdc429f8115 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts @@ -6,6 +6,7 @@ */ import { StateGraph, StateGraphArgs, END, START } from '@langchain/langgraph'; +import { BedrockChat } from '@kbn/langchain/server/language_models'; import { ECS_EXAMPLE_ANSWER, ECS_FIELDS } from './constants'; import { modifySamples, mergeSamples } from '../../util/samples'; import { createPipeline } from './pipeline'; @@ -135,17 +136,17 @@ function chainRouter(state: EcsMappingState): string { return END; } -export async function getEcsGraph() { +export async function getEcsGraph(model: BedrockChat) { const workflow = new StateGraph({ channels: graphState, }) .addNode('modelInput', modelInput) .addNode('modelOutput', modelOutput) - .addNode('handleEcsMapping', handleEcsMapping) + .addNode('handleEcsMapping', (state) => handleEcsMapping(state, model)) .addNode('handleValidation', handleValidateMappings) - .addNode('handleDuplicates', handleDuplicates) - .addNode('handleMissingKeys', handleMissingKeys) - .addNode('handleInvalidEcs', handleInvalidEcs) + .addNode('handleDuplicates', (state) => 
handleDuplicates(state, model)) + .addNode('handleMissingKeys', (state) => handleMissingKeys(state, model)) + .addNode('handleInvalidEcs', (state) => handleInvalidEcs(state, model)) .addEdge(START, 'modelInput') .addEdge('modelOutput', END) .addEdge('handleEcsMapping', 'handleValidation') diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts index 2abb87d35b58b..2e41f4a0bf01f 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts @@ -5,14 +5,12 @@ * 2.0. */ import { JsonOutputParser } from '@langchain/core/output_parsers'; +import { BedrockChat } from '@kbn/langchain/server/language_models'; import { ECS_INVALID_PROMPT } from './prompts'; -import { getModel } from '../../providers/bedrock'; import { EcsMappingState } from '../../types'; -export async function handleInvalidEcs(state: EcsMappingState) { +export async function handleInvalidEcs(state: EcsMappingState, model: BedrockChat) { const ecsInvalidEcsPrompt = ECS_INVALID_PROMPT; - const model = getModel(); - const outputParser = new JsonOutputParser(); const ecsInvalidEcsGraph = ecsInvalidEcsPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts index 7df23d3d0e267..82c65013362e7 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts @@ -5,14 +5,12 @@ * 2.0. 
*/ import { JsonOutputParser } from '@langchain/core/output_parsers'; +import { BedrockChat } from '@kbn/langchain/server/language_models'; import { ECS_MAIN_PROMPT } from './prompts'; -import { getModel } from '../../providers/bedrock'; import { EcsMappingState } from '../../types'; -export async function handleEcsMapping(state: EcsMappingState) { +export async function handleEcsMapping(state: EcsMappingState, model: BedrockChat) { const ecsMainPrompt = ECS_MAIN_PROMPT; - const model = getModel(); - const outputParser = new JsonOutputParser(); const ecsMainGraph = ecsMainPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts index f390f28365deb..bfb83f94f9b0d 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts @@ -5,14 +5,12 @@ * 2.0. */ import { JsonOutputParser } from '@langchain/core/output_parsers'; +import { BedrockChat } from '@kbn/langchain/server/language_models'; import { ECS_MISSING_KEYS_PROMPT } from './prompts'; -import { getModel } from '../../providers/bedrock'; import { EcsMappingState } from '../../types'; -export async function handleMissingKeys(state: EcsMappingState) { +export async function handleMissingKeys(state: EcsMappingState, model: BedrockChat) { const ecsMissingPrompt = ECS_MISSING_KEYS_PROMPT; - const model = getModel(); - const outputParser = new JsonOutputParser(); const ecsMissingGraph = ecsMissingPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts b/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts index 097df08ba8387..74694ebdb0a31 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts @@ -5,16 +5,14 @@ * 2.0. 
*/ import { JsonOutputParser } from '@langchain/core/output_parsers'; +import { BedrockChat } from '@kbn/langchain/server/language_models'; import { RELATED_ERROR_PROMPT } from './prompts'; -import { getModel } from '../../providers/bedrock'; import { RelatedState } from '../../types'; import { combineProcessors } from '../../util/pipeline'; import { Pipeline } from '../../../common'; -export async function handleErrors(state: RelatedState) { +export async function handleErrors(state: RelatedState, model: BedrockChat) { const relatedErrorPrompt = RELATED_ERROR_PROMPT; - const model = getModel(); - const outputParser = new JsonOutputParser(); const relatedErrorGraph = relatedErrorPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts index 96d1a01e9fb1f..a2064360585d7 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts @@ -5,6 +5,7 @@ * 2.0. 
*/ import { StateGraph, StateGraphArgs, END, START } from '@langchain/langgraph'; +import { BedrockChat } from '@kbn/langchain/server/language_models'; import { RelatedState } from '../../types'; import { modifySamples, formatSamples } from '../../util/samples'; import { handleValidatePipeline } from '../../util/graph'; @@ -134,14 +135,14 @@ function chainRouter(state: RelatedState): string { return END; } -export async function getRelatedGraph() { +export async function getRelatedGraph(model: BedrockChat) { const workflow = new StateGraph({ channels: graphState }) .addNode('modelInput', modelInput) .addNode('modelOutput', modelOutput) - .addNode('handleRelated', handleRelated) + .addNode('handleRelated', (state) => handleRelated(state, model)) .addNode('handleValidatePipeline', handleValidatePipeline) - .addNode('handleErrors', handleErrors) - .addNode('handleReview', handleReview) + .addNode('handleErrors', (state) => handleErrors(state, model)) + .addNode('handleReview', (state) => handleReview(state, model)) .addEdge(START, 'modelInput') .addEdge('modelOutput', END) .addEdge('handleRelated', 'handleValidatePipeline') diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/related.ts b/x-pack/plugins/integration_assistant/server/graphs/related/related.ts index b93597064bbd9..2f5b131c5c95e 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/related.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/related.ts @@ -5,16 +5,14 @@ * 2.0. 
*/ import { JsonOutputParser } from '@langchain/core/output_parsers'; +import { BedrockChat } from '@kbn/langchain/server/language_models'; import { RELATED_MAIN_PROMPT } from './prompts'; -import { getModel } from '../../providers/bedrock'; import { RelatedState } from '../../types'; import { combineProcessors } from '../../util/pipeline'; import { Pipeline } from '../../../common'; -export async function handleRelated(state: RelatedState) { +export async function handleRelated(state: RelatedState, model: BedrockChat) { const relatedMainPrompt = RELATED_MAIN_PROMPT; - const model = getModel(); - const outputParser = new JsonOutputParser(); const relatedMainGraph = relatedMainPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/review.ts b/x-pack/plugins/integration_assistant/server/graphs/related/review.ts index d50acefa694fa..9ddd9dde8052e 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/review.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/review.ts @@ -5,16 +5,14 @@ * 2.0. 
*/ import { JsonOutputParser } from '@langchain/core/output_parsers'; +import { BedrockChat } from '@kbn/langchain/server/language_models'; import { RELATED_REVIEW_PROMPT } from './prompts'; -import { getModel } from '../../providers/bedrock'; import { RelatedState } from '../../types'; import { combineProcessors } from '../../util/pipeline'; import { Pipeline } from '../../../common'; -export async function handleReview(state: RelatedState) { +export async function handleReview(state: RelatedState, model: BedrockChat) { const relatedReviewPrompt = RELATED_REVIEW_PROMPT; - const model = getModel(); - const outputParser = new JsonOutputParser(); const relatedReviewGraph = relatedReviewPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/plugin.ts b/x-pack/plugins/integration_assistant/server/plugin.ts index 7e36f759ff141..0a67c89813790 100644 --- a/x-pack/plugins/integration_assistant/server/plugin.ts +++ b/x-pack/plugins/integration_assistant/server/plugin.ts @@ -5,10 +5,26 @@ * 2.0. 
*/ -import { Plugin, PluginInitializerContext, CoreSetup, CoreStart, Logger } from '@kbn/core/server'; +import { + Plugin, + PluginInitializerContext, + CoreSetup, + CoreStart, + Logger, + CustomRequestHandlerContext, +} from '@kbn/core/server'; +import { PluginStartContract as ActionsPluginsStart } from '@kbn/actions-plugin/server/plugin'; import { registerRoutes } from './routes'; import { IntegrationAssistantPluginSetup, IntegrationAssistantPluginStart } from './types'; +export type IntegrationAssistantRouteHandlerContext = CustomRequestHandlerContext<{ + integrationAssistant: { + getStartServices: CoreSetup<{ + actions: ActionsPluginsStart; + }>['getStartServices']; + }; +}>; + export class IntegrationAssistantPlugin implements Plugin { @@ -17,9 +33,20 @@ export class IntegrationAssistantPlugin constructor(initializerContext: PluginInitializerContext) { this.logger = initializerContext.logger.get(); } - public setup(core: CoreSetup) { + public setup( + core: CoreSetup<{ + actions: ActionsPluginsStart; + }> + ) { + core.http.registerRouteHandlerContext< + IntegrationAssistantRouteHandlerContext, + 'integrationAssistant' + >('integrationAssistant', () => ({ + getStartServices: core.getStartServices, + })); const router = core.http.createRouter(); this.logger.debug('integrationAssistant api: Setup'); + registerRoutes(router); return {}; diff --git a/x-pack/plugins/integration_assistant/server/providers/bedrock.ts b/x-pack/plugins/integration_assistant/server/providers/bedrock.ts deleted file mode 100644 index 7b5590d4a3041..0000000000000 --- a/x-pack/plugins/integration_assistant/server/providers/bedrock.ts +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -import { BedrockChat } from '@langchain/community/chat_models/bedrock/web'; - -// TODO: This function is here temporarily during development, it is supposed to be replaced with the same connector used by Security Assistant. -export function getModel(): BedrockChat { - const model = new BedrockChat({ - model: 'anthropic.claude-3-opus-20240229-v1:0', - region: 'us-west-2', - temperature: 0.05, - maxTokens: 4096, - credentials: { - accessKeyId: process.env.AWS_ACCESS_KEY_ID || '', - secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY || '', - }, - modelKwargs: { - top_k: 200, - temperature: 0.05, - top_p: 0.4, - stop_sequences: ['Human:'], - }, - }); - return model; -} diff --git a/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts b/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts index a469355c9dc56..d60fefb02b810 100644 --- a/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts @@ -7,12 +7,16 @@ import { IRouter } from '@kbn/core/server'; import { schema } from '@kbn/config-schema'; +import { BedrockChat } from '@kbn/langchain/server/language_models'; import { CATEGORIZATION_GRAPH_PATH } from '../../common'; import type { CategorizationApiRequest, CategorizationApiResponse } from '../../common'; import { getCategorizationGraph } from '../graphs/categorization'; import { ROUTE_HANDLER_TIMEOUT } from '../constants'; +import { IntegrationAssistantRouteHandlerContext } from '../plugin'; -export function registerCategorizationRoutes(router: IRouter) { +export function registerCategorizationRoutes( + router: IRouter +) { router.post( { path: `${CATEGORIZATION_GRAPH_PATH}`, @@ -27,13 +31,41 @@ export function registerCategorizationRoutes(router: IRouter) { dataStreamName: schema.string(), rawSamples: schema.arrayOf(schema.string()), currentPipeline: schema.any(), + connectorId: schema.maybe(schema.string()), + 
model: schema.maybe(schema.string()), + region: schema.maybe(schema.string()), }), }, }, - async (_, req, res) => { + async (context, req, res) => { const { packageName, dataStreamName, rawSamples, currentPipeline } = req.body as CategorizationApiRequest; - const graph = await getCategorizationGraph(); + + const { getStartServices } = await context.integrationAssistant; + const [, { actions: actionsPlugin }] = await getStartServices(); + const actionsClient = await actionsPlugin.getActionsClientWithRequest(req); + const connector = req.body.connectorId + ? await actionsClient.get({ id: req.body.connectorId }) + : (await actionsClient.getAll()).filter( + (connectorItem) => connectorItem.actionTypeId === '.bedrock' + )[0]; + + const model = new BedrockChat({ + actionsClient, + connectorId: connector.id, + model: req.body.model || connector.config?.defaultModel, + region: req.body.region || connector.config?.apiUrl.split('.')[1], + temperature: 0.05, + maxTokens: 4096, + modelKwargs: { + top_k: 200, + temperature: 0.05, + top_p: 0.4, + stop_sequences: ['Human:'], + }, + }); + + const graph = await getCategorizationGraph(model); let results = { results: { docs: {}, pipeline: {} } }; try { results = (await graph.invoke({ @@ -43,8 +75,9 @@ export function registerCategorizationRoutes(router: IRouter) { currentPipeline, })) as CategorizationApiResponse; } catch (e) { - // TODO: Better error responses? 
- return e; + return res.badRequest({ + body: e, + }); } return res.ok({ body: results }); diff --git a/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts b/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts index 6d289e5e53397..a07ce6c3b6d55 100644 --- a/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts @@ -7,12 +7,14 @@ import { IRouter } from '@kbn/core/server'; import { schema } from '@kbn/config-schema'; +import { BedrockChat } from '@kbn/langchain/server/language_models'; import { ECS_GRAPH_PATH } from '../../common'; import { EcsMappingApiRequest, EcsMappingApiResponse } from '../../common/types'; import { getEcsGraph } from '../graphs/ecs'; import { ROUTE_HANDLER_TIMEOUT } from '../constants'; +import { IntegrationAssistantRouteHandlerContext } from '../plugin'; -export function registerEcsRoutes(router: IRouter) { +export function registerEcsRoutes(router: IRouter) { router.post( { path: `${ECS_GRAPH_PATH}`, @@ -28,12 +30,40 @@ export function registerEcsRoutes(router: IRouter) { rawSamples: schema.arrayOf(schema.string()), // TODO: This is a single nested object of any key or shape, any better schema? mapping: schema.maybe(schema.any()), + connectorId: schema.maybe(schema.string()), + region: schema.maybe(schema.string()), + model: schema.maybe(schema.string()), }), }, }, - async (_, req, res) => { + async (context, req, res) => { const { packageName, dataStreamName, rawSamples, mapping } = req.body as EcsMappingApiRequest; - const graph = await getEcsGraph(); + + const { getStartServices } = await context.integrationAssistant; + const [, { actions: actionsPlugin }] = await getStartServices(); + const actionsClient = await actionsPlugin.getActionsClientWithRequest(req); + const connector = req.body.connectorId + ? 
await actionsClient.get({ id: req.body.connectorId }) + : (await actionsClient.getAll()).filter( + (connectorItem) => connectorItem.actionTypeId === '.bedrock' + )[0]; + + const model = new BedrockChat({ + actionsClient, + connectorId: connector.id, + model: req.body.model || connector.config?.defaultModel, + region: req.body.region || connector.config?.apiUrl.split('.')[1], + temperature: 0.05, + maxTokens: 4096, + modelKwargs: { + top_k: 200, + temperature: 0.05, + top_p: 0.4, + stop_sequences: ['Human:'], + }, + }); + + const graph = await getEcsGraph(model); let results = { results: { mapping: {}, pipeline: {} } }; try { if (req.body?.mapping) { @@ -50,8 +80,9 @@ export function registerEcsRoutes(router: IRouter) { rawSamples, })) as EcsMappingApiResponse; } catch (e) { - // TODO: Better error responses? - return e; + return res.badRequest({ + body: e, + }); } return res.ok({ body: results }); diff --git a/x-pack/plugins/integration_assistant/server/routes/related_routes.ts b/x-pack/plugins/integration_assistant/server/routes/related_routes.ts index b439478f23a3c..c8354b6412bc9 100644 --- a/x-pack/plugins/integration_assistant/server/routes/related_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/related_routes.ts @@ -7,12 +7,14 @@ import { IRouter } from '@kbn/core/server'; import { schema } from '@kbn/config-schema'; +import { BedrockChat } from '@kbn/langchain/server/language_models'; import { RELATED_GRAPH_PATH } from '../../common'; import { RelatedApiRequest, RelatedApiResponse } from '../../common/types'; import { getRelatedGraph } from '../graphs/related'; import { ROUTE_HANDLER_TIMEOUT } from '../constants'; +import { IntegrationAssistantRouteHandlerContext } from '../plugin'; -export function registerRelatedRoutes(router: IRouter) { +export function registerRelatedRoutes(router: IRouter) { router.post( { path: `${RELATED_GRAPH_PATH}`, @@ -28,13 +30,41 @@ export function registerRelatedRoutes(router: IRouter) { rawSamples: 
schema.arrayOf(schema.string()), // TODO: This is a single nested object of any key or shape, any better schema? currentPipeline: schema.maybe(schema.any()), + connectorId: schema.maybe(schema.string()), + region: schema.maybe(schema.string()), + model: schema.maybe(schema.string()), }), }, }, - async (_, req, res) => { + async (context, req, res) => { const { packageName, dataStreamName, rawSamples, currentPipeline } = req.body as RelatedApiRequest; - const graph = await getRelatedGraph(); + + const { getStartServices } = await context.integrationAssistant; + const [, { actions: actionsPlugin }] = await getStartServices(); + const actionsClient = await actionsPlugin.getActionsClientWithRequest(req); + const connector = req.body.connectorId + ? await actionsClient.get({ id: req.body.connectorId }) + : (await actionsClient.getAll()).filter( + (connectorItem) => connectorItem.actionTypeId === '.bedrock' + )[0]; + + const model = new BedrockChat({ + actionsClient, + connectorId: connector.id, + model: req.body.model || connector.config?.defaultModel, + region: req.body.region || connector.config?.apiUrl.split('.')[1], + temperature: 0.05, + maxTokens: 4096, + modelKwargs: { + top_k: 200, + temperature: 0.05, + top_p: 0.4, + stop_sequences: ['Human:'], + }, + }); + + const graph = await getRelatedGraph(model); let results = { results: { docs: {}, pipeline: {} } }; try { results = (await graph.invoke({ @@ -44,8 +74,9 @@ export function registerRelatedRoutes(router: IRouter) { currentPipeline, })) as RelatedApiResponse; } catch (e) { - // TODO: Better error responses? 
- return e; + return res.badRequest({ + body: e, + }); } return res.ok({ body: results }); diff --git a/x-pack/plugins/stack_connectors/common/bedrock/constants.ts b/x-pack/plugins/stack_connectors/common/bedrock/constants.ts index 053ca82e0e274..81a4f8bf3aa83 100644 --- a/x-pack/plugins/stack_connectors/common/bedrock/constants.ts +++ b/x-pack/plugins/stack_connectors/common/bedrock/constants.ts @@ -16,6 +16,7 @@ export const BEDROCK_TITLE = i18n.translate( export const BEDROCK_CONNECTOR_ID = '.bedrock'; export enum SUB_ACTION { RUN = 'run', + RUN_RAW = 'runApiRaw', INVOKE_AI = 'invokeAI', INVOKE_STREAM = 'invokeStream', DASHBOARD = 'getDashboard', diff --git a/x-pack/plugins/stack_connectors/common/bedrock/schema.ts b/x-pack/plugins/stack_connectors/common/bedrock/schema.ts index 2fade1be5fc40..aa1100ba01657 100644 --- a/x-pack/plugins/stack_connectors/common/bedrock/schema.ts +++ b/x-pack/plugins/stack_connectors/common/bedrock/schema.ts @@ -19,6 +19,17 @@ export const SecretsSchema = schema.object({ secret: schema.string(), }); +export const RunRawActionParamsSchema = schema.object( + { + // body: schema.string(), + // model: schema.maybe(schema.string()), + // // abort signal from client + // signal: schema.maybe(schema.any()), + // timeout: schema.maybe(schema.number()), + }, + { unknowns: 'allow' } +); + export const RunActionParamsSchema = schema.object({ body: schema.string(), model: schema.maybe(schema.string()), diff --git a/x-pack/plugins/stack_connectors/server/connector_types/bedrock/bedrock.ts b/x-pack/plugins/stack_connectors/server/connector_types/bedrock/bedrock.ts index 1a4b6aad6653e..66cbdf68be293 100644 --- a/x-pack/plugins/stack_connectors/server/connector_types/bedrock/bedrock.ts +++ b/x-pack/plugins/stack_connectors/server/connector_types/bedrock/bedrock.ts @@ -5,6 +5,10 @@ * 2.0. 
*/ +import fetch from 'node-fetch'; +import { SignatureV4 } from '@smithy/signature-v4'; +import { HttpRequest } from '@smithy/protocol-http'; +import { Sha256 } from '@aws-crypto/sha256-js'; import { ServiceParams, SubActionConnector } from '@kbn/actions-plugin/server'; import aws from 'aws4'; import { AxiosError, Method } from 'axios'; @@ -18,6 +22,7 @@ import { StreamingResponseSchema, RunActionResponseSchema, RunApiLatestResponseSchema, + RunRawActionParamsSchema, } from '../../../common/bedrock/schema'; import { Config, @@ -78,6 +83,13 @@ export class BedrockConnector extends SubActionConnector { method: 'runApi', schema: RunActionParamsSchema, }); + + this.registerSubAction({ + name: SUB_ACTION.RUN_RAW, + method: 'runApiRaw', + schema: RunRawActionParamsSchema, + }); + this.registerSubAction({ name: SUB_ACTION.INVOKE_AI, method: 'invokeAI', @@ -202,6 +214,50 @@ The Kibana Connector in use may need to be reconfigured with an updated Amazon B }; } + public async runApiRaw({ + body, + bedrockMethod, + model, + signal, + timeout, + endpointHost, + }: RunActionParams): Promise { + const url = new URL(`https://${endpointHost}/model/${model}/${bedrockMethod}`); + + const request = new HttpRequest({ + hostname: url.hostname, + path: url.pathname, + protocol: url.protocol, + method: 'POST', // method must be uppercase + body, + query: Object.fromEntries(url.searchParams.entries()), + headers: { + // host is required by AWS Signature V4: https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html + host: url.host, + accept: 'application/json', + 'content-type': 'application/json', + }, + }); + + const signer = new SignatureV4({ + credentials: { + secretAccessKey: this.secrets.secret, + accessKeyId: this.secrets.accessKey, + }, + service: 'bedrock', + region: 'us-west-2', // this.region, + sha256: Sha256, + }); + + const signedRequest = await signer.sign(request); + + return fetch(url, { + headers: signedRequest.headers, + body: signedRequest.body, 
+ method: signedRequest.method, + }); + } + /** * responsible for making a POST request to the external API endpoint and returning the response data * @param body The stringified request body to be sent in the POST request. diff --git a/x-pack/yarn.lock b/x-pack/yarn.lock deleted file mode 100644 index 81a53aa291fe3..0000000000000 --- a/x-pack/yarn.lock +++ /dev/null @@ -1,73 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. -# yarn lockfile v1 - - -"@smithy/is-array-buffer@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/is-array-buffer/-/is-array-buffer-3.0.0.tgz#9a95c2d46b8768946a9eec7f935feaddcffa5e7a" - integrity sha512-+Fsu6Q6C4RSJiy81Y8eApjEB5gVtM+oFKTffg+jSuwtvomJJrhUJBu2zS8wjXSgH/g1MKEWrzyChTBe6clb5FQ== - dependencies: - tslib "^2.6.2" - -"@smithy/signature-v4@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/signature-v4/-/signature-v4-3.0.0.tgz#f536d0abebfeeca8e9aab846a4042658ca07d3b7" - integrity sha512-kXFOkNX+BQHe2qnLxpMEaCRGap9J6tUGLzc3A9jdn+nD4JdMwCKTJ+zFwQ20GkY+mAXGatyTw3HcoUlR39HwmA== - dependencies: - "@smithy/is-array-buffer" "^3.0.0" - "@smithy/types" "^3.0.0" - "@smithy/util-hex-encoding" "^3.0.0" - "@smithy/util-middleware" "^3.0.0" - "@smithy/util-uri-escape" "^3.0.0" - "@smithy/util-utf8" "^3.0.0" - tslib "^2.6.2" - -"@smithy/types@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/types/-/types-3.0.0.tgz#00231052945159c64ffd8b91e8909d8d3006cb7e" - integrity sha512-VvWuQk2RKFuOr98gFhjca7fkBS+xLLURT8bUjk5XQoV0ZLm7WPwWPPY3/AwzTLuUBDeoKDCthfe1AsTUWaSEhw== - dependencies: - tslib "^2.6.2" - -"@smithy/util-buffer-from@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-buffer-from/-/util-buffer-from-3.0.0.tgz#559fc1c86138a89b2edaefc1e6677780c24594e3" - integrity sha512-aEOHCgq5RWFbP+UDPvPot26EJHjOC+bRgse5A8V3FSShqd5E5UN4qc7zkwsvJPPAVsf73QwYcHN1/gt/rtLwQA== - dependencies: - "@smithy/is-array-buffer" "^3.0.0" - tslib 
"^2.6.2" - -"@smithy/util-hex-encoding@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-hex-encoding/-/util-hex-encoding-3.0.0.tgz#32938b33d5bf2a15796cd3f178a55b4155c535e6" - integrity sha512-eFndh1WEK5YMUYvy3lPlVmYY/fZcQE1D8oSf41Id2vCeIkKJXPcYDCZD+4+xViI6b1XSd7tE+s5AmXzz5ilabQ== - dependencies: - tslib "^2.6.2" - -"@smithy/util-middleware@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-middleware/-/util-middleware-3.0.0.tgz#64d775628b99a495ca83ce982f5c83aa45f1e894" - integrity sha512-q5ITdOnV2pXHSVDnKWrwgSNTDBAMHLptFE07ua/5Ty5WJ11bvr0vk2a7agu7qRhrCFRQlno5u3CneU5EELK+DQ== - dependencies: - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@smithy/util-uri-escape@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-uri-escape/-/util-uri-escape-3.0.0.tgz#e43358a78bf45d50bb736770077f0f09195b6f54" - integrity sha512-LqR7qYLgZTD7nWLBecUi4aqolw8Mhza9ArpNEQ881MJJIU2sE5iHCK6TdyqqzcDLy0OPe10IY4T8ctVdtynubg== - dependencies: - tslib "^2.6.2" - -"@smithy/util-utf8@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-utf8/-/util-utf8-3.0.0.tgz#1a6a823d47cbec1fd6933e5fc87df975286d9d6a" - integrity sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA== - dependencies: - "@smithy/util-buffer-from" "^3.0.0" - tslib "^2.6.2" - -tslib@^2.6.2: - version "2.6.2" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" - integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== diff --git a/yarn.lock b/yarn.lock index 3587084a1b081..664986abdf625 100644 --- a/yarn.lock +++ b/yarn.lock @@ -363,7 +363,7 @@ "@smithy/types" "^3.0.0" tslib "^2.6.2" -"@aws-sdk/types@3.577.0", "@aws-sdk/types@^3.577.0": +"@aws-sdk/types@3.577.0", "@aws-sdk/types@^3.222.0", "@aws-sdk/types@^3.577.0": version "3.577.0" resolved 
"https://registry.yarnpkg.com/@aws-sdk/types/-/types-3.577.0.tgz#7700784d368ce386745f8c340d9d68cea4716f90" integrity sha512-FT2JZES3wBKN/alfmhlo+3ZOq/XJ0C7QOZcDNrpKjB0kqYoKjhVKZ/Hx6ArR0czkKfHzBBEs6y40ebIHx2nSmA== @@ -371,14 +371,6 @@ "@smithy/types" "^3.0.0" tslib "^2.6.2" -"@aws-sdk/types@^3.222.0": - version "3.433.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/types/-/types-3.433.0.tgz#0f94eae2a4a3525ca872c9ab04e143c01806d755" - integrity sha512-0jEE2mSrNDd8VGFjTc1otYrwYPIkzZJEIK90ZxisKvQ/EURGBhNzWn7ejWB9XCMFT6XumYLBR0V9qq5UPisWtA== - dependencies: - "@smithy/types" "^2.4.0" - tslib "^2.5.0" - "@aws-sdk/util-endpoints@3.583.0": version "3.583.0" resolved "https://registry.yarnpkg.com/@aws-sdk/util-endpoints/-/util-endpoints-3.583.0.tgz#1554d3b4124be21a72a519603e9727d973845504" @@ -7041,10 +7033,10 @@ resolved "https://registry.yarnpkg.com/@kwsites/promise-deferred/-/promise-deferred-1.1.1.tgz#8ace5259254426ccef57f3175bc64ed7095ed919" integrity sha512-GaHYm+c0O9MjZRu0ongGBRbinu8gVAMd2UZjji6jVmqKtZluZnptXGWhz1E8j8D2HJ3f/yMxKAUC0b+57wncIw== -"@langchain/community@^0.2.2": - version "0.2.2" - resolved "https://registry.yarnpkg.com/@langchain/community/-/community-0.2.2.tgz#0ce7cd56ff8940fe73983f1853e165d334a2a446" - integrity sha512-TtlZnPBYt7Sujc1hAYvdZKUmV97wuF15O7b4nBX4lBfQeW38N0DwGbhqpitDbpaJqZ2s8DM4rjapECk0kIdAww== +"@langchain/community@^0.2.4": + version "0.2.4" + resolved "https://registry.yarnpkg.com/@langchain/community/-/community-0.2.4.tgz#fb5feb4f4a01a1b33adfd28ce7126d0dedb3e6d1" + integrity sha512-rwrPNQLyIe84TPqPYbYOfDA4G/ba1rdj7OtZg63dQmxIvNDOmUCh4xIQac2iuRUnM3o4Ben0Faa9qz+V5oPgIA== dependencies: "@langchain/core" "~0.2.0" "@langchain/openai" "~0.0.28" @@ -7052,16 +7044,16 @@ expr-eval "^2.0.2" flat "^5.0.2" js-yaml "^4.1.0" - langchain "~0.2.0" + langchain "0.2.3" langsmith "~0.1.1" uuid "^9.0.0" zod "^3.22.3" zod-to-json-schema "^3.22.5" -"@langchain/core@0.2.0", "@langchain/core@>0.1.0 <0.3.0", "@langchain/core@>0.1.56 <0.3.0", 
"@langchain/core@>0.1.61 <0.3.0", "@langchain/core@^0.2.0", "@langchain/core@~0.2.0": - version "0.2.0" - resolved "https://registry.yarnpkg.com/@langchain/core/-/core-0.2.0.tgz#19c6374a5ad80daf8e14cb58582bc988109a1403" - integrity sha512-UbCJUp9eh2JXd9AW/vhPbTgtZoMgTqJgSan5Wf/EP27X8JM65lWdCOpJW+gHyBXvabbyrZz3/EGaptTUL5gutw== +"@langchain/core@0.2.3", "@langchain/core@>0.1.0 <0.3.0", "@langchain/core@>0.1.56 <0.3.0", "@langchain/core@>0.1.61 <0.3.0", "@langchain/core@~0.2.0": + version "0.2.3" + resolved "https://registry.yarnpkg.com/@langchain/core/-/core-0.2.3.tgz#7faa82f92b0c7843506e827a38bfcbb60f009d13" + integrity sha512-mVuFHSLpPQ4yOHNXeoSA3LnmIMuFmUiit5rvbYcPZqM6SrB2zCNN2nD4Ty5+3H5X4tYItDoSqsTuUNUQySXRQw== dependencies: ansi-styles "^5.0.0" camelcase "6" @@ -7076,18 +7068,18 @@ zod "^3.22.4" zod-to-json-schema "^3.22.3" -"@langchain/langgraph@^0.0.20": - version "0.0.20" - resolved "https://registry.yarnpkg.com/@langchain/langgraph/-/langgraph-0.0.20.tgz#9229af1a79107916910fa65fe185bf66cda7736f" - integrity sha512-/byqz3WDbIQqaPDmC+Bo2n36LBpD42yj8wR7KiDZvrOIJSlMIoqwZeRkONEp9D7o61ZRaAMwoUJWriG8L9xdFg== +"@langchain/langgraph@^0.0.21": + version "0.0.21" + resolved "https://registry.yarnpkg.com/@langchain/langgraph/-/langgraph-0.0.21.tgz#5037597a954abad9ed5f0a1742226f5fcf27e7d7" + integrity sha512-7jtVZFAwvxSbIribYNzGXYIRrsAXV7YF4u1Xcpd8MYNz8sD3h8+rpIOJcYF1AdFh6laajnz0Gv8abPBHHQ2QiQ== dependencies: "@langchain/core" ">0.1.61 <0.3.0" uuid "^9.0.1" -"@langchain/openai@^0.0.33", "@langchain/openai@~0.0.28": - version "0.0.33" - resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-0.0.33.tgz#af88d815ff0095018c879d3a1a5a32b2795b5c69" - integrity sha512-hTBo9y9bHtFvMT5ySBW7TrmKhLSA91iNahigeqAFBVrLmBDz+6rzzLFc1mpq6JEAR3fZKdaUXqso3nB23jIpTw== +"@langchain/openai@^0.0.34", "@langchain/openai@~0.0.28": + version "0.0.34" + resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-0.0.34.tgz#36c9bca0721ab9f7e5d40927e7c0429cacbd5b56" + integrity 
sha512-M+CW4oXle5fdoz2T2SwdOef8pl3/1XmUx1vjn2mXUVM/128aO0l23FMF0SNBsAbRV6P+p/TuzjodchJbi0Ht/A== dependencies: "@langchain/core" ">0.1.56 <0.3.0" js-tiktoken "^1.0.12" @@ -8317,16 +8309,6 @@ "@smithy/url-parser" "^3.0.0" tslib "^2.6.2" -"@smithy/eventstream-codec@^2.1.1": - version "2.1.1" - resolved "https://registry.yarnpkg.com/@smithy/eventstream-codec/-/eventstream-codec-2.1.1.tgz#4405ab0f9c77d439c575560c4886e59ee17d6d38" - integrity sha512-E8KYBxBIuU4c+zrpR22VsVrOPoEDzk35bQR3E+xm4k6Pa6JqzkDOdMyf9Atac5GPNKHJBdVaQ4JtjdWX2rl/nw== - dependencies: - "@aws-crypto/crc32" "3.0.0" - "@smithy/types" "^2.9.1" - "@smithy/util-hex-encoding" "^2.1.1" - tslib "^2.5.0" - "@smithy/eventstream-codec@^3.0.0": version "3.0.0" resolved "https://registry.yarnpkg.com/@smithy/eventstream-codec/-/eventstream-codec-3.0.0.tgz#81d30391220f73d41f432f65384b606d67673e46" @@ -8337,23 +8319,23 @@ "@smithy/util-hex-encoding" "^3.0.0" tslib "^2.6.2" -"@smithy/eventstream-serde-node@^2.1.1": - version "2.1.1" - resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-node/-/eventstream-serde-node-2.1.1.tgz#2e1afa27f9c7eb524c1c53621049c5e4e3cea6a5" - integrity sha512-LF882q/aFidFNDX7uROAGxq3H0B7rjyPkV6QDn6/KDQ+CG7AFkRccjxRf1xqajq/Pe4bMGGr+VKAaoF6lELIQw== +"@smithy/eventstream-serde-node@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-node/-/eventstream-serde-node-3.0.0.tgz#6519523fbb429307be29b151b8ba35bcca2b6e64" + integrity sha512-baRPdMBDMBExZXIUAoPGm/hntixjt/VFpU6+VmCyiYJYzRHRxoaI1MN+5XE+hIS8AJ2GCHLMFEIOLzq9xx1EgQ== dependencies: - "@smithy/eventstream-serde-universal" "^2.1.1" - "@smithy/types" "^2.9.1" - tslib "^2.5.0" + "@smithy/eventstream-serde-universal" "^3.0.0" + "@smithy/types" "^3.0.0" + tslib "^2.6.2" -"@smithy/eventstream-serde-universal@^2.1.1": - version "2.1.1" - resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-2.1.1.tgz#0f5eec9ad033017973a67bafb5549782499488d2" - 
integrity sha512-LR0mMT+XIYTxk4k2fIxEA1BPtW3685QlqufUEUAX1AJcfFfxNDKEvuCRZbO8ntJb10DrIFVJR9vb0MhDCi0sAQ== +"@smithy/eventstream-serde-universal@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-3.0.0.tgz#cb8441a73fbde4cbaa68e4a21236f658d914a073" + integrity sha512-HNFfShmotWGeAoW4ujP8meV9BZavcpmerDbPIjkJbxKbN8RsUcpRQ/2OyIxWNxXNH2GWCAxuSB7ynmIGJlQ3Dw== dependencies: - "@smithy/eventstream-codec" "^2.1.1" - "@smithy/types" "^2.9.1" - tslib "^2.5.0" + "@smithy/eventstream-codec" "^3.0.0" + "@smithy/types" "^3.0.0" + tslib "^2.6.2" "@smithy/fetch-http-handler@^3.0.1": version "3.0.1" @@ -8545,13 +8527,6 @@ "@smithy/util-stream" "^3.0.1" tslib "^2.6.2" -"@smithy/types@^2.4.0", "@smithy/types@^2.9.1": - version "2.9.1" - resolved "https://registry.yarnpkg.com/@smithy/types/-/types-2.9.1.tgz#ed04d4144eed3b8bd26d20fc85aae8d6e357ebb9" - integrity sha512-vjXlKNXyprDYDuJ7UW5iobdmyDm6g8dDG+BFUncAg/3XJaN45Gy5RWWWUVgrzIK7S4R1KWgIX5LeJcfvSI24bw== - dependencies: - tslib "^2.5.0" - "@smithy/types@^3.0.0": version "3.0.0" resolved "https://registry.yarnpkg.com/@smithy/types/-/types-3.0.0.tgz#00231052945159c64ffd8b91e8909d8d3006cb7e" @@ -8647,13 +8622,6 @@ "@smithy/types" "^3.0.0" tslib "^2.6.2" -"@smithy/util-hex-encoding@^2.1.1": - version "2.1.1" - resolved "https://registry.yarnpkg.com/@smithy/util-hex-encoding/-/util-hex-encoding-2.1.1.tgz#978252b9fb242e0a59bae4ead491210688e0d15f" - integrity sha512-3UNdP2pkYUUBGEXzQI9ODTDK+Tcu1BlCyDBaRHwyxhA+8xLP8agEKQq4MGmpjqb4VQAjq9TwlCQX0kP6XDKYLg== - dependencies: - tslib "^2.5.0" - "@smithy/util-hex-encoding@^3.0.0": version "3.0.0" resolved "https://registry.yarnpkg.com/@smithy/util-hex-encoding/-/util-hex-encoding-3.0.0.tgz#32938b33d5bf2a15796cd3f178a55b4155c535e6" @@ -22010,10 +21978,10 @@ kuler@^2.0.0: resolved "https://registry.yarnpkg.com/kuler/-/kuler-2.0.0.tgz#e2c570a3800388fb44407e851531c1d670b061b3" integrity 
sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A== -langchain@^0.2.2, langchain@~0.2.0: - version "0.2.2" - resolved "https://registry.yarnpkg.com/langchain/-/langchain-0.2.2.tgz#21605450458c77f022c88fdb7671bb82f4a9a17f" - integrity sha512-4tt2QuwW8AXdIL8CRkQeGOCoYYH3QbLHfQ09yD0iWLV1rwUYJ8mIYFAz/+u6CB8YNEyR/HI105s4xrxFQbWa9g== +langchain@0.2.3: + version "0.2.3" + resolved "https://registry.yarnpkg.com/langchain/-/langchain-0.2.3.tgz#c14bb05cf871b21bd63b84b3ab89580b1d62539f" + integrity sha512-T9xR7zd+Nj0oXy6WoYKmZLy0DlQiDLFPGYWdOXDxy+AvqlujoPdVQgDSpdqiOHvAjezrByAoKxoHCz5XMwTP/Q== dependencies: "@langchain/core" "~0.2.0" "@langchain/openai" "~0.0.28" @@ -22037,10 +22005,10 @@ langchainhub@~0.0.8: resolved "https://registry.yarnpkg.com/langchainhub/-/langchainhub-0.0.8.tgz#fd4b96dc795e22e36c1a20bad31b61b0c33d3110" integrity sha512-Woyb8YDHgqqTOZvWIbm2CaFDGfZ4NTSyXV687AG4vXEfoNo7cGQp7nhl7wL3ehenKWmNEmcxCLgOZzW8jE6lOQ== -langsmith@^0.1.28, langsmith@~0.1.1, langsmith@~0.1.7: - version "0.1.28" - resolved "https://registry.yarnpkg.com/langsmith/-/langsmith-0.1.28.tgz#fbe01352d0b993fd11d4085dd337b1cec17ef28d" - integrity sha512-IQUbo7I7rEE6QYBhrcgwqvlkcUsHlia0yTQpDwWdITw/VJx1f7gLPjNdbwWE+jvOZ4HcD7gCf2HR6zFXputu5A== +langsmith@^0.1.30, langsmith@~0.1.1, langsmith@~0.1.7: + version "0.1.30" + resolved "https://registry.yarnpkg.com/langsmith/-/langsmith-0.1.30.tgz#3000e441605b26e15a87fb991a3929c944edbc0a" + integrity sha512-g8f10H1iiRjCweXJjgM3Y9xl6ApCa1OThDvc0BlSDLVrGVPy1on9wT39vAzYkeadC7oG48p7gfpGlYH3kLkJ9Q== dependencies: "@types/uuid" "^9.0.1" commander "^10.0.1" From 37815f3e17665dd836e83ca5076999b5cc6b2da2 Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Fri, 31 May 2024 11:20:25 +0200 Subject: [PATCH 25/62] removing the public folder and disable UI, will provide in separate PR --- .../integration_assistant/jest.config.js | 2 +- .../integration_assistant/kibana.jsonc | 2 +- .../integration_assistant/public/app.tsx | 118 
------------- .../components/build_integration_button.tsx | 159 ------------------ .../components/categorization_button.tsx | 77 --------- .../public/components/ecs_button.tsx | 71 -------- .../public/components/related_button.tsx | 78 --------- .../integration_assistant/public/index.ts | 13 -- .../integration_assistant/public/plugin.tsx | 43 ----- .../integration_assistant/public/services.ts | 97 ----------- .../integration_assistant/public/types.ts | 17 -- .../server/graphs/categorization/graph.ts | 12 +- .../server/graphs/ecs/graph.ts | 8 +- .../server/graphs/related/graph.ts | 6 +- .../integration_assistant/tsconfig.json | 3 +- 15 files changed, 18 insertions(+), 688 deletions(-) delete mode 100644 x-pack/plugins/integration_assistant/public/app.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/components/build_integration_button.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/components/categorization_button.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/components/ecs_button.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/components/related_button.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/index.ts delete mode 100644 x-pack/plugins/integration_assistant/public/plugin.tsx delete mode 100644 x-pack/plugins/integration_assistant/public/services.ts delete mode 100644 x-pack/plugins/integration_assistant/public/types.ts diff --git a/x-pack/plugins/integration_assistant/jest.config.js b/x-pack/plugins/integration_assistant/jest.config.js index 444d4dd315ed3..5da1e904b8894 100644 --- a/x-pack/plugins/integration_assistant/jest.config.js +++ b/x-pack/plugins/integration_assistant/jest.config.js @@ -12,7 +12,7 @@ module.exports = { coverageDirectory: '/target/kibana-coverage/jest/x-pack/plugins/integration_assistant', coverageReporters: ['text', 'html'], collectCoverageFrom: [ - '/x-pack/plugins/integration_assistant/{common,public,server}/**/*.{ts,tsx}', + 
'/x-pack/plugins/integration_assistant/{common,server}/**/*.{ts,tsx}', '!/x-pack/plugins/integration_assistant/{__jest__}/**/*', '!/x-pack/plugins/integration_assistant/*.test.{ts,tsx}', '!/x-pack/plugins/integration_assistant/*.config.ts', diff --git a/x-pack/plugins/integration_assistant/kibana.jsonc b/x-pack/plugins/integration_assistant/kibana.jsonc index 619448acaac8d..613b01a9c65b6 100644 --- a/x-pack/plugins/integration_assistant/kibana.jsonc +++ b/x-pack/plugins/integration_assistant/kibana.jsonc @@ -6,7 +6,7 @@ "plugin": { "id": "integrationAssistant", "server": true, - "browser": true, + "browser": false, "configPath": [ "xpack", "integration_assistant" diff --git a/x-pack/plugins/integration_assistant/public/app.tsx b/x-pack/plugins/integration_assistant/public/app.tsx deleted file mode 100644 index a1c15bb45e4c7..0000000000000 --- a/x-pack/plugins/integration_assistant/public/app.tsx +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import React, { useState } from 'react'; -import ReactDOM from 'react-dom'; -import { AppMountParameters } from '@kbn/core/public'; -import type { IHttpFetchError } from '@kbn/core-http-browser'; -import { EuiPageTemplate, EuiText, EuiFlexGroup, EuiFlexItem, EuiCodeBlock } from '@elastic/eui'; -import { EcsMappingApiResponse, CategorizationApiResponse, RelatedApiResponse } from '../common'; - -import { Services } from './services'; -import { EcsButton } from './components/ecs_button'; -import { CategorizationButton } from './components/categorization_button'; -import { RelatedButton } from './components/related_button'; -import { BuildIntegrationButton } from './components/build_integration_button'; - -type Props = Services; - -function RoutingExplorer({ - runEcsGraph, - runCategorizationGraph, - runRelatedGraph, - runIntegrationBuilder, -}: Props) { - const [lastResponse, setLastResponse] = useState( - {} as EcsMappingApiResponse | CategorizationApiResponse | RelatedApiResponse - ); - const [currentPipeline, setCurrentPipeline] = useState({} as object); - const [resultDocs, setResultDocs] = useState([] as object[]); - const [currentStep, setCurrentStep] = useState(0); - const rawSamples = [ - '{"ei":0,"event":"user.login","uid":"b675d102-fc25-4f7a-bf5d-96468cc176ea","code":"T1000I","time":"2024-02-23T18:56:50.628Z","cluster_name":"teleport.ericbeahan.com","user":"teleport-admin","required_private_key_policy":"none","success":true,"method":"local","mfa_device":{"mfa_device_name":"otp-device","mfa_device_uuid":"d07bf388-af49-4ec2-b8a4-c8a9e785b70b","mfa_device_type":"TOTP"},"user_agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36","addr.remote":"136.61.214.196:50332"}', - 
'{"ei":0,"event":"cert.create","uid":"efd326fc-dd13-4df8-acef-3102c2d717d3","code":"TC000I","time":"2024-02-23T18:56:50.653Z","cluster_name":"teleport.ericbeahan.com","cert_type":"user","identity":{"user":"teleport-admin","roles":["access","editor"],"logins":["root","ubuntu","ec2-user","-teleport-internal-join"],"expires":"2024-02-24T06:56:50.648137154Z","route_to_cluster":"teleport.ericbeahan.com","traits":{"aws_role_arns":null,"azure_identities":null,"db_names":null,"db_roles":null,"db_users":null,"gcp_service_accounts":null,"host_user_gid":[""],"host_user_uid":[""],"kubernetes_groups":null,"kubernetes_users":null,"logins":["root","ubuntu","ec2-user"],"windows_logins":null},"teleport_cluster":"teleport.ericbeahan.com","client_ip":"136.61.214.196","prev_identity_expires":"0001-01-01T00:00:00Z","private_key_policy":"none"}}', - '{"ei":0,"event":"session.start","uid":"fff30583-13be-49e8-b159-32952c6ea34f","code":"T2000I","time":"2024-02-23T18:56:57.199Z","cluster_name":"teleport.ericbeahan.com","user":"teleport-admin","login":"ec2-user","user_kind":1,"sid":"293fda2d-2266-4d4d-b9d1-bd5ea9dd9fc3","private_key_policy":"none","namespace":"default","server_id":"face0091-2bf1-43fd-a16a-f1514b4119f4","server_hostname":"ip-172-31-8-163.us-east-2.compute.internal","server_labels":{"hostname":"ip-172-31-8-163.us-east-2.compute.internal","teleport.internal/resource-id":"dccb2999-9fb8-4169-aded-ec7a1c0a26de"},"addr.remote":"136.61.214.196:50339","proto":"ssh","size":"80:25","initial_command":[""],"session_recording":"node"}', - ]; - // TODO: Just a quick way to test the return type - const isFetchError = (response: any): response is IHttpFetchError => { - return 'message' in response; - }; - - // TODO: This is just a temp test UI. It will be removed once the actual UI is implemented - return ( - - - -

Integration Assistant test UI

-
-
- - - - - - - - - - - - - - - - - - -

Last Response/Error

-
- {JSON.stringify(lastResponse, null, 2)} -
-
-
-
- ); -} - -export const renderApp = (props: Props, element: AppMountParameters['element']) => { - ReactDOM.render(, element); - - return () => ReactDOM.unmountComponentAtNode(element); -}; diff --git a/x-pack/plugins/integration_assistant/public/components/build_integration_button.tsx b/x-pack/plugins/integration_assistant/public/components/build_integration_button.tsx deleted file mode 100644 index af34c7d2ea564..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/build_integration_button.tsx +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import React, { useState } from 'react'; -import { EuiButton } from '@elastic/eui'; -import type { IHttpFetchError } from '@kbn/core-http-browser'; -import { BuildIntegrationApiRequest } from '../../common'; -// TODO: Temp test button while UI development is in progress -interface BuildIntegrationButtonProps { - runIntegrationBuilder: ( - req: BuildIntegrationApiRequest - ) => Promise>; - rawSamples: any[]; - currentStep: number; - setCurrentStep: (step: number) => void; -} -export const BuildIntegrationButton = ({ - runIntegrationBuilder, - rawSamples, - currentStep, - setCurrentStep, -}: BuildIntegrationButtonProps) => { - const [isLoading, setIsLoading] = useState(false); - const testdocs = [ - { - ecs: { - version: '8.11.0', - }, - related: { - user: ['', 'teleport-admin', '{0=access, 1=editor}'], - ip: ['136.61.214.196'], - }, - teleport: { - audit: { - cluster_name: 'teleport.ericbeahan.com', - cert_type: 'user', - }, - }, - }, - { - ecs: { - version: '8.11.0', - }, - related: { - user: ['', 'teleport-admin', '{0=access, 1=editor}'], - ip: ['136.61.214.196'], - }, - teleport: { - audit: { - cluster_name: 'teleport.ericbeahan.com', - cert_type: 'user', - }, - 
}, - }, - ]; - const testPipeline = { - description: 'Pipeline to process teleport audit logs', - processors: [ - { - set: { - field: 'ecs.version', - tag: 'set_ecs_version', - value: '8.11.0', - }, - }, - { - rename: { - field: 'message', - target_field: 'event.original', - tag: 'rename_message', - ignore_missing: true, - if: 'ctx.event?.original == null', - }, - }, - ], - on_failure: [ - { - append: { - field: 'error.message', - value: - 'Processor {{{_ingest.on_failure_processor_type}}} with tag {{{_ingest.on_failure_processor_tag}}} in pipeline {{{_ingest.on_failure_pipeline}}} failed with message: {{{_ingest.on_failure_message}}}', - }, - }, - { - set: { - field: 'event.kind', - value: 'pipeline_error', - }, - }, - ], - }; - async function onBuildIntegrationButtonClick() { - setIsLoading(true); - const request = { - integration: { - name: 'teleport', - title: 'Test Package Title', - description: 'Test Package Description', - initialVersion: '0.1.0', - dataStreams: [ - { - title: 'Datastream 1 Test Title', - name: 'audit', - description: 'Datastream 1 Test Description', - inputTypes: ['filestream'], - pipeline: testPipeline, - docs: testdocs, - rawSamples, - }, - { - title: 'Datastream 2 Test Title', - name: 'session', - description: 'Datastream 2 Test Description', - inputTypes: ['gcs'], - pipeline: testPipeline, - docs: testdocs, - rawSamples, - }, - ], - owner: '@elastic/test-team', - minKibanaVersion: '8.13.0', - }, - } as BuildIntegrationApiRequest; - try { - const builIntegrationResponse = await runIntegrationBuilder(request); - const blob = new Blob([builIntegrationResponse]); - const url = window.URL.createObjectURL(blob); - const a = document.createElement('a'); - document.body.appendChild(a); - a.style = 'display: none'; - a.target = '_self'; - a.href = url; - a.download = 'integration.zip'; - a.click(); - window.URL.revokeObjectURL(url); - setIsLoading(false); - setCurrentStep(4); - } catch (e) { - setIsLoading(false); - console.log(e); - } - } - 
return ( - - {isLoading ? 'Building Integration' : 'Build Integration'} - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/components/categorization_button.tsx b/x-pack/plugins/integration_assistant/public/components/categorization_button.tsx deleted file mode 100644 index 762d9ea3f0cfc..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/categorization_button.tsx +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import React, { useState } from 'react'; -import { EuiButton } from '@elastic/eui'; -import type { IHttpFetchError } from '@kbn/core-http-browser'; -import { CategorizationApiRequest, CategorizationApiResponse } from '../../common'; -// TODO: Temp test button while UI development is in progress -interface CategorizationButtonProps { - runCategorizationGraph: ( - req: CategorizationApiRequest - ) => Promise>; - rawSamples: any[]; - currentPipeline: any; - currentStep: number; - setCurrentStep: (step: number) => void; - setCurrentPipeline: (pipeline: any) => void; - setLastResponse: (response: any) => void; - setResultDocs: (docs: any) => void; - isFetchError: (response: any) => boolean; -} -export const CategorizationButton = ({ - runCategorizationGraph, - rawSamples, - currentPipeline, - currentStep, - setCurrentStep, - setCurrentPipeline, - setLastResponse, - setResultDocs, - isFetchError, -}: CategorizationButtonProps) => { - const [isLoading, setIsLoading] = useState(false); - async function onCategorizationButtonClick() { - setIsLoading(true); - const request = { - packageName: 'teleport', - dataStreamName: 'audit', - rawSamples, - currentPipeline, - } as CategorizationApiRequest; - try { - const categorizationResponse = await runCategorizationGraph(request); - if 
(!isFetchError(categorizationResponse)) { - if (Object.keys(categorizationResponse?.results).length > 0) { - setCurrentPipeline(categorizationResponse.results.pipeline); - setResultDocs(categorizationResponse.results.docs); - setLastResponse(categorizationResponse); - console.log('finished categorization graph successfully'); - } else { - console.log('finished categorization graph without errors, but no results'); - } - setIsLoading(false); - setCurrentStep(2); - } - } catch (e) { - setIsLoading(false); - console.log(e); - } - } - return ( - - {isLoading ? 'Running Categorization Graph' : 'Run Categorization Graph'} - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/components/ecs_button.tsx b/x-pack/plugins/integration_assistant/public/components/ecs_button.tsx deleted file mode 100644 index 48284893bf09d..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/ecs_button.tsx +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import React, { useState } from 'react'; -import { EuiButton } from '@elastic/eui'; -import type { IHttpFetchError } from '@kbn/core-http-browser'; -import { EcsMappingApiRequest, EcsMappingApiResponse } from '../../common'; -// TODO: Temp test button while UI development is in progress -interface EcsButtonProps { - runEcsGraph: ( - req: EcsMappingApiRequest - ) => Promise>; - rawSamples: any[]; - currentStep: number; - setCurrentStep: (step: number) => void; - setCurrentPipeline: (pipeline: any) => void; - setLastResponse: (response: any) => void; - isFetchError: (response: any) => boolean; -} -export const EcsButton = ({ - runEcsGraph, - rawSamples, - currentStep, - setCurrentStep, - setCurrentPipeline, - setLastResponse, - isFetchError, -}: EcsButtonProps) => { - const [isLoading, setIsLoading] = useState(false); - async function onEcsButtonClick() { - setIsLoading(true); - const request = { - packageName: 'teleport', - dataStreamName: 'audit', - rawSamples, - } as EcsMappingApiRequest; - try { - const ecsResponse = await runEcsGraph(request); - if (!isFetchError(ecsResponse)) { - if (Object.keys(ecsResponse?.results).length > 0) { - setCurrentPipeline(ecsResponse.results.pipeline); - setLastResponse(ecsResponse); - console.log('finished running ecs graph successfully'); - } else { - console.log('finished running ecs graph without errors, but no results'); - } - setIsLoading(false); - setCurrentStep(1); - } - } catch (e) { - setIsLoading(false); - console.log(e); - } - } - return ( - - {isLoading ? 'Running ECS Graph' : 'Run ECS Graph'} - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/components/related_button.tsx b/x-pack/plugins/integration_assistant/public/components/related_button.tsx deleted file mode 100644 index de6cbd0978aec..0000000000000 --- a/x-pack/plugins/integration_assistant/public/components/related_button.tsx +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import React, { useState } from 'react'; -import { EuiButton } from '@elastic/eui'; -import type { IHttpFetchError } from '@kbn/core-http-browser'; -import { RelatedApiRequest, RelatedApiResponse } from '../../common'; - -// TODO: Temp test button while UI development is in progress -interface RelatedButtonProps { - runRelatedGraph: ( - req: RelatedApiRequest - ) => Promise>; - rawSamples: any[]; - currentPipeline: any; - currentStep: number; - setCurrentStep: (step: number) => void; - setCurrentPipeline: (pipeline: any) => void; - setLastResponse: (response: any) => void; - setResultDocs: (docs: any) => void; - isFetchError: (response: any) => boolean; -} -export const RelatedButton = ({ - runRelatedGraph, - rawSamples, - currentPipeline, - currentStep, - setCurrentStep, - setCurrentPipeline, - setLastResponse, - setResultDocs, - isFetchError, -}: RelatedButtonProps) => { - const [isLoading, setIsLoading] = useState(false); - async function onRelatedButtonClick() { - setIsLoading(true); - const request = { - packageName: 'teleport', - dataStreamName: 'audit', - rawSamples, - currentPipeline, - } as RelatedApiRequest; - try { - const relatedResponse = await runRelatedGraph(request); - if (!isFetchError(relatedResponse)) { - if (Object.keys(relatedResponse?.results).length > 0) { - setCurrentPipeline(relatedResponse.results.pipeline); - setResultDocs(relatedResponse.results.docs); - setLastResponse(relatedResponse); - console.log('finished related graph successfully'); - } else { - console.log('finished related graph without errors, but no results'); - } - setIsLoading(false); - setCurrentStep(3); - } - } catch (e) { - setIsLoading(false); - console.log(e); - } - } - return ( - - {isLoading ? 
'Running Related Graph' : 'Run Related Graph'} - - ); -}; diff --git a/x-pack/plugins/integration_assistant/public/index.ts b/x-pack/plugins/integration_assistant/public/index.ts deleted file mode 100644 index 1ef32ec38169b..0000000000000 --- a/x-pack/plugins/integration_assistant/public/index.ts +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import { IntegrationAssistantPlugin } from './plugin'; - -export function plugin() { - return new IntegrationAssistantPlugin(); -} -export type { IntegrationAssistantPluginSetup, IntegrationAssistantPluginStart } from './types'; diff --git a/x-pack/plugins/integration_assistant/public/plugin.tsx b/x-pack/plugins/integration_assistant/public/plugin.tsx deleted file mode 100644 index 34b519749cf60..0000000000000 --- a/x-pack/plugins/integration_assistant/public/plugin.tsx +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import { CoreStart, Plugin, CoreSetup, AppMountParameters } from '@kbn/core/public'; -import { i18n } from '@kbn/i18n'; -import { getServices } from './services'; -import { PLUGIN_ID, INTEGRATION_ASSISTANT_APP_ROUTE } from '../common'; -import { IntegrationAssistantPluginSetup, IntegrationAssistantPluginStart } from './types'; - -export class IntegrationAssistantPlugin - implements Plugin -{ - public setup(core: CoreSetup): IntegrationAssistantPluginSetup { - core.application.register({ - id: PLUGIN_ID, - euiIconType: 'logoElastic', - title: i18n.translate('xpack.fleet.integrationAssistantAppTitle', { - defaultMessage: 'Integration Assistant', - }), - appRoute: INTEGRATION_ASSISTANT_APP_ROUTE, - async mount(params: AppMountParameters) { - const [coreStart] = await core.getStartServices(); - const startServices = getServices(coreStart); - const { renderApp } = await import('./app'); - const unmount = renderApp(startServices, params.element); - return () => { - unmount(); - }; - }, - }); - return {}; - } - - public start(core: CoreStart): IntegrationAssistantPluginStart { - return {}; - } - - public stop() {} -} diff --git a/x-pack/plugins/integration_assistant/public/services.ts b/x-pack/plugins/integration_assistant/public/services.ts deleted file mode 100644 index cbc40577706dd..0000000000000 --- a/x-pack/plugins/integration_assistant/public/services.ts +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -import type { CoreStart } from '@kbn/core/public'; -import type { IHttpFetchError } from '@kbn/core-http-browser'; -import { - EcsMappingApiRequest, - EcsMappingApiResponse, - CategorizationApiRequest, - CategorizationApiResponse, - RelatedApiRequest, - RelatedApiResponse, - BuildIntegrationApiRequest, - TestPipelineApiRequest, - TestPipelineApiResponse, -} from '../common'; -import { - ECS_GRAPH_PATH, - CATEGORIZATION_GRAPH_PATH, - RELATED_GRAPH_PATH, - INTEGRATION_BUILDER_PATH, - TEST_PIPELINE_PATH, -} from '../common'; - -export interface Services { - runEcsGraph: (req: EcsMappingApiRequest) => Promise; - runCategorizationGraph: ( - req: CategorizationApiRequest - ) => Promise; - runRelatedGraph: (req: RelatedApiRequest) => Promise; - runIntegrationBuilder: (req: BuildIntegrationApiRequest) => Promise; - testPipeline: (req: TestPipelineApiRequest) => Promise; -} - -export function getServices(core: CoreStart): Services { - return { - runEcsGraph: async (req: EcsMappingApiRequest): Promise => { - try { - const response = await core.http.post(ECS_GRAPH_PATH, { - body: JSON.stringify({ ...req }), - }); - return response; - } catch (e) { - return e; - } - }, - runCategorizationGraph: async ( - req: CategorizationApiRequest - ): Promise => { - try { - const response = await core.http.post( - CATEGORIZATION_GRAPH_PATH, - { - body: JSON.stringify({ ...req }), - } - ); - return response; - } catch (e) { - return e; - } - }, - runRelatedGraph: async (req: RelatedApiRequest): Promise => { - try { - const response = await core.http.post(RELATED_GRAPH_PATH, { - body: JSON.stringify({ ...req }), - }); - return response; - } catch (e) { - return e; - } - }, - runIntegrationBuilder: async (req: BuildIntegrationApiRequest): Promise => { - try { - const response = await core.http.post(INTEGRATION_BUILDER_PATH, { - body: JSON.stringify({ ...req }), - }); - return response; - } catch (e) { - return e; - } - }, - testPipeline: async (req: TestPipelineApiRequest): Promise => 
{ - try { - const response = await core.http.post(TEST_PIPELINE_PATH, { - body: JSON.stringify({ ...req }), - }); - return response; - } catch (e) { - return e; - } - }, - }; -} diff --git a/x-pack/plugins/integration_assistant/public/types.ts b/x-pack/plugins/integration_assistant/public/types.ts deleted file mode 100644 index a7b8a413213c3..0000000000000 --- a/x-pack/plugins/integration_assistant/public/types.ts +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import type { NavigationPublicPluginStart } from '@kbn/navigation-plugin/public'; - -// eslint-disable-next-line @typescript-eslint/no-empty-interface -export interface IntegrationAssistantPluginSetup {} -// eslint-disable-next-line @typescript-eslint/no-empty-interface -export interface IntegrationAssistantPluginStart {} - -export interface AppPluginStartDependencies { - navigation: NavigationPublicPluginStart; -} diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts index 35e2b3e7b9b69..8e3c124ce386e 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts @@ -152,12 +152,16 @@ export async function getCategorizationGraph(client: IScopedClusterClient, model }) .addNode('modelInput', modelInput) .addNode('modelOutput', modelOutput) - .addNode('handleCategorization', (state) => handleCategorization(state, model)) + .addNode('handleCategorization', (state: CategorizationState) => + handleCategorization(state, model) + ) .addNode('handleValidatePipeline', handleValidatePipeline) .addNode('handleCategorizationValidation', 
handleCategorizationValidation) - .addNode('handleInvalidCategorization', (state) => handleInvalidCategorization(state, model)) - .addNode('handleErrors', (state) => handleErrors(state, model)) - .addNode('handleReview', (state) => handleReview(state, model)) + .addNode('handleInvalidCategorization', (state: CategorizationState) => + handleInvalidCategorization(state, model) + ) + .addNode('handleErrors', (state: CategorizationState) => handleErrors(state, model)) + .addNode('handleReview', (state: CategorizationState) => handleReview(state, model)) .addEdge(START, 'modelInput') .addEdge('modelOutput', END) .addEdge('modelInput', 'handleValidatePipeline') diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts index 8bbdc429f8115..048d366afe49f 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts @@ -142,11 +142,11 @@ export async function getEcsGraph(model: BedrockChat) { }) .addNode('modelInput', modelInput) .addNode('modelOutput', modelOutput) - .addNode('handleEcsMapping', (state) => handleEcsMapping(state, model)) + .addNode('handleEcsMapping', (state: EcsMappingState) => handleEcsMapping(state, model)) .addNode('handleValidation', handleValidateMappings) - .addNode('handleDuplicates', (state) => handleDuplicates(state, model)) - .addNode('handleMissingKeys', (state) => handleMissingKeys(state, model)) - .addNode('handleInvalidEcs', (state) => handleInvalidEcs(state, model)) + .addNode('handleDuplicates', (state: EcsMappingState) => handleDuplicates(state, model)) + .addNode('handleMissingKeys', (state: EcsMappingState) => handleMissingKeys(state, model)) + .addNode('handleInvalidEcs', (state: EcsMappingState) => handleInvalidEcs(state, model)) .addEdge(START, 'modelInput') .addEdge('modelOutput', END) .addEdge('handleEcsMapping', 'handleValidation') diff --git 
a/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts index 7ec5afb916912..ad3266b6e19b6 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts @@ -138,10 +138,10 @@ export async function getRelatedGraph(client: IScopedClusterClient, model: Bedro const workflow = new StateGraph({ channels: graphState }) .addNode('modelInput', modelInput) .addNode('modelOutput', modelOutput) - .addNode('handleRelated', (state) => handleRelated(state, model)) + .addNode('handleRelated', (state: RelatedState) => handleRelated(state, model)) .addNode('handleValidatePipeline', handleValidatePipeline) - .addNode('handleErrors', (state) => handleErrors(state, model)) - .addNode('handleReview', (state) => handleReview(state, model)) + .addNode('handleErrors', (state: RelatedState) => handleErrors(state, model)) + .addNode('handleReview', (state: RelatedState) => handleReview(state, model)) .addEdge(START, 'modelInput') .addEdge('modelOutput', END) .addEdge('handleRelated', 'handleValidatePipeline') diff --git a/x-pack/plugins/integration_assistant/tsconfig.json b/x-pack/plugins/integration_assistant/tsconfig.json index bc6dac3bea829..890de4eb7bd8d 100644 --- a/x-pack/plugins/integration_assistant/tsconfig.json +++ b/x-pack/plugins/integration_assistant/tsconfig.json @@ -5,10 +5,9 @@ }, "include": [ "index.ts", - "public/**/*.ts", - "public/**/*.tsx", "server/**/*.ts", "common/**/*.ts", + "__jest__/**/*", "../../typings/**/*", ], "exclude": [ From f57625369bf58736b5444497b57fd4e8c1807c77 Mon Sep 17 00:00:00 2001 From: kibanamachine <42973632+kibanamachine@users.noreply.github.com> Date: Fri, 31 May 2024 09:43:38 +0000 Subject: [PATCH 26/62] [CI] Auto-commit changed files from 'node scripts/lint_ts_projects --fix' --- x-pack/plugins/integration_assistant/tsconfig.json | 4 +++- 1 file changed, 3 insertions(+), 
1 deletion(-) diff --git a/x-pack/plugins/integration_assistant/tsconfig.json b/x-pack/plugins/integration_assistant/tsconfig.json index 890de4eb7bd8d..129cfc4e8e174 100644 --- a/x-pack/plugins/integration_assistant/tsconfig.json +++ b/x-pack/plugins/integration_assistant/tsconfig.json @@ -15,7 +15,9 @@ ], "kbn_references": [ "@kbn/core", - "@kbn/core-http-browser", "@kbn/config-schema", + "@kbn/langchain", + "@kbn/core-elasticsearch-server", + "@kbn/actions-plugin", ] } From ca5ca42e20a8e9e3d355dd4b7f0449a052d4f696 Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Fri, 31 May 2024 11:51:35 +0200 Subject: [PATCH 27/62] Adding some minor route argument changes --- .../plugins/integration_assistant/README.md | 56 +++++++++++++++++-- .../server/routes/build_integration_routes.ts | 7 ++- .../server/routes/pipeline_routes.ts | 3 +- .../server/routes/register_routes.ts | 5 +- 4 files changed, 60 insertions(+), 11 deletions(-) diff --git a/x-pack/plugins/integration_assistant/README.md b/x-pack/plugins/integration_assistant/README.md index 1ac7540508360..5bcac9a8d2b45 100644 --- a/x-pack/plugins/integration_assistant/README.md +++ b/x-pack/plugins/integration_assistant/README.md @@ -1,9 +1,53 @@ -Team owner: Platform +Team owner: Security Integrations Scalability -A working example of a plugin that registers and uses multiple custom routes. 
+# Integration Assistant -Read more: +## Overview -- [IRouter API Docs](../../docs/development/core/server/kibana-plugin-core-server.irouter.md) -- [HttpHandler (core.http.fetch) API Docs](../../docs/development/core/public/kibana-plugin-core-public.httphandler.md) -- [Routing Conventions](../../STYLEGUIDE.mdx#api-endpoints) \ No newline at end of file +This is a new Kibana plugin created to help users with automatically generating integration packages based on provided log samples and relevant information + +## Features + +Exposes 4 API's that can be consumed by any frontend plugin, which are: + +- ECS Mapping API +- Categorization API +- Related Fields API +- Build Integration API + +## Development + +### Backend + +#### Overview + +The backend part of the plugin utilizes langraph extensively to parse the provided log samples and generate the integration package. + +One instance of langraph is created that will include one or more `nodes` in which each node represents a step in the integration package generation process. + +Each node links to a specific function, usually a `handler` specified in its own file under each graph folder that will be executed when the node is reached. + +#### Structure + +**Graphs** +The graph components are split into logical parts and are placed in separate folders for each graph under the `./server/graphs` directory. + +Each graph folder needs to contains at least one `graph.ts`, which exports a function that returns the compiled graph object. + +Each exported graph function is then linked up to one or more API routes. + +**Routes** +All routes are defined under `./server/routes` in its own file, and then included in the `./server/routes/register_routes.ts` file. + +**Integration Builder** +The integration builder is the last step in the + +### Tests + +All mocks/fixtures are placed in the top `./__jest__` directory of the plugin. If many mocks/fixtures are required, try to split them up into separate file(s). 
+ +Tests can be run with: + +```bash +node scripts/jest x-pack/plugins/integration_assistant/ --coverage +``` diff --git a/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts b/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts index 94da2519c15ac..1a2420c9255a4 100644 --- a/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts @@ -10,9 +10,11 @@ import { schema } from '@kbn/config-schema'; import { INTEGRATION_BUILDER_PATH } from '../../common'; import { buildPackage } from '../integration_builder'; import type { BuildIntegrationApiRequest } from '../../common'; +import { IntegrationAssistantRouteHandlerContext } from '../plugin'; -// TODO: Currently not implemented -export function registerIntegrationBuilderRoutes(router: IRouter) { +export function registerIntegrationBuilderRoutes( + router: IRouter +) { router.post( { path: `${INTEGRATION_BUILDER_PATH}`, @@ -51,7 +53,6 @@ export function registerIntegrationBuilderRoutes(router: IRouter) { }), }, }, - // TODO: This needs to implement CustomHttpResponseOptions async (_, req, res) => { const { integration } = req.body as BuildIntegrationApiRequest; try { diff --git a/x-pack/plugins/integration_assistant/server/routes/pipeline_routes.ts b/x-pack/plugins/integration_assistant/server/routes/pipeline_routes.ts index 37d595bc85d89..f44d94c8cdad6 100644 --- a/x-pack/plugins/integration_assistant/server/routes/pipeline_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/pipeline_routes.ts @@ -10,8 +10,9 @@ import { schema } from '@kbn/config-schema'; import { TEST_PIPELINE_PATH } from '../../common'; import { testPipeline } from '../util/pipeline'; import { TestPipelineApiRequest, TestPipelineApiResponse } from '../../common/types'; +import { IntegrationAssistantRouteHandlerContext } from '../plugin'; -export function registerEcsRoutes(router: 
IRouter) { +export function registerPipelineRoutes(router: IRouter) { router.post( { path: `${TEST_PIPELINE_PATH}`, diff --git a/x-pack/plugins/integration_assistant/server/routes/register_routes.ts b/x-pack/plugins/integration_assistant/server/routes/register_routes.ts index 954e80b924c86..d43472f6ad3dd 100644 --- a/x-pack/plugins/integration_assistant/server/routes/register_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/register_routes.ts @@ -10,10 +10,13 @@ import { registerEcsRoutes } from './ecs_routes'; import { registerIntegrationBuilderRoutes } from './build_integration_routes'; import { registerCategorizationRoutes } from './categorization_routes'; import { registerRelatedRoutes } from './related_routes'; +import { registerPipelineRoutes } from './pipeline_routes'; +import { IntegrationAssistantRouteHandlerContext } from '../plugin'; -export function registerRoutes(router: IRouter) { +export function registerRoutes(router: IRouter) { registerEcsRoutes(router); registerIntegrationBuilderRoutes(router); registerCategorizationRoutes(router); registerRelatedRoutes(router); + registerPipelineRoutes(router); } From 696b170376bb8509d88b38710d32bdff3554fa60 Mon Sep 17 00:00:00 2001 From: kibanamachine <42973632+kibanamachine@users.noreply.github.com> Date: Fri, 31 May 2024 10:04:45 +0000 Subject: [PATCH 28/62] [CI] Auto-commit changed files from 'node scripts/generate codeowners' --- .github/CODEOWNERS | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 1968e6a7e7984..7d853f76b0280 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -501,6 +501,7 @@ x-pack/plugins/observability_solution/infra @elastic/obs-ux-logs-team @elastic/o x-pack/plugins/ingest_pipelines @elastic/kibana-management src/plugins/input_control_vis @elastic/kibana-presentation src/plugins/inspector @elastic/kibana-presentation +x-pack/plugins/integration_assistant @elastic/security-solution src/plugins/interactive_setup 
@elastic/kibana-security test/interactive_setup_api_integration/plugins/test_endpoints @elastic/kibana-security packages/kbn-interpreter @elastic/kibana-visualizations From 044ca8ae427d997eef44a468beeb74a5df39c8f3 Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Fri, 31 May 2024 12:14:17 +0200 Subject: [PATCH 29/62] update readme --- x-pack/plugins/integration_assistant/README.md | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/x-pack/plugins/integration_assistant/README.md b/x-pack/plugins/integration_assistant/README.md index 5bcac9a8d2b45..17b2b8fd46a26 100644 --- a/x-pack/plugins/integration_assistant/README.md +++ b/x-pack/plugins/integration_assistant/README.md @@ -14,6 +14,7 @@ Exposes 4 API's that can be consumed by any frontend plugin, which are: - Categorization API - Related Fields API - Build Integration API +- Optional Test Pipeline API (Used to update pipeline results if the ingest pipeline changes by a user in the UI). ## Development @@ -30,6 +31,7 @@ Each node links to a specific function, usually a `handler` specified in its own #### Structure **Graphs** + The graph components are split into logical parts and are placed in separate folders for each graph under the `./server/graphs` directory. Each graph folder needs to contains at least one `graph.ts`, which exports a function that returns the compiled graph object. @@ -37,12 +39,23 @@ Each graph folder needs to contains at least one `graph.ts`, which exports a fun Each exported graph function is then linked up to one or more API routes. **Routes** + All routes are defined under `./server/routes` in its own file, and then included in the `./server/routes/register_routes.ts` file. **Integration Builder** -The integration builder is the last step in the -### Tests +The integration builder is the last step in the expected API flow (ECS Mapping -> Categorization -> Related Fields -> Integration Builder). 
+With the provided package and data stream details, a optional logo and a list of sample logs, the API will build out the entire folder structure and files required for the integration package, archive it and return it as a `Buffer`. + +**Templates** + +Currently the templates are stored as nunjucks files as they were converted from jinja2 templates, which uses the exact same format. Longer term this will most likely be switched to the Kibana forked Handlebars templating engine. + +The templates are stored in the `./server/templates` directory and are used to generate the integration package files while running the Integration Builder API. + +One template (pipeline.yml.njk) is used by the ECS Mapping API to generate provide the boilerplate ingest pipeline structure we want to use for all generated integrations. + +## Tests All mocks/fixtures are placed in the top `./__jest__` directory of the plugin. If many mocks/fixtures are required, try to split them up into separate file(s). From ba3b3e509c973ccfb37e0675b320b4f91ba8853a Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Fri, 31 May 2024 12:36:39 +0200 Subject: [PATCH 30/62] Update file operations to be sync rather than async as they have to be performed in order anyway, remove unecessary awaits. 
--- .../integration_assistant/common/types.ts | 2 - .../server/integration_builder/agent.ts | 15 ++-- .../integration_builder/build_integration.ts | 74 +++++++++---------- .../server/routes/build_integration_routes.ts | 1 - .../server/util/async_file.ts | 52 ------------- .../server/util/files.ts | 51 +++++++++++++ .../server/util/index.ts | 9 +-- 7 files changed, 93 insertions(+), 111 deletions(-) delete mode 100644 x-pack/plugins/integration_assistant/server/util/async_file.ts create mode 100644 x-pack/plugins/integration_assistant/server/util/files.ts diff --git a/x-pack/plugins/integration_assistant/common/types.ts b/x-pack/plugins/integration_assistant/common/types.ts index 7ac3d6d06020a..41e15c1d0a0c3 100644 --- a/x-pack/plugins/integration_assistant/common/types.ts +++ b/x-pack/plugins/integration_assistant/common/types.ts @@ -57,8 +57,6 @@ export interface Integration { title: string; description: string; dataStreams: DataStream[]; - owner: string; - minKibanaVersion: string; logo?: string; } diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/agent.ts b/x-pack/plugins/integration_assistant/server/integration_builder/agent.ts index e524619d8ca2e..f6e98562731e1 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/agent.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/agent.ts @@ -7,27 +7,24 @@ import { join as joinPath } from 'path'; import { InputTypes } from '../../common'; -import { asyncEnsureDir, asyncCreate, asyncRead } from '../util'; +import { ensureDirSync, createSync, readSync } from '../util'; -export async function createAgentInput( - specificDataStreamDir: string, - inputTypes: InputTypes[] -): Promise { +export function createAgentInput(specificDataStreamDir: string, inputTypes: InputTypes[]): void { const agentDir = joinPath(specificDataStreamDir, 'agent', 'stream'); const agentTemplatesDir = joinPath(__dirname, '../templates/agent'); - await asyncEnsureDir(agentDir); + 
ensureDirSync(agentDir); // Load common options that exists for all .yml.hbs files, to be merged with each specific input file const commonFilePath = joinPath(agentTemplatesDir, 'common.yml.hbs'); - const commonFile = await asyncRead(commonFilePath); + const commonFile = readSync(commonFilePath); for (const inputType of inputTypes) { const inputTypeFilePath = joinPath(agentTemplatesDir, `${inputType}.yml.hbs`); - const inputTypeFile = await asyncRead(inputTypeFilePath); + const inputTypeFile = readSync(inputTypeFilePath); const combinedContents = `${inputTypeFile}\n${commonFile}`; const destinationFilePath = joinPath(agentDir, `${inputType}.yml.hbs`); - await asyncCreate(destinationFilePath, combinedContents); + createSync(destinationFilePath, combinedContents); } } diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts b/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts index 4545213e1c293..f7af9d5cd109d 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts @@ -15,7 +15,7 @@ import { createDatastream } from './data_stream'; import { createAgentInput } from './agent'; import { createFieldMapping } from './fields'; import { createPipeline } from './pipeline'; -import { generateUniqueId, asyncEnsureDir, asyncCopy, asyncCreate } from '../util'; +import { generateUniqueId, ensureDirSync, copySync, createSync } from '../util'; export async function buildPackage(integration: Integration): Promise { const templateDir = joinPath(__dirname, '../templates'); @@ -27,22 +27,17 @@ export async function buildPackage(integration: Integration): Promise { }); const tmpDir = joinPath(tmpdir(), `integration-assistant-${generateUniqueId()}`); - const packageDir = await createDirectories(tmpDir, integration); + const packageDir = createDirectories(tmpDir, integration); const 
dataStreamsDir = joinPath(packageDir, 'data_stream'); for (const dataStream of integration.dataStreams) { const dataStreamName = dataStream.name; const specificDataStreamDir = joinPath(dataStreamsDir, dataStreamName); - await createDatastream(integration.name, specificDataStreamDir, dataStream); - await createAgentInput(specificDataStreamDir, dataStream.inputTypes); - await createPipeline(specificDataStreamDir, dataStream.pipeline); - await createFieldMapping( - integration.name, - dataStreamName, - specificDataStreamDir, - dataStream.docs - ); + createDatastream(integration.name, specificDataStreamDir, dataStream); + createAgentInput(specificDataStreamDir, dataStream.inputTypes); + createPipeline(specificDataStreamDir, dataStream.pipeline); + createFieldMapping(integration.name, dataStreamName, specificDataStreamDir, dataStream.docs); } const tmpPackageDir = joinPath(tmpDir, `${integration.name}-0.1.0`); @@ -51,70 +46,71 @@ export async function buildPackage(integration: Integration): Promise { return zipBuffer; } -async function createDirectories(tmpDir: string, integration: Integration): Promise { +function createDirectories(tmpDir: string, integration: Integration): string { const packageDir = joinPath(tmpDir, `${integration.name}-0.1.0`); - await asyncEnsureDir(tmpDir); - await asyncEnsureDir(packageDir); - await createPackage(packageDir, integration); + ensureDirSync(tmpDir); + ensureDirSync(packageDir); + createPackage(packageDir, integration); return packageDir; } -async function createPackage(packageDir: string, integration: Integration): Promise { - await createReadme(packageDir, integration); - await createChangelog(packageDir, integration); - await createBuildFile(packageDir); - await createPackageManifest(packageDir, integration); - await createPackageSystemTests(packageDir, integration); - await createLogo(packageDir, integration); +function createPackage(packageDir: string, integration: Integration): void { + createReadme(packageDir, integration); + 
createChangelog(packageDir); + createBuildFile(packageDir); + createPackageManifest(packageDir, integration); + createPackageSystemTests(packageDir, integration); + createLogo(packageDir, integration); } -async function createLogo(packageDir: string, integration: Integration): Promise { +function createLogo(packageDir: string, integration: Integration): void { const logoDir = joinPath(packageDir, 'img'); - await asyncEnsureDir(logoDir); + ensureDirSync(logoDir); if (integration?.logo !== undefined) { const buffer = Buffer.from(integration.logo, 'base64'); - await asyncCreate(joinPath(logoDir, 'logo.svg'), buffer); + createSync(joinPath(logoDir, 'logo.svg'), buffer); } else { const imgTemplateDir = joinPath(__dirname, '../templates/img'); - await asyncCopy(joinPath(imgTemplateDir, 'logo.svg'), joinPath(logoDir, 'logo.svg')); + copySync(joinPath(imgTemplateDir, 'logo.svg'), joinPath(logoDir, 'logo.svg')); } } -async function createBuildFile(packageDir: string): Promise { +function createBuildFile(packageDir: string): void { const buildFile = nunjucks.render('build.yml.njk', { ecs_version: '8.11.0' }); const buildDir = joinPath(packageDir, '_dev/build'); - await asyncEnsureDir(buildDir); - await asyncCreate(joinPath(buildDir, 'build.yml'), buildFile); + ensureDirSync(buildDir); + createSync(joinPath(buildDir, 'build.yml'), buildFile); } -async function createChangelog(packageDir: string, integration: Integration): Promise { +function createChangelog(packageDir: string): void { const changelogTemplate = nunjucks.render('changelog.yml.njk', { initial_version: '0.1.0', }); - await asyncCreate(joinPath(packageDir, 'changelog.yml'), changelogTemplate); + createSync(joinPath(packageDir, 'changelog.yml'), changelogTemplate); } -async function createReadme(packageDir: string, integration: Integration) { +function createReadme(packageDir: string, integration: Integration) { const readmeDirPath = joinPath(packageDir, '_dev/build/docs/'); - await asyncEnsureDir(readmeDirPath); + 
ensureDirSync(readmeDirPath); const readmeTemplate = nunjucks.render('README.md.njk', { package_name: integration.name, data_streams: integration.dataStreams, }); - await asyncCreate(joinPath(readmeDirPath, 'README.md'), readmeTemplate); + createSync(joinPath(readmeDirPath, 'README.md'), readmeTemplate); } async function createZipArchive(tmpPackageDir: string): Promise { const zip = new AdmZip(); zip.addLocalFolder(tmpPackageDir); - return zip.toBuffer(); + const buffer = zip.toBuffer(); + return buffer; } -async function createPackageManifest(packageDir: string, integration: Integration): Promise { +function createPackageManifest(packageDir: string, integration: Integration): void { const uniqueInputs: { [key: string]: { type: string; title: string; description: string } } = {}; integration.dataStreams.forEach((dataStream: DataStream) => { @@ -137,10 +133,10 @@ async function createPackageManifest(packageDir: string, integration: Integratio package_name: integration.name, package_version: '0.1.0', package_description: integration.description, - package_owner: integration.owner, - min_version: integration.minKibanaVersion, + package_owner: '@elastic/custom-integrations', + min_version: '^8.13.0', inputs: uniqueInputsList, }); - await asyncCreate(joinPath(packageDir, 'manifest.yml'), packageManifest); + createSync(joinPath(packageDir, 'manifest.yml'), packageManifest); } diff --git a/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts b/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts index 1a2420c9255a4..26f44414002a2 100644 --- a/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts @@ -48,7 +48,6 @@ export function registerIntegrationBuilderRoutes( docs: schema.arrayOf(schema.object({}, { unknowns: 'allow' })), }) ), - initialVersion: schema.maybe(schema.string()), }), }), }, diff --git 
a/x-pack/plugins/integration_assistant/server/util/async_file.ts b/x-pack/plugins/integration_assistant/server/util/async_file.ts deleted file mode 100644 index b9449ada32362..0000000000000 --- a/x-pack/plugins/integration_assistant/server/util/async_file.ts +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -// This file is created to ensure all file operations are using proper async promises throughout the plugin. -import { readdir, writeFile, mkdir, stat, readFile, cp } from 'fs/promises'; -import { dirname } from 'path'; - -export async function asyncExists(path: string): Promise { - try { - await stat(path); - return true; - } catch (error) { - if (error.code === 'ENOENT') { - return false; - } else { - throw error; - } - } -} - -export async function asyncEnsureDir(dirPath: string): Promise { - const exists = await asyncExists(dirPath); - if (!exists) { - await mkdir(dirPath, { recursive: true }); - } -} - -export async function asyncCreate(path: string, content: string | Buffer): Promise { - return await writeFile(path, content, { encoding: 'utf-8' }); -} - -export async function asyncCopy(source: string, destination: string): Promise { - try { - // Ensure the destination directory exists - await mkdir(dirname(destination), { recursive: true }); - await cp(source, destination, { recursive: true }); - } catch (error) { - return Promise.reject(error); - } -} - -export async function asyncListDir(path: string): Promise { - return await readdir(path); -} - -export async function asyncRead(path: string): Promise { - return await readFile(path, { encoding: 'utf-8' }); -} diff --git a/x-pack/plugins/integration_assistant/server/util/files.ts b/x-pack/plugins/integration_assistant/server/util/files.ts new file mode 100644 index 
0000000000000..0a5c55dce2658 --- /dev/null +++ b/x-pack/plugins/integration_assistant/server/util/files.ts @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { readdirSync, writeFileSync, mkdirSync, statSync, readFileSync, cpSync } from 'fs'; +import { dirname } from 'path'; + +export function existsSync(path: string): boolean { + try { + statSync(path); + return true; + } catch (error) { + if (error.code === 'ENOENT') { + return false; + } else { + throw error; + } + } +} + +export function ensureDirSync(dirPath: string): void { + const exists = existsSync(dirPath); + if (!exists) { + mkdirSync(dirPath, { recursive: true }); + } +} + +export function createSync(path: string, content: string | Buffer): void { + writeFileSync(path, content, { encoding: 'utf-8' }); +} + +export function copySync(source: string, destination: string): void { + try { + // Ensure the destination directory exists + mkdirSync(dirname(destination), { recursive: true }); + cpSync(source, destination, { recursive: true }); + } catch (error) { + throw error; + } +} + +export function listDirSync(path: string): string[] { + return readdirSync(path); +} + +export function readSync(path: string): string { + return readFileSync(path, { encoding: 'utf-8' }); +} diff --git a/x-pack/plugins/integration_assistant/server/util/index.ts b/x-pack/plugins/integration_assistant/server/util/index.ts index a32871c5f3bdf..1e10c1bbf6258 100644 --- a/x-pack/plugins/integration_assistant/server/util/index.ts +++ b/x-pack/plugins/integration_assistant/server/util/index.ts @@ -5,14 +5,7 @@ * 2.0. 
*/ -export { - asyncCreate, - asyncEnsureDir, - asyncCopy, - asyncRead, - asyncExists, - asyncListDir, -} from './async_file'; +export { existsSync, ensureDirSync, createSync, copySync, listDirSync, readSync } from './files'; export { generateFields, mergeSamples } from './samples'; export { deepCopy, generateUniqueId } from './util'; From b78e00a872afe30afb3a367c2e3c9162d23ccbd7 Mon Sep 17 00:00:00 2001 From: kibanamachine <42973632+kibanamachine@users.noreply.github.com> Date: Fri, 31 May 2024 10:52:50 +0000 Subject: [PATCH 31/62] [CI] Auto-commit changed files from 'node scripts/build_plugin_list_docs' --- docs/developer/plugin-list.asciidoc | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/developer/plugin-list.asciidoc b/docs/developer/plugin-list.asciidoc index 178aa842fb2a8..9afe4b7270f01 100644 --- a/docs/developer/plugin-list.asciidoc +++ b/docs/developer/plugin-list.asciidoc @@ -630,6 +630,10 @@ the infrastructure monitoring use-case within Kibana. |The ingest_pipelines plugin provides Kibana support for Elasticsearch's ingest pipelines. +|{kib-repo}blob/{branch}/x-pack/plugins/integration_assistant/README.md[integrationAssistant] +|Team owner: Security Integrations Scalability + + |{kib-repo}blob/{branch}/x-pack/plugins/kubernetes_security/README.md[kubernetesSecurity] |This plugin provides interactive visualizations of your Kubernetes workload and session data. 
From 728a3d925e972ef12e8727a8687e0f825a60aa2d Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Fri, 31 May 2024 13:06:55 +0200 Subject: [PATCH 32/62] removing reference to local bedrock model in tests --- docs/developer/plugin-list.asciidoc | 4 +++ .../categorization/categorization.test.ts | 9 +------ .../graphs/categorization/errors.test.ts | 9 +------ .../graphs/categorization/graph.test.ts | 20 ++------------ .../server/graphs/categorization/graph.ts | 6 ++--- .../graphs/categorization/invalid.test.ts | 9 +------ .../server/graphs/categorization/invalid.ts | 3 ++- .../graphs/categorization/review.test.ts | 10 +------ .../server/graphs/ecs/duplicates.test.ts | 9 +------ .../server/graphs/ecs/graph.test.ts | 11 ++------ .../server/graphs/ecs/invalid.test.ts | 9 +------ .../server/graphs/ecs/mapping.test.ts | 9 +------ .../server/graphs/ecs/missing.test.ts | 9 +------ .../server/graphs/related/errors.test.ts | 9 +------ .../server/graphs/related/graph.test.ts | 20 ++------------ .../server/graphs/related/graph.ts | 6 ++--- .../server/graphs/related/related.test.ts | 9 +------ .../server/graphs/related/review.test.ts | 9 +------ .../integration_assistant/server/util/es.ts | 26 ------------------- .../server/util/graph.ts | 10 +++---- .../server/util/index.ts | 1 - 21 files changed, 34 insertions(+), 173 deletions(-) delete mode 100644 x-pack/plugins/integration_assistant/server/util/es.ts diff --git a/docs/developer/plugin-list.asciidoc b/docs/developer/plugin-list.asciidoc index 178aa842fb2a8..9afe4b7270f01 100644 --- a/docs/developer/plugin-list.asciidoc +++ b/docs/developer/plugin-list.asciidoc @@ -630,6 +630,10 @@ the infrastructure monitoring use-case within Kibana. |The ingest_pipelines plugin provides Kibana support for Elasticsearch's ingest pipelines. 
+|{kib-repo}blob/{branch}/x-pack/plugins/integration_assistant/README.md[integrationAssistant] +|Team owner: Security Integrations Scalability + + |{kib-repo}blob/{branch}/x-pack/plugins/kubernetes_security/README.md[kubernetesSecurity] |This plugin provides interactive visualizations of your Kubernetes workload and session data. diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.test.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.test.ts index fcb5c49f60fb6..7f36323191a13 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.test.ts @@ -6,7 +6,6 @@ */ import { FakeLLM } from '@langchain/core/utils/testing'; -import { getModel } from '../../providers/bedrock'; import { handleCategorization } from './categorization'; import { CategorizationState } from '../../types'; import { @@ -19,17 +18,11 @@ const mockLlm = new FakeLLM({ response: JSON.stringify(categorizationMockProcessors, null, 2), }); -jest.mock('../../providers/bedrock', () => ({ - getModel: jest.fn(), -})); const testState: CategorizationState = categorizationTestState; describe('Testing categorization handler', () => { - beforeEach(() => { - (getModel as jest.Mock).mockReturnValue(mockLlm); - }); it('handleCategorization()', async () => { - const response = await handleCategorization(testState); + const response = await handleCategorization(testState, mockLlm); expect(response.currentPipeline).toStrictEqual( categorizationExpectedHandlerResponse.currentPipeline ); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.test.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.test.ts index 2c86001a18f3d..a85eb68ff846c 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.test.ts +++ 
b/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.test.ts @@ -6,7 +6,6 @@ */ import { FakeLLM } from '@langchain/core/utils/testing'; -import { getModel } from '../../providers/bedrock'; import { handleErrors } from './errors'; import { CategorizationState } from '../../types'; import { @@ -19,17 +18,11 @@ const mockLlm = new FakeLLM({ response: JSON.stringify(categorizationMockProcessors, null, 2), }); -jest.mock('../../providers/bedrock', () => ({ - getModel: jest.fn(), -})); const testState: CategorizationState = categorizationTestState; describe('Testing categorization handler', () => { - beforeEach(() => { - (getModel as jest.Mock).mockReturnValue(mockLlm); - }); it('handleErrors()', async () => { - const response = await handleErrors(testState); + const response = await handleErrors(testState, mockLlm); expect(response.currentPipeline).toStrictEqual( categorizationExpectedHandlerResponse.currentPipeline ); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts index 227f56d184f5b..a255536a3da1f 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts @@ -8,7 +8,6 @@ import { IScopedClusterClient } from '@kbn/core/server'; import { FakeLLM } from '@langchain/core/utils/testing'; import { getCategorizationGraph } from './graph'; -import { getModel } from '../../providers/bedrock'; import { categorizationExpectedResults, categorizationErrorMockedResponse, @@ -35,23 +34,11 @@ jest.mock('./errors'); jest.mock('./review'); jest.mock('./categorization'); jest.mock('./invalid'); -jest.mock('../../providers/bedrock', () => ({ - getModel: jest.fn(), -})); jest.mock('../../util/pipeline', () => ({ testPipeline: jest.fn(), })); -jest.mock('../../util/es', () => { - return { - ESClient: { - setClient: 
jest.fn(), - getClient: jest.fn(), - }, - }; -}); - describe('runCategorizationGraph', () => { const mockClient = { asCurrentUser: { @@ -69,9 +56,6 @@ describe('runCategorizationGraph', () => { const mockInvokeInvalid = jest.fn().mockResolvedValue(categorizationInvalidMockedResponse); const mockInvokeReview = jest.fn().mockResolvedValue(categorizationReviewMockedResponse); - // Return a fake LLM to prevent API calls from being made, or require API credentials - (getModel as jest.Mock).mockReturnValue(mockLlm); - // We do not care about ES in these tests, the mock is just to prevent errors. // After this is triggered, the mock of TestPipeline will trigger the expected error, to route to error handler @@ -114,14 +98,14 @@ describe('runCategorizationGraph', () => { it('Ensures that the graph compiles', async () => { try { - await getCategorizationGraph(mockClient); + await getCategorizationGraph(mockClient, mockLlm); } catch (error) { // noop } }); it('Runs the whole graph, with mocked outputs from the LLM.', async () => { - const categorizationGraph = await getCategorizationGraph(mockClient); + const categorizationGraph = await getCategorizationGraph(mockClient, mockLlm); (testPipeline as jest.Mock) .mockResolvedValueOnce(testPipelineValidResult) diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts index 8e3c124ce386e..bd30e401b4f62 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts @@ -16,7 +16,6 @@ import { handleInvalidCategorization } from './invalid'; import { handleErrors } from './errors'; import { handleReview } from './review'; import { CATEGORIZATION_EXAMPLE_ANSWER, ECS_CATEGORIES, ECS_TYPES } from './constants'; -import { ESClient } from '../../util/es'; const graphState: StateGraphArgs['channels'] = { lastExecutedChain: { @@ 
-146,7 +145,6 @@ function chainRouter(state: CategorizationState): string { } export async function getCategorizationGraph(client: IScopedClusterClient, model: BedrockChat) { - ESClient.setClient(client); const workflow = new StateGraph({ channels: graphState, }) @@ -155,7 +153,9 @@ export async function getCategorizationGraph(client: IScopedClusterClient, model .addNode('handleCategorization', (state: CategorizationState) => handleCategorization(state, model) ) - .addNode('handleValidatePipeline', handleValidatePipeline) + .addNode('handleValidatePipeline', (state: CategorizationState) => + handleValidatePipeline(state, client) + ) .addNode('handleCategorizationValidation', handleCategorizationValidation) .addNode('handleInvalidCategorization', (state: CategorizationState) => handleInvalidCategorization(state, model) diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.test.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.test.ts index 6d9b109dba7ec..6834aba357336 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.test.ts @@ -6,7 +6,6 @@ */ import { FakeLLM } from '@langchain/core/utils/testing'; -import { getModel } from '../../providers/bedrock'; import { handleInvalidCategorization } from './invalid'; import { CategorizationState } from '../../types'; import { @@ -19,17 +18,11 @@ const mockLlm = new FakeLLM({ response: JSON.stringify(categorizationMockProcessors, null, 2), }); -jest.mock('../../providers/bedrock', () => ({ - getModel: jest.fn(), -})); const testState: CategorizationState = categorizationTestState; describe('Testing categorization handler', () => { - beforeEach(() => { - (getModel as jest.Mock).mockReturnValue(mockLlm); - }); it('handleInvalidCategorization()', async () => { - const response = await handleInvalidCategorization(testState); + const response = 
await handleInvalidCategorization(testState, mockLlm); expect(response.currentPipeline).toStrictEqual( categorizationExpectedHandlerResponse.currentPipeline ); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts index fad98e7df9f87..7bc2d024bd044 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts @@ -5,13 +5,14 @@ * 2.0. */ import { JsonOutputParser } from '@langchain/core/output_parsers'; +import { BedrockChat } from '@kbn/langchain/server/language_models'; import { CATEGORIZATION_VALIDATION_PROMPT } from './prompts'; import { CategorizationState } from '../../types'; import { combineProcessors } from '../../util/processors'; import { ECS_EVENT_TYPES_PER_CATEGORY } from './constants'; import { Pipeline } from '../../../common'; -export async function handleInvalidCategorization(state: CategorizationState, model) { +export async function handleInvalidCategorization(state: CategorizationState, model: BedrockChat) { const categorizationInvalidPrompt = CATEGORIZATION_VALIDATION_PROMPT; const outputParser = new JsonOutputParser(); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/review.test.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/review.test.ts index e2b78b7f80375..66f7b5446a958 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/review.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/review.test.ts @@ -6,7 +6,6 @@ */ import { FakeLLM } from '@langchain/core/utils/testing'; -import { getModel } from '../../providers/bedrock'; import { handleReview } from './review'; import { CategorizationState } from '../../types'; import { @@ -19,18 +18,11 @@ const mockLlm = new FakeLLM({ response: 
JSON.stringify(categorizationMockProcessors, null, 2), }); -jest.mock('../../providers/bedrock', () => ({ - getModel: jest.fn(), -})); - const testState: CategorizationState = categorizationTestState; describe('Testing categorization handler', () => { - beforeEach(() => { - (getModel as jest.Mock).mockReturnValue(mockLlm); - }); it('handleReview()', async () => { - const response = await handleReview(testState); + const response = await handleReview(testState, mockLlm); expect(response.currentPipeline).toStrictEqual( categorizationExpectedHandlerResponse.currentPipeline ); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.test.ts index 7ec2386c3d8bf..20ddda95d0e97 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.test.ts @@ -6,7 +6,6 @@ */ import { FakeLLM } from '@langchain/core/utils/testing'; -import { getModel } from '../../providers/bedrock'; import { handleDuplicates } from './duplicates'; import { EcsMappingState } from '../../types'; import { ecsTestState } from '../../../__jest__/fixtures/ecs_mapping'; @@ -15,17 +14,11 @@ const mockLlm = new FakeLLM({ response: '{ "message": "ll callback later."}', }); -jest.mock('../../providers/bedrock', () => ({ - getModel: jest.fn(), -})); const testState: EcsMappingState = ecsTestState; describe('Testing ecs handler', () => { - beforeEach(() => { - (getModel as jest.Mock).mockReturnValue(mockLlm); - }); it('handleDuplicates()', async () => { - const response = await handleDuplicates(testState); + const response = await handleDuplicates(testState, mockLlm); expect(response.currentMapping).toStrictEqual({ message: 'll callback later.' 
}); expect(response.lastExecutedChain).toBe('duplicateFields'); }); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts index ddeb78d54c3f7..34427d423054f 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts @@ -7,7 +7,6 @@ import { FakeLLM } from '@langchain/core/utils/testing'; import { getEcsGraph } from './graph'; -import { getModel } from '../../providers/bedrock'; import { ecsInitialMappingMockedResponse, ecsDuplicateMockedResponse, @@ -25,9 +24,6 @@ const mockLlm = new FakeLLM({ response: "I'll callback later.", }); -jest.mock('../../providers/bedrock', () => ({ - getModel: jest.fn(), -})); jest.mock('./mapping'); jest.mock('./duplicates'); jest.mock('./missing'); @@ -42,9 +38,6 @@ describe('EcsGraph', () => { const mockInvokeMissingKeys = jest.fn().mockResolvedValue(ecsMissingKeysMockedResponse); const mockInvokeInvalidEcs = jest.fn().mockResolvedValue(ecsInvalidMappingMockedResponse); - // Return a fake LLM to prevent API calls from being made, or require API credentials - (getModel as jest.Mock).mockReturnValue(mockLlm); - // Returns the initial response, with one duplicate field, to trigger the next step. (handleEcsMapping as jest.Mock).mockImplementation(async () => ({ currentMapping: await mockInvokeMapping(), @@ -72,7 +65,7 @@ describe('EcsGraph', () => { // When getEcsGraph runs, langgraph compiles the graph it will error if the graph has any issues. // Common issues for example detecting a node has no next step, or there is a infinite loop between them. try { - await getEcsGraph(); + await getEcsGraph(mockLlm); } catch (error) { fail(`getEcsGraph threw an error: ${error}`); } @@ -81,7 +74,7 @@ describe('EcsGraph', () => { // The mocked outputs are specifically crafted to trigger ALL different conditions, allowing us to test the whole graph. 
// This is why we have all the expects ensuring each function was called. - const ecsGraph = await getEcsGraph(); + const ecsGraph = await getEcsGraph(mockLlm); const response = await ecsGraph.invoke(mockedRequest); expect(response.results).toStrictEqual(ecsMappingExpectedResults); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts index 6e623ef8ffc2f..899498ddab517 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts @@ -6,7 +6,6 @@ */ import { FakeLLM } from '@langchain/core/utils/testing'; -import { getModel } from '../../providers/bedrock'; import { handleInvalidEcs } from './invalid'; import { EcsMappingState } from '../../types'; import { ecsTestState } from '../../../__jest__/fixtures/ecs_mapping'; @@ -15,17 +14,11 @@ const mockLlm = new FakeLLM({ response: '{ "message": "ll callback later."}', }); -jest.mock('../../providers/bedrock', () => ({ - getModel: jest.fn(), -})); const testState: EcsMappingState = ecsTestState; describe('Testing ecs handlers', () => { - beforeEach(() => { - (getModel as jest.Mock).mockReturnValue(mockLlm); - }); it('handleInvalidEcs()', async () => { - const response = await handleInvalidEcs(testState); + const response = await handleInvalidEcs(testState, mockLlm); expect(response.currentMapping).toStrictEqual({ message: 'll callback later.' 
}); expect(response.lastExecutedChain).toBe('invalidEcs'); }); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts index 1c10c9c1a10b5..beb1778e4e57d 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts @@ -6,7 +6,6 @@ */ import { FakeLLM } from '@langchain/core/utils/testing'; -import { getModel } from '../../providers/bedrock'; import { handleEcsMapping } from './mapping'; import { EcsMappingState } from '../../types'; import { ecsTestState } from '../../../__jest__/fixtures/ecs_mapping'; @@ -15,17 +14,11 @@ const mockLlm = new FakeLLM({ response: '{ "message": "ll callback later."}', }); -jest.mock('../../providers/bedrock', () => ({ - getModel: jest.fn(), -})); const testState: EcsMappingState = ecsTestState; describe('Testing ecs handler', () => { - beforeEach(() => { - (getModel as jest.Mock).mockReturnValue(mockLlm); - }); it('handleEcsMapping()', async () => { - const response = await handleEcsMapping(testState); + const response = await handleEcsMapping(testState, mockLlm); expect(response.currentMapping).toStrictEqual({ message: 'll callback later.' 
}); expect(response.lastExecutedChain).toBe('ecsMapping'); }); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts index f7b6ef592b483..f9826f5757c0d 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts @@ -6,7 +6,6 @@ */ import { FakeLLM } from '@langchain/core/utils/testing'; -import { getModel } from '../../providers/bedrock'; import { handleMissingKeys } from './missing'; import { EcsMappingState } from '../../types'; import { ecsTestState } from '../../../__jest__/fixtures/ecs_mapping'; @@ -15,17 +14,11 @@ const mockLlm = new FakeLLM({ response: '{ "message": "ll callback later."}', }); -jest.mock('../../providers/bedrock', () => ({ - getModel: jest.fn(), -})); const testState: EcsMappingState = ecsTestState; describe('Testing ecs handler', () => { - beforeEach(() => { - (getModel as jest.Mock).mockReturnValue(mockLlm); - }); it('handleMissingKeys()', async () => { - const response = await handleMissingKeys(testState); + const response = await handleMissingKeys(testState, mockLlm); expect(response.currentMapping).toStrictEqual({ message: 'll callback later.' 
}); expect(response.lastExecutedChain).toBe('missingKeys'); }); diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/errors.test.ts b/x-pack/plugins/integration_assistant/server/graphs/related/errors.test.ts index e2d53f7368df2..572c12d050090 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/errors.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/errors.test.ts @@ -6,7 +6,6 @@ */ import { FakeLLM } from '@langchain/core/utils/testing'; -import { getModel } from '../../providers/bedrock'; import { handleErrors } from './errors'; import { RelatedState } from '../../types'; import { @@ -19,17 +18,11 @@ const mockLlm = new FakeLLM({ response: JSON.stringify(relatedMockProcessors, null, 2), }); -jest.mock('../../providers/bedrock', () => ({ - getModel: jest.fn(), -})); const testState: RelatedState = relatedTestState; describe('Testing related handler', () => { - beforeEach(() => { - (getModel as jest.Mock).mockReturnValue(mockLlm); - }); it('handleErrors()', async () => { - const response = await handleErrors(testState); + const response = await handleErrors(testState, mockLlm); expect(response.currentPipeline).toStrictEqual(relatedExpectedHandlerResponse.currentPipeline); expect(response.lastExecutedChain).toBe('error'); }); diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/graph.test.ts b/x-pack/plugins/integration_assistant/server/graphs/related/graph.test.ts index 416b4d8ff23d0..3c783822d2bbf 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/graph.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/graph.test.ts @@ -8,7 +8,6 @@ import { IScopedClusterClient } from '@kbn/core/server'; import { FakeLLM } from '@langchain/core/utils/testing'; import { getRelatedGraph } from './graph'; -import { getModel } from '../../providers/bedrock'; import { relatedExpectedResults, relatedErrorMockedResponse, @@ -31,23 +30,11 @@ const mockLlm = new 
FakeLLM({ jest.mock('./errors'); jest.mock('./review'); jest.mock('./related'); -jest.mock('../../providers/bedrock', () => ({ - getModel: jest.fn(), -})); jest.mock('../../util/pipeline', () => ({ testPipeline: jest.fn(), })); -jest.mock('../../util/es', () => { - return { - ESClient: { - setClient: jest.fn(), - getClient: jest.fn(), - }, - }; -}); - describe('runRelatedGraph', () => { const mockClient = { asCurrentUser: { @@ -62,9 +49,6 @@ describe('runRelatedGraph', () => { const mockInvokeError = jest.fn().mockResolvedValue(relatedErrorMockedResponse); const mockInvokeReview = jest.fn().mockResolvedValue(relatedReviewMockedResponse); - // Return a fake LLM to prevent API calls from being made, or require API credentials - (getModel as jest.Mock).mockReturnValue(mockLlm); - // After this is triggered, the mock of TestPipeline will trigger the expected error, to route to error handler (handleRelated as jest.Mock).mockImplementation(async () => ({ currentPipeline: relatedInitialPipeline, @@ -97,14 +81,14 @@ describe('runRelatedGraph', () => { it('Ensures that the graph compiles', async () => { try { - await getRelatedGraph(mockClient); + await getRelatedGraph(mockClient, mockLlm); } catch (error) { // noop } }); it('Runs the whole graph, with mocked outputs from the LLM.', async () => { - const relatedGraph = await getRelatedGraph(mockClient); + const relatedGraph = await getRelatedGraph(mockClient, mockLlm); (testPipeline as jest.Mock) .mockResolvedValueOnce(testPipelineValidResult) diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts index ad3266b6e19b6..1c67bf19cc884 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts @@ -14,7 +14,6 @@ import { handleRelated } from './related'; import { handleErrors } from './errors'; import { handleReview } from './review'; 
import { RELATED_ECS_FIELDS, RELATED_EXAMPLE_ANSWER } from './constants'; -import { ESClient } from '../../util/es'; const graphState: StateGraphArgs['channels'] = { lastExecutedChain: { @@ -134,12 +133,13 @@ function chainRouter(state: RelatedState): string { } export async function getRelatedGraph(client: IScopedClusterClient, model: BedrockChat) { - ESClient.setClient(client); const workflow = new StateGraph({ channels: graphState }) .addNode('modelInput', modelInput) .addNode('modelOutput', modelOutput) .addNode('handleRelated', (state: RelatedState) => handleRelated(state, model)) - .addNode('handleValidatePipeline', handleValidatePipeline) + .addNode('handleValidatePipeline', (state: RelatedState) => + handleValidatePipeline(state, client) + ) .addNode('handleErrors', (state: RelatedState) => handleErrors(state, model)) .addNode('handleReview', (state: RelatedState) => handleReview(state, model)) .addEdge(START, 'modelInput') diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/related.test.ts b/x-pack/plugins/integration_assistant/server/graphs/related/related.test.ts index bc5968514bd5c..ef0b0abe27983 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/related.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/related.test.ts @@ -6,7 +6,6 @@ */ import { FakeLLM } from '@langchain/core/utils/testing'; -import { getModel } from '../../providers/bedrock'; import { handleRelated } from './related'; import { RelatedState } from '../../types'; import { @@ -19,17 +18,11 @@ const mockLlm = new FakeLLM({ response: JSON.stringify(relatedMockProcessors, null, 2), }); -jest.mock('../../providers/bedrock', () => ({ - getModel: jest.fn(), -})); const testState: RelatedState = relatedTestState; describe('Testing related handler', () => { - beforeEach(() => { - (getModel as jest.Mock).mockReturnValue(mockLlm); - }); it('handleRelated()', async () => { - const response = await handleRelated(testState); + const 
response = await handleRelated(testState, mockLlm); expect(response.currentPipeline).toStrictEqual(relatedExpectedHandlerResponse.currentPipeline); expect(response.lastExecutedChain).toBe('related'); }); diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/review.test.ts b/x-pack/plugins/integration_assistant/server/graphs/related/review.test.ts index 00debae37fa4e..c39b68a7b5ad8 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/review.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/review.test.ts @@ -6,7 +6,6 @@ */ import { FakeLLM } from '@langchain/core/utils/testing'; -import { getModel } from '../../providers/bedrock'; import { handleReview } from './review'; import { RelatedState } from '../../types'; import { @@ -19,17 +18,11 @@ const mockLlm = new FakeLLM({ response: JSON.stringify(relatedMockProcessors, null, 2), }); -jest.mock('../../providers/bedrock', () => ({ - getModel: jest.fn(), -})); const testState: RelatedState = relatedTestState; describe('Testing related handler', () => { - beforeEach(() => { - (getModel as jest.Mock).mockReturnValue(mockLlm); - }); it('handleReview()', async () => { - const response = await handleReview(testState); + const response = await handleReview(testState, mockLlm); expect(response.currentPipeline).toStrictEqual(relatedExpectedHandlerResponse.currentPipeline); expect(response.lastExecutedChain).toBe('review'); }); diff --git a/x-pack/plugins/integration_assistant/server/util/es.ts b/x-pack/plugins/integration_assistant/server/util/es.ts deleted file mode 100644 index 089658e1171e7..0000000000000 --- a/x-pack/plugins/integration_assistant/server/util/es.ts +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -import type { IScopedClusterClient } from '@kbn/core-elasticsearch-server'; - -// Allows the initialization of the langgraph to set a reference to the context ES client. -// When handleValidatePipeline runs later, it will fetch the reference again. -export class ESClient { - private static client: IScopedClusterClient | null = null; - - public static setClient(client: IScopedClusterClient): void { - if (!this.client) { - this.client = client; - } - } - - public static getClient(): IScopedClusterClient { - if (!this.client) { - throw new Error('Elasticsearch client has not been instantiated.'); - } - return this.client; - } -} diff --git a/x-pack/plugins/integration_assistant/server/util/graph.ts b/x-pack/plugins/integration_assistant/server/util/graph.ts index 325f30b5027a9..adc2756d515a8 100644 --- a/x-pack/plugins/integration_assistant/server/util/graph.ts +++ b/x-pack/plugins/integration_assistant/server/util/graph.ts @@ -4,14 +4,14 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -import { EcsMappingState, CategorizationState, RelatedState } from '../types'; +import { IScopedClusterClient } from '@kbn/core-elasticsearch-server'; +import { CategorizationState, RelatedState } from '../types'; import { testPipeline } from './pipeline'; -import { ESClient } from './es'; export async function handleValidatePipeline( - state: EcsMappingState | CategorizationState | RelatedState -): Promise | Partial | Partial> { - const client = ESClient.getClient(); + state: CategorizationState | RelatedState, + client: IScopedClusterClient +): Promise | Partial> { const results = await testPipeline(state.rawSamples, state.currentPipeline, client); return { errors: results.errors, diff --git a/x-pack/plugins/integration_assistant/server/util/index.ts b/x-pack/plugins/integration_assistant/server/util/index.ts index 1e10c1bbf6258..b84db3eee4ee7 100644 --- a/x-pack/plugins/integration_assistant/server/util/index.ts +++ b/x-pack/plugins/integration_assistant/server/util/index.ts @@ -11,4 +11,3 @@ export { generateFields, mergeSamples } from './samples'; export { deepCopy, generateUniqueId } from './util'; export { testPipeline } from './pipeline'; export { combineProcessors } from './processors'; -export { ESClient } from './es'; From a3da6136a9ab1f97f6e5dc8fb69a67513a39676c Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Fri, 31 May 2024 13:13:14 +0200 Subject: [PATCH 33/62] fix create router type for backend --- x-pack/plugins/integration_assistant/server/plugin.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugins/integration_assistant/server/plugin.ts b/x-pack/plugins/integration_assistant/server/plugin.ts index 0a67c89813790..87f261de28c3b 100644 --- a/x-pack/plugins/integration_assistant/server/plugin.ts +++ b/x-pack/plugins/integration_assistant/server/plugin.ts @@ -44,7 +44,7 @@ export class IntegrationAssistantPlugin >('integrationAssistant', () => ({ getStartServices: core.getStartServices, })); - const router = 
core.http.createRouter(); + const router = core.http.createRouter(); this.logger.debug('integrationAssistant api: Setup'); registerRoutes(router); From 71725fd162301eaf1dd9d298e37dc5c94249adbd Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Fri, 31 May 2024 13:46:31 +0200 Subject: [PATCH 34/62] renamed all template files to pass linter checks --- .../server/integration_builder/agent.ts | 5 +- .../integration_builder/build_integration.ts | 2 +- .../server/integration_builder/data_stream.ts | 51 +++++++++---------- .../server/integration_builder/dev_folders.ts | 17 ++++--- .../server/integration_builder/fields.ts | 25 ++++----- .../server/integration_builder/pipeline.ts | 6 +-- ...udwatch.yml.hbs => aws_cloudwatch.yml.hbs} | 0 .../agent/{aws-s3.yml.hbs => aws_s3.yml.hbs} | 0 ...age.yml.hbs => azure_blob_storage.yml.hbs} | 0 ...venthub.yml.hbs => azure_eventhub.yml.hbs} | 0 ...{gcp-pubsub.yml.hbs => gcp_pubsub.yml.hbs} | 0 ...ase-fields.yml.njk => base_fields.yml.njk} | 0 ...ml.njk => aws_cloudwatch_manifest.yml.njk} | 0 ...st.yml.njk => aws_s3.yml_manifest.yml.njk} | 0 ...jk => azure_blob_storage_manifest.yml.njk} | 0 ...ml.njk => azure_eventhub_manifest.yml.njk} | 0 ...st.yml.njk => gcp_pubsub_manifest.yml.njk} | 0 ...mmon-config.yml => test_common_config.yml} | 0 .../{README.md.njk => readme.md.njk} | 0 ...compose.yml.njk => docker_compose.yml.njk} | 0 ...-filestream.njk => service_filestream.njk} | 0 .../{service-gcs.njk => service_gcs.njk} | 0 ...ervice-logfile.njk => service_logfile.njk} | 0 .../{service-tcp.njk => service_tcp.njk} | 0 .../{service-udp.njk => service_udp.njk} | 0 ...yml.njk => test_filestream_config.yml.njk} | 0 ...config.yml.njk => test_gcs_config.yml.njk} | 0 ...ig.yml.njk => test_logfile_config.yml.njk} | 0 ...config.yml.njk => test_tcp_config.yml.njk} | 0 ...config.yml.njk => test_udp_config.yml.njk} | 0 30 files changed, 52 insertions(+), 54 deletions(-) rename 
x-pack/plugins/integration_assistant/server/templates/agent/{aws-cloudwatch.yml.hbs => aws_cloudwatch.yml.hbs} (100%) rename x-pack/plugins/integration_assistant/server/templates/agent/{aws-s3.yml.hbs => aws_s3.yml.hbs} (100%) rename x-pack/plugins/integration_assistant/server/templates/agent/{azure-blob-storage.yml.hbs => azure_blob_storage.yml.hbs} (100%) rename x-pack/plugins/integration_assistant/server/templates/agent/{azure-eventhub.yml.hbs => azure_eventhub.yml.hbs} (100%) rename x-pack/plugins/integration_assistant/server/templates/agent/{gcp-pubsub.yml.hbs => gcp_pubsub.yml.hbs} (100%) rename x-pack/plugins/integration_assistant/server/templates/{base-fields.yml.njk => base_fields.yml.njk} (100%) rename x-pack/plugins/integration_assistant/server/templates/manifest/{aws-cloudwatch_manifest.yml.njk => aws_cloudwatch_manifest.yml.njk} (100%) rename x-pack/plugins/integration_assistant/server/templates/manifest/{aws-s3.yml_manifest.yml.njk => aws_s3.yml_manifest.yml.njk} (100%) rename x-pack/plugins/integration_assistant/server/templates/manifest/{azure-blob-storage_manifest.yml.njk => azure_blob_storage_manifest.yml.njk} (100%) rename x-pack/plugins/integration_assistant/server/templates/manifest/{azure-eventhub_manifest.yml.njk => azure_eventhub_manifest.yml.njk} (100%) rename x-pack/plugins/integration_assistant/server/templates/manifest/{gcp-pubsub_manifest.yml.njk => gcp_pubsub_manifest.yml.njk} (100%) rename x-pack/plugins/integration_assistant/server/templates/pipeline_tests/{test-common-config.yml => test_common_config.yml} (100%) rename x-pack/plugins/integration_assistant/server/templates/{README.md.njk => readme.md.njk} (100%) rename x-pack/plugins/integration_assistant/server/templates/system_tests/{docker-compose.yml.njk => docker_compose.yml.njk} (100%) rename x-pack/plugins/integration_assistant/server/templates/system_tests/{service-filestream.njk => service_filestream.njk} (100%) rename 
x-pack/plugins/integration_assistant/server/templates/system_tests/{service-gcs.njk => service_gcs.njk} (100%) rename x-pack/plugins/integration_assistant/server/templates/system_tests/{service-logfile.njk => service_logfile.njk} (100%) rename x-pack/plugins/integration_assistant/server/templates/system_tests/{service-tcp.njk => service_tcp.njk} (100%) rename x-pack/plugins/integration_assistant/server/templates/system_tests/{service-udp.njk => service_udp.njk} (100%) rename x-pack/plugins/integration_assistant/server/templates/system_tests/{test-filestream-config.yml.njk => test_filestream_config.yml.njk} (100%) rename x-pack/plugins/integration_assistant/server/templates/system_tests/{test-gcs-config.yml.njk => test_gcs_config.yml.njk} (100%) rename x-pack/plugins/integration_assistant/server/templates/system_tests/{test-logfile-config.yml.njk => test_logfile_config.yml.njk} (100%) rename x-pack/plugins/integration_assistant/server/templates/system_tests/{test-tcp-config.yml.njk => test_tcp_config.yml.njk} (100%) rename x-pack/plugins/integration_assistant/server/templates/system_tests/{test-udp-config.yml.njk => test_udp_config.yml.njk} (100%) diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/agent.ts b/x-pack/plugins/integration_assistant/server/integration_builder/agent.ts index f6e98562731e1..dac7d8fd1ed81 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/agent.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/agent.ts @@ -19,7 +19,10 @@ export function createAgentInput(specificDataStreamDir: string, inputTypes: Inpu const commonFile = readSync(commonFilePath); for (const inputType of inputTypes) { - const inputTypeFilePath = joinPath(agentTemplatesDir, `${inputType}.yml.hbs`); + const inputTypeFilePath = joinPath( + agentTemplatesDir, + `${inputType.replaceAll('-', '_')}.yml.hbs` + ); const inputTypeFile = readSync(inputTypeFilePath); const combinedContents = 
`${inputTypeFile}\n${commonFile}`; diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts b/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts index f7af9d5cd109d..2d416ee00a691 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts @@ -95,7 +95,7 @@ function createChangelog(packageDir: string): void { function createReadme(packageDir: string, integration: Integration) { const readmeDirPath = joinPath(packageDir, '_dev/build/docs/'); ensureDirSync(readmeDirPath); - const readmeTemplate = nunjucks.render('README.md.njk', { + const readmeTemplate = nunjucks.render('readme.md.njk', { package_name: integration.name, data_streams: integration.dataStreams, }); diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/data_stream.ts b/x-pack/plugins/integration_assistant/server/integration_builder/data_stream.ts index 8e80a1d4648dc..6b99de53f0917 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/data_stream.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/data_stream.ts @@ -8,26 +8,21 @@ import { join as joinPath } from 'path'; import nunjucks from 'nunjucks'; import { DataStream } from '../../common'; -import { asyncCopy, asyncEnsureDir, asyncCreate, asyncListDir } from '../util'; +import { copySync, ensureDirSync, createSync, listDirSync } from '../util'; -export async function createDatastream( +export function createDatastream( packageName: string, specificDataStreamDir: string, dataStream: DataStream -): Promise { +): void { const dataStreamName = dataStream.name; const pipelineDir = joinPath(specificDataStreamDir, 'elasticsearch', 'ingest_pipeline'); const title = dataStream.title; const description = dataStream.description; - await asyncEnsureDir(specificDataStreamDir); - await 
createDataStreamFolders(specificDataStreamDir, pipelineDir); - await createPipelineTests( - specificDataStreamDir, - dataStream.rawSamples, - packageName, - dataStreamName - ); + ensureDirSync(specificDataStreamDir); + createDataStreamFolders(specificDataStreamDir, pipelineDir); + createPipelineTests(specificDataStreamDir, dataStream.rawSamples, packageName, dataStreamName); const dataStreams: string[] = []; for (const inputType of dataStream.inputTypes) { @@ -37,7 +32,10 @@ export async function createDatastream( package_name: packageName, data_stream_name: dataStreamName, }; - const dataStreamManifest = nunjucks.render(`${inputType}_manifest.yml.njk`, mappedValues); + const dataStreamManifest = nunjucks.render( + `${inputType.replaceAll('-', '_')}_manifest.yml.njk`, + mappedValues + ); const commonManifest = nunjucks.render('common_manifest.yml.njk', mappedValues); const combinedManifest = `${dataStreamManifest}\n${commonManifest}`; @@ -59,43 +57,40 @@ export async function createDatastream( data_streams: dataStreams, }); - await asyncCreate(joinPath(specificDataStreamDir, 'manifest.yml'), finalManifest); + createSync(joinPath(specificDataStreamDir, 'manifest.yml'), finalManifest); } -async function createDataStreamFolders( - specificDataStreamDir: string, - pipelineDir: string -): Promise { +function createDataStreamFolders(specificDataStreamDir: string, pipelineDir: string): void { const dataStreamTemplatesDir = joinPath(__dirname, '../templates/data_stream'); try { - const items = await asyncListDir(dataStreamTemplatesDir); + const items = listDirSync(dataStreamTemplatesDir); for (const item of items) { const s = joinPath(dataStreamTemplatesDir, item); const d = joinPath(specificDataStreamDir, item); - await asyncCopy(s, d); + copySync(s, d); } - await asyncEnsureDir(pipelineDir); + ensureDirSync(pipelineDir); } catch (error) { throw error; } } -async function createPipelineTests( +function createPipelineTests( specificDataStreamDir: string, rawSamples: 
string[], packageName: string, dataStreamName: string -): Promise { +): void { const pipelineTestTemplatesDir = joinPath(__dirname, '../templates/pipeline_tests'); const pipelineTestsDir = joinPath(specificDataStreamDir, '_dev/test/pipeline'); - await asyncEnsureDir(pipelineTestsDir); - const items = await asyncListDir(pipelineTestTemplatesDir); + ensureDirSync(pipelineTestsDir); + const items = listDirSync(pipelineTestTemplatesDir); for (const item of items) { const s = joinPath(pipelineTestTemplatesDir, item); - const d = joinPath(pipelineTestsDir, item); - await asyncCopy(s, d); + const d = joinPath(pipelineTestsDir, item.replaceAll('_', '-')); + copySync(s, d); } const formattedPackageName = packageName.replace(/_/g, '-'); const formattedDataStreamName = dataStreamName.replace(/_/g, '-'); @@ -103,7 +98,7 @@ async function createPipelineTests( pipelineTestsDir, `test-${formattedPackageName}-${formattedDataStreamName}.log` ); - await asyncCreate(testFileName, rawSamples.join('\n')); + createSync(testFileName, rawSamples.join('\n')); } // We are skipping this one for now, as its not really needed for custom integrations @@ -123,7 +118,7 @@ async function createPipelineTests( fs.mkdirSync(systemTestFolder, { recursive: true }); - const systemTestTemplate = env.getTemplate(`test-${inputType}-config.yml.njk`); + const systemTestTemplate = env.getTemplate(`test_${inputType.replaceAll('-', '_')}_config.yml.njk`); const systemTestRendered = systemTestTemplate.render(mappedValues); const systemTestFileName = joinPath(systemTestFolder, `test-${inputType}-config.yml`); diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/dev_folders.ts b/x-pack/plugins/integration_assistant/server/integration_builder/dev_folders.ts index 3277cbc590035..6f3edf3feaa1a 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/dev_folders.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/dev_folders.ts @@ -8,12 +8,12 @@ import { 
join as joinPath } from 'path'; import nunjucks from 'nunjucks'; import { Integration } from '../../common'; -import { asyncEnsureDir, asyncCreate } from '../util'; +import { ensureDirSync, createSync } from '../util'; -export async function createPackageSystemTests(integrationDir: string, integration: Integration) { +export function createPackageSystemTests(integrationDir: string, integration: Integration) { const systemTestsDockerDir = joinPath(integrationDir, '_dev/deploy/docker/'); const systemTestsSamplesDir = joinPath(systemTestsDockerDir, 'sample_logs'); - await asyncEnsureDir(systemTestsSamplesDir); + ensureDirSync(systemTestsSamplesDir); const streamVersion = '0.13.0'; const dockerComposeVersion = '2.3'; @@ -27,7 +27,7 @@ export async function createPackageSystemTests(integrationDir: string, integrati `test-${packageName}-${dataStreamName}.log` ); const rawSamplesContent = stream.rawSamples.join('\n'); - await asyncCreate(systemTestFileName, rawSamplesContent); + createSync(systemTestFileName, rawSamplesContent); for (const inputType of stream.inputTypes) { const mappedValues = { @@ -35,16 +35,19 @@ export async function createPackageSystemTests(integrationDir: string, integrati data_stream_name: dataStreamName, stream_version: streamVersion, }; - const renderedService = nunjucks.render(`service-${inputType}.njk`, mappedValues); + const renderedService = nunjucks.render( + `service_${inputType.replaceAll('_', '-')}.njk`, + mappedValues + ); dockerServices.push(renderedService); } } - const renderedDockerCompose = nunjucks.render('docker-compose.yml.njk', { + const renderedDockerCompose = nunjucks.render('docker_compose.yml.njk', { services: dockerServices.join('\n'), docker_compose_version: dockerComposeVersion, }); const dockerComposeFileName = joinPath(systemTestsDockerDir, 'docker-compose.yml'); - await asyncCreate(dockerComposeFileName, renderedDockerCompose); + createSync(dockerComposeFileName, renderedDockerCompose); } diff --git 
a/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts b/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts index d420548276d5d..674b4871626ac 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts @@ -7,41 +7,38 @@ import nunjucks from 'nunjucks'; -import { generateFields, mergeSamples, asyncCreate } from '../util'; +import { generateFields, mergeSamples, createSync } from '../util'; interface Doc { [key: string]: any; } -export async function createFieldMapping( +export function createFieldMapping( packageName: string, dataStreamName: string, specificDataStreamDir: string, docs: Doc[] -): Promise { - await createBaseFields(specificDataStreamDir, packageName, dataStreamName); - await createCustomFields(specificDataStreamDir, docs); +): void { + createBaseFields(specificDataStreamDir, packageName, dataStreamName); + createCustomFields(specificDataStreamDir, docs); } -async function createBaseFields( +function createBaseFields( specificDataStreamDir: string, packageName: string, dataStreamName: string -): Promise { +): void { const datasetName = `${packageName}.${dataStreamName}`; - const baseFields = nunjucks.render('base-fields.yml.njk', { + const baseFields = nunjucks.render('base_fields.yml.njk', { module: packageName, dataset: datasetName, }); - await asyncCreate(`${specificDataStreamDir}/base-fields.yml`, baseFields); + createSync(`${specificDataStreamDir}/base-fields.yml`, baseFields); } -async function createCustomFields( - specificDataStreamDir: string, - pipelineResults: Doc[] -): Promise { +function createCustomFields(specificDataStreamDir: string, pipelineResults: Doc[]): void { const mergedResults = mergeSamples(pipelineResults); const fieldKeys = generateFields(mergedResults); - await asyncCreate(`${specificDataStreamDir}/fields/fields.yml`, fieldKeys); + 
createSync(`${specificDataStreamDir}/fields/fields.yml`, fieldKeys); } diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/pipeline.ts b/x-pack/plugins/integration_assistant/server/integration_builder/pipeline.ts index e3f8726ae69de..1194b6a536535 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/pipeline.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/pipeline.ts @@ -6,10 +6,10 @@ */ import { join as joinPath } from 'path'; import yaml from 'js-yaml'; -import { asyncCreate } from '../util'; +import { createSync } from '../util'; -export async function createPipeline(specificDataStreamDir: string, pipeline: object) { +export function createPipeline(specificDataStreamDir: string, pipeline: object): void { const filePath = joinPath(specificDataStreamDir, 'elasticsearch/ingest_pipeline/default.yml'); const yamlContent = '---\n' + yaml.dump(pipeline, { sortKeys: false }); - await asyncCreate(filePath, yamlContent); + createSync(filePath, yamlContent); } diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/aws-cloudwatch.yml.hbs b/x-pack/plugins/integration_assistant/server/templates/agent/aws_cloudwatch.yml.hbs similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/agent/aws-cloudwatch.yml.hbs rename to x-pack/plugins/integration_assistant/server/templates/agent/aws_cloudwatch.yml.hbs diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/aws-s3.yml.hbs b/x-pack/plugins/integration_assistant/server/templates/agent/aws_s3.yml.hbs similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/agent/aws-s3.yml.hbs rename to x-pack/plugins/integration_assistant/server/templates/agent/aws_s3.yml.hbs diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/azure-blob-storage.yml.hbs b/x-pack/plugins/integration_assistant/server/templates/agent/azure_blob_storage.yml.hbs similarity index 
100% rename from x-pack/plugins/integration_assistant/server/templates/agent/azure-blob-storage.yml.hbs rename to x-pack/plugins/integration_assistant/server/templates/agent/azure_blob_storage.yml.hbs diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/azure-eventhub.yml.hbs b/x-pack/plugins/integration_assistant/server/templates/agent/azure_eventhub.yml.hbs similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/agent/azure-eventhub.yml.hbs rename to x-pack/plugins/integration_assistant/server/templates/agent/azure_eventhub.yml.hbs diff --git a/x-pack/plugins/integration_assistant/server/templates/agent/gcp-pubsub.yml.hbs b/x-pack/plugins/integration_assistant/server/templates/agent/gcp_pubsub.yml.hbs similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/agent/gcp-pubsub.yml.hbs rename to x-pack/plugins/integration_assistant/server/templates/agent/gcp_pubsub.yml.hbs diff --git a/x-pack/plugins/integration_assistant/server/templates/base-fields.yml.njk b/x-pack/plugins/integration_assistant/server/templates/base_fields.yml.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/base-fields.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/base_fields.yml.njk diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/aws-cloudwatch_manifest.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/aws_cloudwatch_manifest.yml.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/manifest/aws-cloudwatch_manifest.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/manifest/aws_cloudwatch_manifest.yml.njk diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/aws-s3.yml_manifest.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/aws_s3.yml_manifest.yml.njk similarity index 100% rename from 
x-pack/plugins/integration_assistant/server/templates/manifest/aws-s3.yml_manifest.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/manifest/aws_s3.yml_manifest.yml.njk diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/azure-blob-storage_manifest.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/azure_blob_storage_manifest.yml.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/manifest/azure-blob-storage_manifest.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/manifest/azure_blob_storage_manifest.yml.njk diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/azure-eventhub_manifest.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/azure_eventhub_manifest.yml.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/manifest/azure-eventhub_manifest.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/manifest/azure_eventhub_manifest.yml.njk diff --git a/x-pack/plugins/integration_assistant/server/templates/manifest/gcp-pubsub_manifest.yml.njk b/x-pack/plugins/integration_assistant/server/templates/manifest/gcp_pubsub_manifest.yml.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/manifest/gcp-pubsub_manifest.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/manifest/gcp_pubsub_manifest.yml.njk diff --git a/x-pack/plugins/integration_assistant/server/templates/pipeline_tests/test-common-config.yml b/x-pack/plugins/integration_assistant/server/templates/pipeline_tests/test_common_config.yml similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/pipeline_tests/test-common-config.yml rename to x-pack/plugins/integration_assistant/server/templates/pipeline_tests/test_common_config.yml diff --git 
a/x-pack/plugins/integration_assistant/server/templates/README.md.njk b/x-pack/plugins/integration_assistant/server/templates/readme.md.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/README.md.njk rename to x-pack/plugins/integration_assistant/server/templates/readme.md.njk diff --git a/x-pack/plugins/integration_assistant/server/templates/system_tests/docker-compose.yml.njk b/x-pack/plugins/integration_assistant/server/templates/system_tests/docker_compose.yml.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/system_tests/docker-compose.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/system_tests/docker_compose.yml.njk diff --git a/x-pack/plugins/integration_assistant/server/templates/system_tests/service-filestream.njk b/x-pack/plugins/integration_assistant/server/templates/system_tests/service_filestream.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/system_tests/service-filestream.njk rename to x-pack/plugins/integration_assistant/server/templates/system_tests/service_filestream.njk diff --git a/x-pack/plugins/integration_assistant/server/templates/system_tests/service-gcs.njk b/x-pack/plugins/integration_assistant/server/templates/system_tests/service_gcs.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/system_tests/service-gcs.njk rename to x-pack/plugins/integration_assistant/server/templates/system_tests/service_gcs.njk diff --git a/x-pack/plugins/integration_assistant/server/templates/system_tests/service-logfile.njk b/x-pack/plugins/integration_assistant/server/templates/system_tests/service_logfile.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/system_tests/service-logfile.njk rename to x-pack/plugins/integration_assistant/server/templates/system_tests/service_logfile.njk diff --git 
a/x-pack/plugins/integration_assistant/server/templates/system_tests/service-tcp.njk b/x-pack/plugins/integration_assistant/server/templates/system_tests/service_tcp.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/system_tests/service-tcp.njk rename to x-pack/plugins/integration_assistant/server/templates/system_tests/service_tcp.njk diff --git a/x-pack/plugins/integration_assistant/server/templates/system_tests/service-udp.njk b/x-pack/plugins/integration_assistant/server/templates/system_tests/service_udp.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/system_tests/service-udp.njk rename to x-pack/plugins/integration_assistant/server/templates/system_tests/service_udp.njk diff --git a/x-pack/plugins/integration_assistant/server/templates/system_tests/test-filestream-config.yml.njk b/x-pack/plugins/integration_assistant/server/templates/system_tests/test_filestream_config.yml.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/system_tests/test-filestream-config.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/system_tests/test_filestream_config.yml.njk diff --git a/x-pack/plugins/integration_assistant/server/templates/system_tests/test-gcs-config.yml.njk b/x-pack/plugins/integration_assistant/server/templates/system_tests/test_gcs_config.yml.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/system_tests/test-gcs-config.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/system_tests/test_gcs_config.yml.njk diff --git a/x-pack/plugins/integration_assistant/server/templates/system_tests/test-logfile-config.yml.njk b/x-pack/plugins/integration_assistant/server/templates/system_tests/test_logfile_config.yml.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/system_tests/test-logfile-config.yml.njk rename to 
x-pack/plugins/integration_assistant/server/templates/system_tests/test_logfile_config.yml.njk diff --git a/x-pack/plugins/integration_assistant/server/templates/system_tests/test-tcp-config.yml.njk b/x-pack/plugins/integration_assistant/server/templates/system_tests/test_tcp_config.yml.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/system_tests/test-tcp-config.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/system_tests/test_tcp_config.yml.njk diff --git a/x-pack/plugins/integration_assistant/server/templates/system_tests/test-udp-config.yml.njk b/x-pack/plugins/integration_assistant/server/templates/system_tests/test_udp_config.yml.njk similarity index 100% rename from x-pack/plugins/integration_assistant/server/templates/system_tests/test-udp-config.yml.njk rename to x-pack/plugins/integration_assistant/server/templates/system_tests/test_udp_config.yml.njk From 4242078e21454713308ab98ed4c36974d8f79d7f Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Fri, 31 May 2024 15:08:32 +0200 Subject: [PATCH 35/62] revert commented out request validation --- .../actions/server/sub_action_framework/sub_action_connector.ts | 2 +- .../server/routes/post_actions_connector_execute.ts | 2 -- .../search_playground/server/lib/conversational_chain.test.ts | 2 +- 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/x-pack/plugins/actions/server/sub_action_framework/sub_action_connector.ts b/x-pack/plugins/actions/server/sub_action_framework/sub_action_connector.ts index c5a6477d03e80..19cc7e90d6254 100644 --- a/x-pack/plugins/actions/server/sub_action_framework/sub_action_connector.ts +++ b/x-pack/plugins/actions/server/sub_action_framework/sub_action_connector.ts @@ -162,7 +162,7 @@ export abstract class SubActionConnector { timeout, }); - // this.validateResponse(responseSchema, res.data); + this.validateResponse(responseSchema, res.data); return res; } catch (error) { diff --git 
a/x-pack/plugins/elastic_assistant/server/routes/post_actions_connector_execute.ts b/x-pack/plugins/elastic_assistant/server/routes/post_actions_connector_execute.ts index 981e0cb61de81..2d53106bacf13 100644 --- a/x-pack/plugins/elastic_assistant/server/routes/post_actions_connector_execute.ts +++ b/x-pack/plugins/elastic_assistant/server/routes/post_actions_connector_execute.ts @@ -5,8 +5,6 @@ * 2.0. */ -/* eslint-disable complexity */ - import { IRouter, Logger } from '@kbn/core/server'; import { transformError } from '@kbn/securitysolution-es-utils'; import { getRequestAbortedSignal } from '@kbn/data-plugin/server'; diff --git a/x-pack/plugins/search_playground/server/lib/conversational_chain.test.ts b/x-pack/plugins/search_playground/server/lib/conversational_chain.test.ts index 8d67f6f03b8d1..73601a8fdf47c 100644 --- a/x-pack/plugins/search_playground/server/lib/conversational_chain.test.ts +++ b/x-pack/plugins/search_playground/server/lib/conversational_chain.test.ts @@ -9,7 +9,7 @@ import type { Client } from '@elastic/elasticsearch'; import { createAssist as Assist } from '../utils/assist'; import { ConversationalChain } from './conversational_chain'; import { FakeListChatModel } from '@langchain/core/utils/testing'; -import { FakeListLLM } from 'langchain/llms/fake'; +import { FakeListLLM } from '@langchain/core/utils/testing'; import { BaseChatModel } from '@langchain/core/language_models/chat_models'; import { Message } from 'ai'; From 62668fef1bca465d3df75b31f7ac1fc030bee450 Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Fri, 31 May 2024 17:01:55 +0200 Subject: [PATCH 36/62] fix sending object instead of string to missing keys prompt --- .../integration_assistant/server/graphs/ecs/constants.ts | 2 +- .../plugins/integration_assistant/server/graphs/ecs/missing.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts 
b/x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts index cb5f2e34d6076..2c55087fcf952 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts @@ -1844,8 +1844,8 @@ export const ECS_EXAMPLE_ANSWER = { confidence: 0.85, type: 'date', date_formats: ['UNIX'], - version: null, }, + version: null, event: { DeviceId: null, CustomerId: null, diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts index bfb83f94f9b0d..e1fd5996e9da1 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts @@ -16,7 +16,7 @@ export async function handleMissingKeys(state: EcsMappingState, model: BedrockCh const currentMapping = await ecsMissingGraph.invoke({ ecs: state.ecs, - current_mapping: state.currentMapping, + current_mapping: JSON.stringify(state.currentMapping, null, 2), ex_answer: state.exAnswer, formatted_samples: state.formattedSamples, missing_keys: state?.missingKeys, From d238614874eeb26d7178d15a090cbe951357fc3c Mon Sep 17 00:00:00 2001 From: Bharat Pasupula Date: Mon, 3 Jun 2024 14:49:16 +0200 Subject: [PATCH 37/62] Skip fields_to_remove if an empty array --- .../integration_assistant/server/graphs/ecs/pipeline.ts | 4 ++-- .../integration_assistant/server/templates/pipeline.yml.njk | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts index 05d54e71fa624..0c17d56157814 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts @@ -7,8 +7,8 @@ import { load } from 'js-yaml'; import { Environment, FileSystemLoader } from 'nunjucks'; import { 
join as joinPath } from 'path'; -import { ECS_TYPES } from './constants'; import { EcsMappingState } from '../../types'; +import { ECS_TYPES } from './constants'; interface IngestPipeline { [key: string]: any; @@ -155,7 +155,7 @@ export function createPipeline(state: EcsMappingState): IngestPipeline { const processors = generateProcessors(state.currentMapping, samples); // Retrieve all source field names from convert processors to populate single remove processor: - const fieldsToRemove = processors.filter((p: any) => p.convert).map((p: any) => p.convert.field); + const fieldsToRemove = processors.map((p: any) => p.convert?.field).filter((f: any) => f != null); const mappedValues = { processors, ecs_version: state.ecsVersion, diff --git a/x-pack/plugins/integration_assistant/server/templates/pipeline.yml.njk b/x-pack/plugins/integration_assistant/server/templates/pipeline.yml.njk index d9096154595da..ba4bee23cf633 100644 --- a/x-pack/plugins/integration_assistant/server/templates/pipeline.yml.njk +++ b/x-pack/plugins/integration_assistant/server/templates/pipeline.yml.njk @@ -108,7 +108,7 @@ processors: tag: rename_destination_as_organization_name target_field: destination.as.organization.name ignore_missing: true -{% if fields_to_remove %} +{% if fields_to_remove.length > 0 %} - remove: field: {% for field in fields_to_remove %} From a568780c1fc2705f61e8fbefc9d51ea62f5dc408 Mon Sep 17 00:00:00 2001 From: Patryk Kopycinski Date: Mon, 3 Jun 2024 20:31:22 +0200 Subject: [PATCH 38/62] fix --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 41e94410ccaea..6d4d5ab996969 100644 --- a/package.json +++ b/package.json @@ -1748,4 +1748,4 @@ "zod-to-json-schema": "^3.22.3" }, "packageManager": "yarn@1.22.21" -} +} \ No newline at end of file From b79d05c3481054b6c75473b234464a4d06e165f9 Mon Sep 17 00:00:00 2001 From: Bharat Pasupula Date: Tue, 4 Jun 2024 09:03:40 +0200 Subject: [PATCH 39/62] Update docs --- 
x-pack/plugins/integration_assistant/README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/x-pack/plugins/integration_assistant/README.md b/x-pack/plugins/integration_assistant/README.md index 17b2b8fd46a26..7d7d8bc7ecf5f 100644 --- a/x-pack/plugins/integration_assistant/README.md +++ b/x-pack/plugins/integration_assistant/README.md @@ -14,7 +14,7 @@ Exposes 4 API's that can be consumed by any frontend plugin, which are: - Categorization API - Related Fields API - Build Integration API -- Optional Test Pipeline API (Used to update pipeline results if the ingest pipeline changes by a user in the UI). +- Optional Test Pipeline API (Used to update pipeline results if the ingest pipeline is changed by a user in the UI). ## Development @@ -45,15 +45,15 @@ All routes are defined under `./server/routes` in its own file, and then include **Integration Builder** The integration builder is the last step in the expected API flow (ECS Mapping -> Categorization -> Related Fields -> Integration Builder). -With the provided package and data stream details, a optional logo and a list of sample logs, the API will build out the entire folder structure and files required for the integration package, archive it and return it as a `Buffer`. +With the provided package and data stream details, an optional logo and a list of sample logs, the API will build out the entire folder structure and files required for the integration package, archive it and return it as a `Buffer`. **Templates** -Currently the templates are stored as nunjucks files as they were converted from jinja2 templates, which uses the exact same format. Longer term this will most likely be switched to the Kibana forked Handlebars templating engine. +Currently the templates are stored as `nunjucks` files as they were converted from `jinja2` templates, which use the exact same format. Longer term this will most likely be switched to the Kibana forked Handlebars templating engine. 
The templates are stored in the `./server/templates` directory and are used to generate the integration package files while running the Integration Builder API. -One template (pipeline.yml.njk) is used by the ECS Mapping API to generate provide the boilerplate ingest pipeline structure we want to use for all generated integrations. +One template (pipeline.yml.njk) is used by the ECS Mapping API to generate the boilerplate ingest pipeline structure we want to use for all generated integrations. ## Tests From ead0fd69107e0eeb4f3aca9f73e3e55af6f83a5f Mon Sep 17 00:00:00 2001 From: Bharat Pasupula Date: Tue, 4 Jun 2024 10:50:47 +0200 Subject: [PATCH 40/62] Update yarn.lock --- yarn.lock | 27 +++++---------------------- 1 file changed, 5 insertions(+), 22 deletions(-) diff --git a/yarn.lock b/yarn.lock index 9febd5f7b8621..5d15224196da7 100644 --- a/yarn.lock +++ b/yarn.lock @@ -14968,16 +14968,11 @@ common-path-prefix@^3.0.0: resolved "https://registry.yarnpkg.com/common-path-prefix/-/common-path-prefix-3.0.0.tgz#7d007a7e07c58c4b4d5f433131a19141b29f11e0" integrity sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w== -common-tags@^1.4.0: +common-tags@^1.4.0, common-tags@^1.8.0: version "1.8.2" resolved "https://registry.yarnpkg.com/common-tags/-/common-tags-1.8.2.tgz#94ebb3c076d26032745fd54face7f688ef5ac9c6" integrity sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA== -common-tags@^1.8.0: - version "1.8.0" - resolved "https://registry.yarnpkg.com/common-tags/-/common-tags-1.8.0.tgz#8e3153e542d4a39e9b10554434afaaf98956a937" - integrity sha512-6P6g0uetGpW/sdyUy/iQQCbFF0kWVMSIVSyYz7Zgjcgh8mgw8PQzDNZeyZ5DQ2gM7LBoZPHmnjz8rUthkBG5tw== - commondir@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" @@ -22783,16 +22778,11 @@ loglevel-plugin-prefix@^0.8.4: resolved 
"https://registry.yarnpkg.com/loglevel-plugin-prefix/-/loglevel-plugin-prefix-0.8.4.tgz#2fe0e05f1a820317d98d8c123e634c1bd84ff644" integrity sha512-WpG9CcFAOjz/FtNht+QJeGpvVl/cdR6P0z6OcXSkr8wFJOsV2GRj2j10JLfjuA4aYkcKCNIEqRGCyTife9R8/g== -loglevel@^1.4.1: +loglevel@^1.4.1, loglevel@^1.6.0: version "1.9.1" resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.9.1.tgz#d63976ac9bcd03c7c873116d41c2a85bafff1be7" integrity sha512-hP3I3kCrDIMuRwAwHltphhDM1r8i55H33GgqjXbrisuJhF4kRhW1dNuxsRklp4bXl8DSdLaNLuiL4A/LWRfxvg== -loglevel@^1.6.0: - version "1.6.8" - resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.6.8.tgz#8a25fb75d092230ecd4457270d80b54e28011171" - integrity sha512-bsU7+gc9AJ2SqpzxwU3+1fedl8zAntbtC5XYlt3s2j1hJcn2PsXSmgN8TaLG/J1/2mod4+cE/3vNL70/c1RNCA== - lolex@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/lolex/-/lolex-4.2.0.tgz#ddbd7f6213ca1ea5826901ab1222b65d714b3cd7" @@ -23563,7 +23553,7 @@ minimatch@5.0.1: dependencies: brace-expansion "^2.0.1" -minimatch@9.0.3: +minimatch@9.0.3, minimatch@^9.0.1: version "9.0.3" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.3.tgz#a6e00c3de44c3a542bfaae70abfc22420a6da825" integrity sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg== @@ -23584,13 +23574,6 @@ minimatch@^5.0.1, minimatch@^5.1.0: dependencies: brace-expansion "^2.0.1" -minimatch@^9.0.1: - version "9.0.1" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.1.tgz#8a555f541cf976c622daf078bb28f29fb927c253" - integrity sha512-0jWhJpD/MdhPXwPuiRkCbfYfSKp2qnn2eOc279qI7f+osl/l+prKSrvhg157zSYvx/1nmgn2NqdT6k2Z7zSH9w== - dependencies: - brace-expansion "^2.0.1" - minimist-options@4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/minimist-options/-/minimist-options-4.1.0.tgz#c0655713c53a8a2ebd77ffa247d342c40f010619" @@ -28708,7 +28691,7 @@ semver@7.5.4: dependencies: lru-cache "^6.0.0" -semver@7.6.0, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, 
semver@^7.3.7, semver@^7.5.0, semver@^7.5.2, semver@^7.5.3, semver@^7.5.4: +semver@7.6.0: version "7.6.0" resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.0.tgz#1a46a4db4bffcccd97b743b5005c8325f23d4e2d" integrity sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg== @@ -28720,7 +28703,7 @@ semver@^6.0.0, semver@^6.1.0, semver@^6.1.2, semver@^6.3.0, semver@^6.3.1: resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== -semver@^7.3.6: +semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.6, semver@^7.3.7, semver@^7.5.0, semver@^7.5.2, semver@^7.5.3, semver@^7.5.4: version "7.6.2" resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.2.tgz#1e3b34759f896e8f14d6134732ce798aeb0c6e13" integrity sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w== From 9fa57d2665989ebad53fcc58a023acd1f1e814fb Mon Sep 17 00:00:00 2001 From: Bharat Pasupula Date: Tue, 4 Jun 2024 13:46:15 +0200 Subject: [PATCH 41/62] fix changed FakeListLLM to FakeStreamingLLM --- .../server/lib/conversational_chain.test.ts | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/x-pack/plugins/search_playground/server/lib/conversational_chain.test.ts b/x-pack/plugins/search_playground/server/lib/conversational_chain.test.ts index f02b8d235a1db..9441ad34fab5b 100644 --- a/x-pack/plugins/search_playground/server/lib/conversational_chain.test.ts +++ b/x-pack/plugins/search_playground/server/lib/conversational_chain.test.ts @@ -6,13 +6,12 @@ */ import type { Client } from '@elastic/elasticsearch'; -import { createAssist as Assist } from '../utils/assist'; -import { clipContext, ConversationalChain } from './conversational_chain'; -import { FakeListChatModel } from '@langchain/core/utils/testing'; -import { FakeListLLM } from 
'@langchain/core/utils/testing'; import { BaseChatModel } from '@langchain/core/language_models/chat_models'; -import { experimental_StreamData, Message } from 'ai'; import { ChatPromptTemplate } from '@langchain/core/prompts'; +import { FakeListChatModel, FakeStreamingLLM } from '@langchain/core/utils/testing'; +import { Message, experimental_StreamData } from 'ai'; +import { createAssist as Assist } from '../utils/assist'; +import { ConversationalChain, clipContext } from './conversational_chain'; describe('conversational chain', () => { const createTestChain = async ({ @@ -76,7 +75,7 @@ describe('conversational chain', () => { ? new FakeListChatModel({ responses, }) - : new FakeListLLM({ responses }); + : new FakeStreamingLLM({ responses }); const aiClient = Assist({ es_client: mockElasticsearchClient as unknown as Client, From a3aac1f7404b58ea764e7f05c729906a61fbd263 Mon Sep 17 00:00:00 2001 From: Bharat Pasupula Date: Tue, 4 Jun 2024 18:54:42 +0200 Subject: [PATCH 42/62] Add integration_assistant to eslint --- .eslintrc.js | 29 ++++++++++++++++++++++++++++- package.json | 2 +- 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/.eslintrc.js b/.eslintrc.js index d7956c1590688..023be3da75e9c 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -1007,6 +1007,29 @@ module.exports = { }, }, + /** + * Integration assistant overrides + */ + { + // front end and common typescript and javascript files only + files: [ + 'x-pack/plugins/integration_assistant/public/**/*.{js,mjs,ts,tsx}', + 'x-pack/plugins/integration_assistant/common/**/*.{js,mjs,ts,tsx}', + ], + rules: { + 'import/no-nodejs-modules': 'error', + 'no-duplicate-imports': 'off', + '@typescript-eslint/no-duplicate-imports': 'error', + 'no-restricted-imports': [ + 'error', + { + // prevents UI code from importing server side code and then webpack including it when doing builds + patterns: ['**/server/*'], + }, + ], + }, + }, + /** * ML overrides */ @@ -1071,6 +1094,7 @@ module.exports = { files: [ 
'x-pack/plugins/ecs_data_quality_dashboard/**/*.{ts,tsx}', 'x-pack/plugins/elastic_assistant/**/*.{ts,tsx}', + 'x-pack/plugins/integration_assistant/**/*.{ts,tsx}', 'x-pack/packages/kbn-elastic-assistant/**/*.{ts,tsx}', 'x-pack/packages/kbn-elastic-assistant-common/**/*.{ts,tsx}', 'x-pack/packages/kbn-langchain/**/*.{ts,tsx}', @@ -1085,6 +1109,7 @@ module.exports = { excludedFiles: [ 'x-pack/plugins/ecs_data_quality_dashboard/**/*.{test,mock,test_helper}.{ts,tsx}', 'x-pack/plugins/elastic_assistant/**/*.{test,mock,test_helper}.{ts,tsx}', + 'x-pack/plugins/integration_assistant/**/*.{test,mock,test_helper}.{ts,tsx}', 'x-pack/packages/kbn-elastic-assistant/**/*.{test,mock,test_helper}.{ts,tsx}', 'x-pack/packages/kbn-elastic-assistant-common/**/*.{test,mock,test_helper}.{ts,tsx}', 'x-pack/packages/kbn-langchain/**/*.{test,mock,test_helper}.{ts,tsx}', @@ -1105,6 +1130,7 @@ module.exports = { files: [ 'x-pack/plugins/ecs_data_quality_dashboard/**/*.{ts,tsx}', 'x-pack/plugins/elastic_assistant/**/*.{ts,tsx}', + 'x-pack/plugins/integration_assistant/**/*.{ts,tsx}', 'x-pack/packages/kbn-elastic-assistant/**/*.{ts,tsx}', 'x-pack/packages/kbn-elastic-assistant-common/**/*.{ts,tsx}', 'x-pack/packages/kbn-langchain/**/*.{ts,tsx}', @@ -1144,6 +1170,7 @@ module.exports = { files: [ 'x-pack/plugins/ecs_data_quality_dashboard/**/*.{js,mjs,ts,tsx}', 'x-pack/plugins/elastic_assistant/**/*.{js,mjs,ts,tsx}', + 'x-pack/plugins/integration_assistant/**/*.{js,mjs,ts,tsx}', 'x-pack/packages/kbn-elastic-assistant/**/*.{js,mjs,ts,tsx}', 'x-pack/packages/kbn-elastic-assistant-common/**/*.{js,mjs,ts,tsx}', 'x-pack/packages/kbn-langchain/**/*.{js,mjs,ts,tsx}', @@ -1248,7 +1275,7 @@ module.exports = { files: [ 'x-pack/packages/security-solution/features/**/*.{js,mjs,ts,tsx}', 'x-pack/packages/security-solution/navigation/**/*.{js,mjs,ts,tsx}', - 'x-pack/plugins/security_solution/**/*.{js,mjs,ts,tsx}', + 'x-pack/plugins/integration_assistant/**/*.{js,mjs,ts,tsx}', 
'x-pack/plugins/security_solution_ess/**/*.{js,mjs,ts,tsx}', 'x-pack/plugins/security_solution_serverless/**/*.{js,mjs,ts,tsx}', 'x-pack/plugins/cases/**/*.{js,mjs,ts,tsx}', diff --git a/package.json b/package.json index 9138dccdd227f..037002417093d 100644 --- a/package.json +++ b/package.json @@ -1748,4 +1748,4 @@ "zod-to-json-schema": "^3.22.3" }, "packageManager": "yarn@1.22.21" -} \ No newline at end of file +} From 1b23cd22190e723a977d1b6315be2637af4c4b4a Mon Sep 17 00:00:00 2001 From: Bharat Pasupula Date: Tue, 4 Jun 2024 19:52:07 +0200 Subject: [PATCH 43/62] Fix files with eslint issues --- package.json | 2 +- .../plugins/integration_assistant/README.md | 4 +- .../integration_assistant/common/types.ts | 2 +- .../integration_assistant/kibana.jsonc | 24 ++------ .../categorization/categorization.test.ts | 2 +- .../graphs/categorization/categorization.ts | 8 +-- .../graphs/categorization/errors.test.ts | 2 +- .../server/graphs/categorization/errors.ts | 10 ++-- .../graphs/categorization/graph.test.ts | 2 +- .../server/graphs/categorization/graph.ts | 7 ++- .../graphs/categorization/invalid.test.ts | 2 +- .../server/graphs/categorization/invalid.ts | 10 ++-- .../graphs/categorization/review.test.ts | 2 +- .../server/graphs/categorization/review.ts | 8 +-- .../server/graphs/categorization/validate.ts | 2 +- .../server/graphs/ecs/constants.ts | 2 +- .../server/graphs/ecs/duplicates.test.ts | 2 +- .../server/graphs/ecs/duplicates.ts | 4 +- .../server/graphs/ecs/graph.ts | 7 ++- .../server/graphs/ecs/invalid.test.ts | 2 +- .../server/graphs/ecs/invalid.ts | 4 +- .../server/graphs/ecs/mapping.test.ts | 2 +- .../server/graphs/ecs/mapping.ts | 4 +- .../server/graphs/ecs/missing.test.ts | 2 +- .../server/graphs/ecs/missing.ts | 4 +- .../server/graphs/ecs/pipeline.ts | 59 +++++++++---------- .../server/graphs/ecs/validate.ts | 7 ++- .../server/graphs/related/errors.test.ts | 2 +- .../server/graphs/related/errors.ts | 10 ++-- .../server/graphs/related/graph.test.ts | 2 +- 
.../server/graphs/related/graph.ts | 7 ++- .../server/graphs/related/related.test.ts | 2 +- .../server/graphs/related/related.ts | 10 ++-- .../server/graphs/related/review.test.ts | 2 +- .../server/graphs/related/review.ts | 10 ++-- .../integration_assistant/server/index.ts | 2 +- .../server/integration_builder/agent.ts | 2 +- .../integration_builder/build_integration.ts | 2 +- .../server/integration_builder/data_stream.ts | 26 ++++---- .../server/integration_builder/dev_folders.ts | 2 +- .../server/integration_builder/fields.ts | 4 +- .../server/integration_builder/pipeline.ts | 2 +- .../integration_assistant/server/plugin.ts | 6 +- .../server/routes/build_integration_routes.ts | 4 +- .../server/routes/categorization_routes.ts | 4 +- .../server/routes/ecs_routes.ts | 6 +- .../server/routes/pipeline_routes.ts | 6 +- .../server/routes/register_routes.ts | 4 +- .../server/routes/related_routes.ts | 6 +- .../server/util/files.ts | 12 ++-- .../server/util/graph.ts | 4 +- .../server/util/processors.ts | 7 ++- .../server/util/samples.ts | 19 +++--- .../integration_assistant/tsconfig.json | 8 +-- 54 files changed, 167 insertions(+), 190 deletions(-) diff --git a/package.json b/package.json index 037002417093d..9138dccdd227f 100644 --- a/package.json +++ b/package.json @@ -1748,4 +1748,4 @@ "zod-to-json-schema": "^3.22.3" }, "packageManager": "yarn@1.22.21" -} +} \ No newline at end of file diff --git a/x-pack/plugins/integration_assistant/README.md b/x-pack/plugins/integration_assistant/README.md index 7d7d8bc7ecf5f..d05a0bf757c66 100644 --- a/x-pack/plugins/integration_assistant/README.md +++ b/x-pack/plugins/integration_assistant/README.md @@ -1,9 +1,9 @@ -Team owner: Security Integrations Scalability - # Integration Assistant ## Overview +Team owner: Security Integrations Scalability + This is a new Kibana plugin created to help users with automatically generating integration packages based on provided log samples and relevant information ## Features diff --git 
a/x-pack/plugins/integration_assistant/common/types.ts b/x-pack/plugins/integration_assistant/common/types.ts index 41e15c1d0a0c3..50da197460e1e 100644 --- a/x-pack/plugins/integration_assistant/common/types.ts +++ b/x-pack/plugins/integration_assistant/common/types.ts @@ -11,7 +11,7 @@ export interface ESProcessorOptions { ignore_missing?: boolean; if?: string; tag?: string; - [key: string]: any; + [key: string]: unknown; } export interface ESProcessorItem { diff --git a/x-pack/plugins/integration_assistant/kibana.jsonc b/x-pack/plugins/integration_assistant/kibana.jsonc index 613b01a9c65b6..9ce4f435c893b 100644 --- a/x-pack/plugins/integration_assistant/kibana.jsonc +++ b/x-pack/plugins/integration_assistant/kibana.jsonc @@ -7,25 +7,9 @@ "id": "integrationAssistant", "server": true, "browser": false, - "configPath": [ - "xpack", - "integration_assistant" - ], - "requiredPlugins": [ - "actions", - "licensing", - "management", - "features", - "share", - "fileUpload" - ], - "optionalPlugins": [ - "security", - "usageCollection", - "console" - ], - "extraPublicDirs": [ - "common" - ] + "configPath": ["xpack", "integration_assistant"], + "requiredPlugins": ["actions", "licensing", "management", "features", "share", "fileUpload"], + "optionalPlugins": ["security", "usageCollection", "console"], + "extraPublicDirs": ["common"] } } diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.test.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.test.ts index 7f36323191a13..f425941a90d9e 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.test.ts @@ -7,7 +7,7 @@ import { FakeLLM } from '@langchain/core/utils/testing'; import { handleCategorization } from './categorization'; -import { CategorizationState } from '../../types'; +import type { CategorizationState } from 
'../../types'; import { categorizationTestState, categorizationMockProcessors, diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts index 1358e05db29b3..b6ec3f0f3dfe3 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts @@ -4,11 +4,11 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ +import type { BedrockChat } from '@kbn/langchain/server/language_models'; import { JsonOutputParser } from '@langchain/core/output_parsers'; -import { BedrockChat } from '@kbn/langchain/server/language_models'; -import { CategorizationState } from '../../types'; +import type { ESProcessorItem, Pipeline } from '../../../common'; +import type { CategorizationState } from '../../types'; import { combineProcessors } from '../../util/processors'; -import { Pipeline } from '../../../common'; import { CATEGORIZATION_MAIN_PROMPT } from './prompts'; export async function handleCategorization(state: CategorizationState, model: BedrockChat) { @@ -21,7 +21,7 @@ export async function handleCategorization(state: CategorizationState, model: Be ex_answer: state?.exAnswer, ecs_categories: state?.ecsCategories, ecs_types: state?.ecsTypes, - })) as any[]; + })) as ESProcessorItem[]; const currentPipeline = combineProcessors(state.initialPipeline as Pipeline, currentProcessors); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.test.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.test.ts index a85eb68ff846c..38b83d4d3add8 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.test.ts @@ -7,7 +7,7 @@ import { FakeLLM } from 
'@langchain/core/utils/testing'; import { handleErrors } from './errors'; -import { CategorizationState } from '../../types'; +import type { CategorizationState } from '../../types'; import { categorizationTestState, categorizationMockProcessors, diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts index b8119554d4d93..15e1fece549de 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts @@ -4,12 +4,12 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ +import type { BedrockChat } from '@kbn/langchain/server/language_models'; import { JsonOutputParser } from '@langchain/core/output_parsers'; -import { BedrockChat } from '@kbn/langchain/server/language_models'; -import { CATEGORIZATION_ERROR_PROMPT } from './prompts'; -import { CategorizationState } from '../../types'; +import type { ESProcessorItem, Pipeline } from '../../../common'; +import type { CategorizationState } from '../../types'; import { combineProcessors } from '../../util/processors'; -import { Pipeline } from '../../../common'; +import { CATEGORIZATION_ERROR_PROMPT } from './prompts'; export async function handleErrors(state: CategorizationState, model: BedrockChat) { const categorizationErrorPrompt = CATEGORIZATION_ERROR_PROMPT; @@ -23,7 +23,7 @@ export async function handleErrors(state: CategorizationState, model: BedrockCha errors: JSON.stringify(state.errors, null, 2), package_name: state.packageName, data_stream_name: state.dataStreamName, - })) as any[]; + })) as ESProcessorItem[]; const currentPipeline = combineProcessors(state.initialPipeline as Pipeline, currentProcessors); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts 
b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts index a255536a3da1f..15d52b13bd3cd 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts @@ -5,7 +5,7 @@ * 2.0. */ -import { IScopedClusterClient } from '@kbn/core/server'; +import type { IScopedClusterClient } from '@kbn/core/server'; import { FakeLLM } from '@langchain/core/utils/testing'; import { getCategorizationGraph } from './graph'; import { diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts index bd30e401b4f62..2d4cba2426c87 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts @@ -5,9 +5,10 @@ * 2.0. */ import type { IScopedClusterClient } from '@kbn/core-elasticsearch-server'; -import { StateGraph, StateGraphArgs, END, START } from '@langchain/langgraph'; -import { BedrockChat } from '@kbn/langchain/server/language_models'; -import { CategorizationState } from '../../types'; +import type { StateGraphArgs } from '@langchain/langgraph'; +import { StateGraph, END, START } from '@langchain/langgraph'; +import type { BedrockChat } from '@kbn/langchain/server/language_models'; +import type { CategorizationState } from '../../types'; import { modifySamples, formatSamples } from '../../util/samples'; import { handleCategorization } from './categorization'; import { handleValidatePipeline } from '../../util/graph'; diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.test.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.test.ts index 6834aba357336..a9ddbe1404cb0 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.test.ts +++ 
b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.test.ts @@ -7,7 +7,7 @@ import { FakeLLM } from '@langchain/core/utils/testing'; import { handleInvalidCategorization } from './invalid'; -import { CategorizationState } from '../../types'; +import type { CategorizationState } from '../../types'; import { categorizationTestState, categorizationMockProcessors, diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts index 7bc2d024bd044..5d0b081e06fab 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts @@ -4,13 +4,13 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ +import type { BedrockChat } from '@kbn/langchain/server/language_models'; import { JsonOutputParser } from '@langchain/core/output_parsers'; -import { BedrockChat } from '@kbn/langchain/server/language_models'; -import { CATEGORIZATION_VALIDATION_PROMPT } from './prompts'; -import { CategorizationState } from '../../types'; +import type { ESProcessorItem, Pipeline } from '../../../common'; +import type { CategorizationState } from '../../types'; import { combineProcessors } from '../../util/processors'; import { ECS_EVENT_TYPES_PER_CATEGORY } from './constants'; -import { Pipeline } from '../../../common'; +import { CATEGORIZATION_VALIDATION_PROMPT } from './prompts'; export async function handleInvalidCategorization(state: CategorizationState, model: BedrockChat) { const categorizationInvalidPrompt = CATEGORIZATION_VALIDATION_PROMPT; @@ -23,7 +23,7 @@ export async function handleInvalidCategorization(state: CategorizationState, mo invalid_categorization: JSON.stringify(state.invalidCategorization, null, 2), ex_answer: state.exAnswer, compatible_types: JSON.stringify(ECS_EVENT_TYPES_PER_CATEGORY, null, 2), - 
})) as any[]; + })) as ESProcessorItem[]; const currentPipeline = combineProcessors(state.initialPipeline as Pipeline, currentProcessors); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/review.test.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/review.test.ts index 66f7b5446a958..71a5dfe281dc0 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/review.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/review.test.ts @@ -7,7 +7,7 @@ import { FakeLLM } from '@langchain/core/utils/testing'; import { handleReview } from './review'; -import { CategorizationState } from '../../types'; +import type { CategorizationState } from '../../types'; import { categorizationTestState, categorizationMockProcessors, diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts index bc46d273fc8ed..6f8d12bc40c05 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts @@ -4,14 +4,14 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ +import type { BedrockChat } from '@kbn/langchain/server/language_models'; import { JsonOutputParser } from '@langchain/core/output_parsers'; -import { BedrockChat } from '@kbn/langchain/server/language_models'; import { CATEGORIZATION_REVIEW_PROMPT } from './prompts'; -import { CategorizationState } from '../../types'; +import type { ESProcessorItem, Pipeline } from '../../../common'; +import type { CategorizationState } from '../../types'; import { combineProcessors } from '../../util/processors'; import { ECS_EVENT_TYPES_PER_CATEGORY } from './constants'; -import { Pipeline } from '../../../common'; export async function handleReview(state: CategorizationState, model: BedrockChat) { const categorizationReviewPrompt = CATEGORIZATION_REVIEW_PROMPT; @@ -24,7 +24,7 @@ export async function handleReview(state: CategorizationState, model: BedrockCha ex_answer: state?.exAnswer, package_name: state?.packageName, compatibility_matrix: JSON.stringify(ECS_EVENT_TYPES_PER_CATEGORY, null, 2), - })) as any[]; + })) as ESProcessorItem[]; const currentPipeline = combineProcessors(state.initialPipeline as Pipeline, currentProcessors); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/validate.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/validate.ts index 8d00bdc31f1fd..40e37cd3b0363 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/validate.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/validate.ts @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -import { CategorizationState } from '../../types'; +import type { CategorizationState } from '../../types'; import { ECS_EVENT_TYPES_PER_CATEGORY, EVENT_CATEGORIES, EVENT_TYPES } from './constants'; import type { EventCategories } from './constants'; diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts index 2c55087fcf952..036e269eca11b 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts @@ -6,7 +6,7 @@ */ interface EcsFields { - [key: string]: any; + [key: string]: unknown; } export const ECS_TYPES: EcsFields = { diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.test.ts index 20ddda95d0e97..cf4629aebcd71 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.test.ts @@ -7,7 +7,7 @@ import { FakeLLM } from '@langchain/core/utils/testing'; import { handleDuplicates } from './duplicates'; -import { EcsMappingState } from '../../types'; +import type { EcsMappingState } from '../../types'; import { ecsTestState } from '../../../__jest__/fixtures/ecs_mapping'; const mockLlm = new FakeLLM({ diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts index ce4dbb6322462..f1c2f8ad2ce67 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts @@ -5,9 +5,9 @@ * 2.0. 
*/ import { JsonOutputParser } from '@langchain/core/output_parsers'; -import { BedrockChat } from '@kbn/langchain/server/language_models'; +import type { BedrockChat } from '@kbn/langchain/server/language_models'; import { ECS_DUPLICATES_PROMPT } from './prompts'; -import { EcsMappingState } from '../../types'; +import type { EcsMappingState } from '../../types'; export async function handleDuplicates(state: EcsMappingState, model: BedrockChat) { const ecsDuplicatesPrompt = ECS_DUPLICATES_PROMPT; diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts index 048d366afe49f..6d598d0ebd8a5 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts @@ -5,8 +5,9 @@ * 2.0. */ -import { StateGraph, StateGraphArgs, END, START } from '@langchain/langgraph'; -import { BedrockChat } from '@kbn/langchain/server/language_models'; +import type { StateGraphArgs } from '@langchain/langgraph'; +import { StateGraph, END, START } from '@langchain/langgraph'; +import type { BedrockChat } from '@kbn/langchain/server/language_models'; import { ECS_EXAMPLE_ANSWER, ECS_FIELDS } from './constants'; import { modifySamples, mergeSamples } from '../../util/samples'; import { createPipeline } from './pipeline'; @@ -15,7 +16,7 @@ import { handleDuplicates } from './duplicates'; import { handleMissingKeys } from './missing'; import { handleInvalidEcs } from './invalid'; import { handleValidateMappings } from './validate'; -import { EcsMappingState } from '../../types'; +import type { EcsMappingState } from '../../types'; const graphState: StateGraphArgs['channels'] = { ecs: { diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts index 899498ddab517..4e2c3b27469b5 100644 --- 
a/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts @@ -7,7 +7,7 @@ import { FakeLLM } from '@langchain/core/utils/testing'; import { handleInvalidEcs } from './invalid'; -import { EcsMappingState } from '../../types'; +import type { EcsMappingState } from '../../types'; import { ecsTestState } from '../../../__jest__/fixtures/ecs_mapping'; const mockLlm = new FakeLLM({ diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts index 2e41f4a0bf01f..c3ff0e12bf44c 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts @@ -5,9 +5,9 @@ * 2.0. */ import { JsonOutputParser } from '@langchain/core/output_parsers'; -import { BedrockChat } from '@kbn/langchain/server/language_models'; +import type { BedrockChat } from '@kbn/langchain/server/language_models'; import { ECS_INVALID_PROMPT } from './prompts'; -import { EcsMappingState } from '../../types'; +import type { EcsMappingState } from '../../types'; export async function handleInvalidEcs(state: EcsMappingState, model: BedrockChat) { const ecsInvalidEcsPrompt = ECS_INVALID_PROMPT; diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts index beb1778e4e57d..038c5711ab2cb 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts @@ -7,7 +7,7 @@ import { FakeLLM } from '@langchain/core/utils/testing'; import { handleEcsMapping } from './mapping'; -import { EcsMappingState } from '../../types'; +import type { EcsMappingState } from '../../types'; import { ecsTestState } from '../../../__jest__/fixtures/ecs_mapping'; const mockLlm = new FakeLLM({ 
diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts index 82c65013362e7..dc88e4f390102 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts @@ -5,9 +5,9 @@ * 2.0. */ import { JsonOutputParser } from '@langchain/core/output_parsers'; -import { BedrockChat } from '@kbn/langchain/server/language_models'; +import type { BedrockChat } from '@kbn/langchain/server/language_models'; import { ECS_MAIN_PROMPT } from './prompts'; -import { EcsMappingState } from '../../types'; +import type { EcsMappingState } from '../../types'; export async function handleEcsMapping(state: EcsMappingState, model: BedrockChat) { const ecsMainPrompt = ECS_MAIN_PROMPT; diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts index f9826f5757c0d..38861cfc702b3 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts @@ -7,7 +7,7 @@ import { FakeLLM } from '@langchain/core/utils/testing'; import { handleMissingKeys } from './missing'; -import { EcsMappingState } from '../../types'; +import type { EcsMappingState } from '../../types'; import { ecsTestState } from '../../../__jest__/fixtures/ecs_mapping'; const mockLlm = new FakeLLM({ diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts index e1fd5996e9da1..ffb23c875f278 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts @@ -5,9 +5,9 @@ * 2.0. 
*/ import { JsonOutputParser } from '@langchain/core/output_parsers'; -import { BedrockChat } from '@kbn/langchain/server/language_models'; +import type { BedrockChat } from '@kbn/langchain/server/language_models'; import { ECS_MISSING_KEYS_PROMPT } from './prompts'; -import { EcsMappingState } from '../../types'; +import type { EcsMappingState } from '../../types'; export async function handleMissingKeys(state: EcsMappingState, model: BedrockChat) { const ecsMissingPrompt = ECS_MISSING_KEYS_PROMPT; diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts index 0c17d56157814..c4ad10f47a9f2 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/pipeline.ts @@ -4,14 +4,15 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ +/* eslint-disable @typescript-eslint/no-explicit-any */ import { load } from 'js-yaml'; import { Environment, FileSystemLoader } from 'nunjucks'; import { join as joinPath } from 'path'; -import { EcsMappingState } from '../../types'; +import type { EcsMappingState } from '../../types'; import { ECS_TYPES } from './constants'; interface IngestPipeline { - [key: string]: any; + [key: string]: unknown; } interface ECSField { @@ -25,7 +26,7 @@ function generateProcessor( currentPath: string, ecsField: ECSField, expectedEcsType: string, - sampleValue: any + sampleValue: unknown ): object { if (needsTypeConversion(sampleValue, expectedEcsType)) { return { @@ -58,20 +59,16 @@ function generateProcessor( }; } -function getSampleValue(key: string, samples: Record): any { - try { - const keyList = key.split('.'); - let value: any = samples; - for (const k of keyList) { - if (value === undefined || value === null) { - return null; - } - value = value[k]; +function getSampleValue(key: string, samples: Record): unknown { + const keyList = 
key.split('.'); + let value: any = samples; + for (const k of keyList) { + if (value === undefined || value === null) { + return null; } - return value; - } catch (e) { - throw e; + value = value[k]; } + return value; } function getEcsType(ecsField: ECSField, ecsTypes: Record): string { @@ -95,7 +92,7 @@ function getConvertProcessorType(expectedEcsType: string): string { return 'string'; } -function needsTypeConversion(sample: any, expected: string): boolean { +function needsTypeConversion(sample: unknown, expected: string): boolean { if (sample === null || sample === undefined) { return false; } @@ -155,7 +152,9 @@ export function createPipeline(state: EcsMappingState): IngestPipeline { const processors = generateProcessors(state.currentMapping, samples); // Retrieve all source field names from convert processors to populate single remove processor: - const fieldsToRemove = processors.map((p: any) => p.convert?.field).filter((f: any) => f != null); + const fieldsToRemove = processors + .map((p: any) => p.convert?.field) + .filter((f: unknown) => f != null); const mappedValues = { processors, ecs_version: state.ecsVersion, @@ -164,19 +163,15 @@ export function createPipeline(state: EcsMappingState): IngestPipeline { log_format: state.logFormat, fields_to_remove: fieldsToRemove, }; - try { - const templatesPath = joinPath(__dirname, '../../templates'); - const env = new Environment(new FileSystemLoader(templatesPath), { - autoescape: false, - }); - env.addFilter('startswith', function (str, prefix) { - return str.startsWith(prefix); - }); - const template = env.getTemplate('pipeline.yml.njk'); - const renderedTemplate = template.render(mappedValues); - const ingestPipeline = load(renderedTemplate) as IngestPipeline; - return ingestPipeline; - } catch (e) { - throw e; - } + const templatesPath = joinPath(__dirname, '../../templates'); + const env = new Environment(new FileSystemLoader(templatesPath), { + autoescape: false, + }); + env.addFilter('startswith', function 
(str, prefix) { + return str.startsWith(prefix); + }); + const template = env.getTemplate('pipeline.yml.njk'); + const renderedTemplate = template.render(mappedValues); + const ingestPipeline = load(renderedTemplate) as IngestPipeline; + return ingestPipeline; } diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts index a40daf6d9c090..0a01b1f59dcf3 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/validate.ts @@ -4,8 +4,9 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ +/* eslint-disable @typescript-eslint/no-explicit-any */ import { ECS_FULL } from '../../../common/ecs'; -import { EcsMappingState } from '../../types'; +import type { EcsMappingState } from '../../types'; const valueFieldKeys = new Set(['target', 'confidence', 'date_formats', 'type']); type AnyObject = Record; @@ -84,7 +85,7 @@ function processMapping(path: string[], value: any, output: Record (acc && acc[key] !== undefined ? 
acc[key] : null), obj); } @@ -133,7 +134,7 @@ function findInvalidEcsFields(ecsMapping: AnyObject): string[] { ); for (const [ecsValue, paths] of Object.entries(filteredOutput)) { - if (!ecsDict.hasOwnProperty(ecsValue)) { + if (!Object.prototype.hasOwnProperty.call(ecsDict, ecsValue)) { const field = paths.map((p) => p.join('.')); results.push(`Invalid ECS field mapping identified for ${ecsValue} : ${field.join(', ')}`); } diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/errors.test.ts b/x-pack/plugins/integration_assistant/server/graphs/related/errors.test.ts index 572c12d050090..97d249455cafd 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/errors.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/errors.test.ts @@ -7,7 +7,7 @@ import { FakeLLM } from '@langchain/core/utils/testing'; import { handleErrors } from './errors'; -import { RelatedState } from '../../types'; +import type { RelatedState } from '../../types'; import { relatedTestState, relatedMockProcessors, diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts b/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts index 2cdd3091b57ff..9ff0443078c6e 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts @@ -4,12 +4,12 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ +import type { BedrockChat } from '@kbn/langchain/server/language_models'; import { JsonOutputParser } from '@langchain/core/output_parsers'; -import { BedrockChat } from '@kbn/langchain/server/language_models'; -import { RELATED_ERROR_PROMPT } from './prompts'; -import { RelatedState } from '../../types'; +import type { ESProcessorItem, Pipeline } from '../../../common'; +import type { RelatedState } from '../../types'; import { combineProcessors } from '../../util/processors'; -import { Pipeline } from '../../../common'; +import { RELATED_ERROR_PROMPT } from './prompts'; export async function handleErrors(state: RelatedState, model: BedrockChat) { const relatedErrorPrompt = RELATED_ERROR_PROMPT; @@ -22,7 +22,7 @@ export async function handleErrors(state: RelatedState, model: BedrockChat) { errors: JSON.stringify(state.errors, null, 2), package_name: state.packageName, data_stream_name: state.dataStreamName, - })) as any[]; + })) as ESProcessorItem[]; const currentPipeline = combineProcessors(state.initialPipeline as Pipeline, currentProcessors); return { diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/graph.test.ts b/x-pack/plugins/integration_assistant/server/graphs/related/graph.test.ts index 3c783822d2bbf..eade32fbd8ab8 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/graph.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/graph.test.ts @@ -5,7 +5,7 @@ * 2.0. 
*/ -import { IScopedClusterClient } from '@kbn/core/server'; +import type { IScopedClusterClient } from '@kbn/core/server'; import { FakeLLM } from '@langchain/core/utils/testing'; import { getRelatedGraph } from './graph'; import { diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts index 1c67bf19cc884..f1f2bd5acc4b0 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts @@ -5,9 +5,10 @@ * 2.0. */ import type { IScopedClusterClient } from '@kbn/core-elasticsearch-server'; -import { StateGraph, StateGraphArgs, END, START } from '@langchain/langgraph'; -import { BedrockChat } from '@kbn/langchain/server/language_models'; -import { RelatedState } from '../../types'; +import type { StateGraphArgs } from '@langchain/langgraph'; +import { StateGraph, END, START } from '@langchain/langgraph'; +import type { BedrockChat } from '@kbn/langchain/server/language_models'; +import type { RelatedState } from '../../types'; import { modifySamples, formatSamples } from '../../util/samples'; import { handleValidatePipeline } from '../../util/graph'; import { handleRelated } from './related'; diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/related.test.ts b/x-pack/plugins/integration_assistant/server/graphs/related/related.test.ts index ef0b0abe27983..bcae2194ad889 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/related.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/related.test.ts @@ -7,7 +7,7 @@ import { FakeLLM } from '@langchain/core/utils/testing'; import { handleRelated } from './related'; -import { RelatedState } from '../../types'; +import type { RelatedState } from '../../types'; import { relatedTestState, relatedMockProcessors, diff --git 
a/x-pack/plugins/integration_assistant/server/graphs/related/related.ts b/x-pack/plugins/integration_assistant/server/graphs/related/related.ts index a3f6ccba57ef8..f25acf1b22cac 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/related.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/related.ts @@ -4,12 +4,12 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ +import type { BedrockChat } from '@kbn/langchain/server/language_models'; import { JsonOutputParser } from '@langchain/core/output_parsers'; -import { BedrockChat } from '@kbn/langchain/server/language_models'; -import { RELATED_MAIN_PROMPT } from './prompts'; -import { RelatedState } from '../../types'; +import type { ESProcessorItem, Pipeline } from '../../../common'; +import type { RelatedState } from '../../types'; import { combineProcessors } from '../../util/processors'; -import { Pipeline } from '../../../common'; +import { RELATED_MAIN_PROMPT } from './prompts'; export async function handleRelated(state: RelatedState, model: BedrockChat) { const relatedMainPrompt = RELATED_MAIN_PROMPT; @@ -20,7 +20,7 @@ export async function handleRelated(state: RelatedState, model: BedrockChat) { pipeline_results: JSON.stringify(state.pipelineResults, null, 2), ex_answer: state.exAnswer, ecs: state.ecs, - })) as any[]; + })) as ESProcessorItem[]; const currentPipeline = combineProcessors(state.initialPipeline as Pipeline, currentProcessors); diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/review.test.ts b/x-pack/plugins/integration_assistant/server/graphs/related/review.test.ts index c39b68a7b5ad8..570a0e3157bfe 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/review.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/review.test.ts @@ -7,7 +7,7 @@ import { FakeLLM } from '@langchain/core/utils/testing'; import { handleReview } from './review'; -import { RelatedState } 
from '../../types'; +import type { RelatedState } from '../../types'; import { relatedTestState, relatedMockProcessors, diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/review.ts b/x-pack/plugins/integration_assistant/server/graphs/related/review.ts index bba2318ce6532..517b81eac73dc 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/review.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/review.ts @@ -4,12 +4,12 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ +import type { BedrockChat } from '@kbn/langchain/server/language_models'; import { JsonOutputParser } from '@langchain/core/output_parsers'; -import { BedrockChat } from '@kbn/langchain/server/language_models'; -import { RELATED_REVIEW_PROMPT } from './prompts'; -import { RelatedState } from '../../types'; +import type { ESProcessorItem, Pipeline } from '../../../common'; +import type { RelatedState } from '../../types'; import { combineProcessors } from '../../util/processors'; -import { Pipeline } from '../../../common'; +import { RELATED_REVIEW_PROMPT } from './prompts'; export async function handleReview(state: RelatedState, model: BedrockChat) { const relatedReviewPrompt = RELATED_REVIEW_PROMPT; @@ -20,7 +20,7 @@ export async function handleReview(state: RelatedState, model: BedrockChat) { current_processors: JSON.stringify(state.currentProcessors, null, 2), ex_answer: state.exAnswer, pipeline_results: JSON.stringify(state.pipelineResults, null, 2), - })) as any[]; + })) as ESProcessorItem[]; const currentPipeline = combineProcessors(state.initialPipeline as Pipeline, currentProcessors); diff --git a/x-pack/plugins/integration_assistant/server/index.ts b/x-pack/plugins/integration_assistant/server/index.ts index 71c9c10ecdd6e..d259ae80c53de 100644 --- a/x-pack/plugins/integration_assistant/server/index.ts +++ b/x-pack/plugins/integration_assistant/server/index.ts @@ -5,7 +5,7 @@ * 2.0. 
*/ -import { PluginInitializerContext } from '@kbn/core/server'; +import type { PluginInitializerContext } from '@kbn/core/server'; export { config } from './config'; diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/agent.ts b/x-pack/plugins/integration_assistant/server/integration_builder/agent.ts index dac7d8fd1ed81..6d3282e00f18a 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/agent.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/agent.ts @@ -6,7 +6,7 @@ */ import { join as joinPath } from 'path'; -import { InputTypes } from '../../common'; +import type { InputTypes } from '../../common'; import { ensureDirSync, createSync, readSync } from '../util'; export function createAgentInput(specificDataStreamDir: string, inputTypes: InputTypes[]): void { diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts b/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts index 2d416ee00a691..26031695bb432 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts @@ -9,7 +9,7 @@ import { join as joinPath } from 'path'; import { tmpdir } from 'os'; import nunjucks from 'nunjucks'; import AdmZip from 'adm-zip'; -import { Integration, DataStream } from '../../common'; +import type { Integration, DataStream } from '../../common'; import { createPackageSystemTests } from './dev_folders'; import { createDatastream } from './data_stream'; import { createAgentInput } from './agent'; diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/data_stream.ts b/x-pack/plugins/integration_assistant/server/integration_builder/data_stream.ts index 6b99de53f0917..fa9a24475e92e 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/data_stream.ts +++ 
b/x-pack/plugins/integration_assistant/server/integration_builder/data_stream.ts @@ -5,10 +5,10 @@ * 2.0. */ -import { join as joinPath } from 'path'; import nunjucks from 'nunjucks'; -import { DataStream } from '../../common'; -import { copySync, ensureDirSync, createSync, listDirSync } from '../util'; +import { join as joinPath } from 'path'; +import type { DataStream } from '../../common'; +import { copySync, createSync, ensureDirSync, listDirSync } from '../util'; export function createDatastream( packageName: string, @@ -62,19 +62,15 @@ export function createDatastream( function createDataStreamFolders(specificDataStreamDir: string, pipelineDir: string): void { const dataStreamTemplatesDir = joinPath(__dirname, '../templates/data_stream'); - try { - const items = listDirSync(dataStreamTemplatesDir); - - for (const item of items) { - const s = joinPath(dataStreamTemplatesDir, item); - const d = joinPath(specificDataStreamDir, item); - copySync(s, d); - } - - ensureDirSync(pipelineDir); - } catch (error) { - throw error; + const items = listDirSync(dataStreamTemplatesDir); + + for (const item of items) { + const s = joinPath(dataStreamTemplatesDir, item); + const d = joinPath(specificDataStreamDir, item); + copySync(s, d); } + + ensureDirSync(pipelineDir); } function createPipelineTests( diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/dev_folders.ts b/x-pack/plugins/integration_assistant/server/integration_builder/dev_folders.ts index 6f3edf3feaa1a..bac22efe2d3b3 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/dev_folders.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/dev_folders.ts @@ -7,7 +7,7 @@ import { join as joinPath } from 'path'; import nunjucks from 'nunjucks'; -import { Integration } from '../../common'; +import type { Integration } from '../../common'; import { ensureDirSync, createSync } from '../util'; export function createPackageSystemTests(integrationDir: string, 
integration: Integration) { diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts b/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts index 674b4871626ac..60878fb82fbcb 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts @@ -7,10 +7,10 @@ import nunjucks from 'nunjucks'; -import { generateFields, mergeSamples, createSync } from '../util'; +import { createSync, generateFields, mergeSamples } from '../util'; interface Doc { - [key: string]: any; + [key: string]: unknown; } export function createFieldMapping( diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/pipeline.ts b/x-pack/plugins/integration_assistant/server/integration_builder/pipeline.ts index 1194b6a536535..805535322b8e7 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/pipeline.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/pipeline.ts @@ -10,6 +10,6 @@ import { createSync } from '../util'; export function createPipeline(specificDataStreamDir: string, pipeline: object): void { const filePath = joinPath(specificDataStreamDir, 'elasticsearch/ingest_pipeline/default.yml'); - const yamlContent = '---\n' + yaml.dump(pipeline, { sortKeys: false }); + const yamlContent = `---\n${yaml.dump(pipeline, { sortKeys: false })}`; createSync(filePath, yamlContent); } diff --git a/x-pack/plugins/integration_assistant/server/plugin.ts b/x-pack/plugins/integration_assistant/server/plugin.ts index 87f261de28c3b..c05496f3bf8a0 100644 --- a/x-pack/plugins/integration_assistant/server/plugin.ts +++ b/x-pack/plugins/integration_assistant/server/plugin.ts @@ -5,7 +5,7 @@ * 2.0. 
*/ -import { +import type { Plugin, PluginInitializerContext, CoreSetup, @@ -13,9 +13,9 @@ import { Logger, CustomRequestHandlerContext, } from '@kbn/core/server'; -import { PluginStartContract as ActionsPluginsStart } from '@kbn/actions-plugin/server/plugin'; +import type { PluginStartContract as ActionsPluginsStart } from '@kbn/actions-plugin/server/plugin'; import { registerRoutes } from './routes'; -import { IntegrationAssistantPluginSetup, IntegrationAssistantPluginStart } from './types'; +import type { IntegrationAssistantPluginSetup, IntegrationAssistantPluginStart } from './types'; export type IntegrationAssistantRouteHandlerContext = CustomRequestHandlerContext<{ integrationAssistant: { diff --git a/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts b/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts index 26f44414002a2..319ee8f7cb55c 100644 --- a/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts @@ -5,12 +5,12 @@ * 2.0. 
*/ -import { IRouter } from '@kbn/core/server'; +import type { IRouter } from '@kbn/core/server'; import { schema } from '@kbn/config-schema'; import { INTEGRATION_BUILDER_PATH } from '../../common'; import { buildPackage } from '../integration_builder'; import type { BuildIntegrationApiRequest } from '../../common'; -import { IntegrationAssistantRouteHandlerContext } from '../plugin'; +import type { IntegrationAssistantRouteHandlerContext } from '../plugin'; export function registerIntegrationBuilderRoutes( router: IRouter diff --git a/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts b/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts index 211456a97bcd0..5c09a6ca18e85 100644 --- a/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts @@ -5,14 +5,14 @@ * 2.0. */ -import { IRouter } from '@kbn/core/server'; +import type { IRouter } from '@kbn/core/server'; import { schema } from '@kbn/config-schema'; import { BedrockChat } from '@kbn/langchain/server/language_models'; import { CATEGORIZATION_GRAPH_PATH } from '../../common'; import type { CategorizationApiRequest, CategorizationApiResponse } from '../../common'; import { getCategorizationGraph } from '../graphs/categorization'; import { ROUTE_HANDLER_TIMEOUT } from '../constants'; -import { IntegrationAssistantRouteHandlerContext } from '../plugin'; +import type { IntegrationAssistantRouteHandlerContext } from '../plugin'; export function registerCategorizationRoutes( router: IRouter diff --git a/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts b/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts index 93cffb02ae187..b59488f2a56fb 100644 --- a/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts @@ -5,14 +5,14 @@ * 2.0. 
*/ -import { IRouter } from '@kbn/core/server'; +import type { IRouter } from '@kbn/core/server'; import { schema } from '@kbn/config-schema'; import { BedrockChat } from '@kbn/langchain/server/language_models'; import { ECS_GRAPH_PATH } from '../../common'; -import { EcsMappingApiRequest, EcsMappingApiResponse } from '../../common/types'; +import type { EcsMappingApiRequest, EcsMappingApiResponse } from '../../common/types'; import { getEcsGraph } from '../graphs/ecs'; import { ROUTE_HANDLER_TIMEOUT } from '../constants'; -import { IntegrationAssistantRouteHandlerContext } from '../plugin'; +import type { IntegrationAssistantRouteHandlerContext } from '../plugin'; export function registerEcsRoutes(router: IRouter) { router.post( diff --git a/x-pack/plugins/integration_assistant/server/routes/pipeline_routes.ts b/x-pack/plugins/integration_assistant/server/routes/pipeline_routes.ts index f44d94c8cdad6..a3fc5387ffad4 100644 --- a/x-pack/plugins/integration_assistant/server/routes/pipeline_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/pipeline_routes.ts @@ -5,12 +5,12 @@ * 2.0. 
*/ -import { IRouter } from '@kbn/core/server'; +import type { IRouter } from '@kbn/core/server'; import { schema } from '@kbn/config-schema'; import { TEST_PIPELINE_PATH } from '../../common'; import { testPipeline } from '../util/pipeline'; -import { TestPipelineApiRequest, TestPipelineApiResponse } from '../../common/types'; -import { IntegrationAssistantRouteHandlerContext } from '../plugin'; +import type { TestPipelineApiRequest, TestPipelineApiResponse } from '../../common/types'; +import type { IntegrationAssistantRouteHandlerContext } from '../plugin'; export function registerPipelineRoutes(router: IRouter) { router.post( diff --git a/x-pack/plugins/integration_assistant/server/routes/register_routes.ts b/x-pack/plugins/integration_assistant/server/routes/register_routes.ts index d43472f6ad3dd..a8ccc39ff2a0f 100644 --- a/x-pack/plugins/integration_assistant/server/routes/register_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/register_routes.ts @@ -5,13 +5,13 @@ * 2.0. 
*/ -import { IRouter } from '@kbn/core/server'; +import type { IRouter } from '@kbn/core/server'; import { registerEcsRoutes } from './ecs_routes'; import { registerIntegrationBuilderRoutes } from './build_integration_routes'; import { registerCategorizationRoutes } from './categorization_routes'; import { registerRelatedRoutes } from './related_routes'; import { registerPipelineRoutes } from './pipeline_routes'; -import { IntegrationAssistantRouteHandlerContext } from '../plugin'; +import type { IntegrationAssistantRouteHandlerContext } from '../plugin'; export function registerRoutes(router: IRouter) { registerEcsRoutes(router); diff --git a/x-pack/plugins/integration_assistant/server/routes/related_routes.ts b/x-pack/plugins/integration_assistant/server/routes/related_routes.ts index 16cacfdd3b869..8580c6b1601d0 100644 --- a/x-pack/plugins/integration_assistant/server/routes/related_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/related_routes.ts @@ -5,14 +5,14 @@ * 2.0. 
*/ -import { IRouter } from '@kbn/core/server'; +import type { IRouter } from '@kbn/core/server'; import { schema } from '@kbn/config-schema'; import { BedrockChat } from '@kbn/langchain/server/language_models'; import { RELATED_GRAPH_PATH } from '../../common'; -import { RelatedApiRequest, RelatedApiResponse } from '../../common/types'; +import type { RelatedApiRequest, RelatedApiResponse } from '../../common/types'; import { getRelatedGraph } from '../graphs/related'; import { ROUTE_HANDLER_TIMEOUT } from '../constants'; -import { IntegrationAssistantRouteHandlerContext } from '../plugin'; +import type { IntegrationAssistantRouteHandlerContext } from '../plugin'; export function registerRelatedRoutes(router: IRouter) { router.post( diff --git a/x-pack/plugins/integration_assistant/server/util/files.ts b/x-pack/plugins/integration_assistant/server/util/files.ts index 0a5c55dce2658..77c508f81e4b0 100644 --- a/x-pack/plugins/integration_assistant/server/util/files.ts +++ b/x-pack/plugins/integration_assistant/server/util/files.ts @@ -5,7 +5,7 @@ * 2.0. 
*/ -import { readdirSync, writeFileSync, mkdirSync, statSync, readFileSync, cpSync } from 'fs'; +import { cpSync, mkdirSync, readFileSync, readdirSync, statSync, writeFileSync } from 'fs'; import { dirname } from 'path'; export function existsSync(path: string): boolean { @@ -33,13 +33,9 @@ export function createSync(path: string, content: string | Buffer): void { } export function copySync(source: string, destination: string): void { - try { - // Ensure the destination directory exists - mkdirSync(dirname(destination), { recursive: true }); - cpSync(source, destination, { recursive: true }); - } catch (error) { - throw error; - } + // Ensure the destination directory exists + mkdirSync(dirname(destination), { recursive: true }); + cpSync(source, destination, { recursive: true }); } export function listDirSync(path: string): string[] { diff --git a/x-pack/plugins/integration_assistant/server/util/graph.ts b/x-pack/plugins/integration_assistant/server/util/graph.ts index adc2756d515a8..6149be42ce446 100644 --- a/x-pack/plugins/integration_assistant/server/util/graph.ts +++ b/x-pack/plugins/integration_assistant/server/util/graph.ts @@ -4,8 +4,8 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -import { IScopedClusterClient } from '@kbn/core-elasticsearch-server'; -import { CategorizationState, RelatedState } from '../types'; +import type { IScopedClusterClient } from '@kbn/core-elasticsearch-server'; +import type { CategorizationState, RelatedState } from '../types'; import { testPipeline } from './pipeline'; export async function handleValidatePipeline( diff --git a/x-pack/plugins/integration_assistant/server/util/processors.ts b/x-pack/plugins/integration_assistant/server/util/processors.ts index dd87c8d25a68a..8c28a21b16b3c 100644 --- a/x-pack/plugins/integration_assistant/server/util/processors.ts +++ b/x-pack/plugins/integration_assistant/server/util/processors.ts @@ -5,10 +5,13 @@ * 2.0. 
*/ +import type { ESProcessorItem, Pipeline } from '../../common'; import { deepCopy } from './util'; -import type { Pipeline } from '../../common'; -export function combineProcessors(initialPipeline: Pipeline, processors: any[]): Pipeline { +export function combineProcessors( + initialPipeline: Pipeline, + processors: ESProcessorItem[] +): Pipeline { // Create a deep copy of the initialPipeline to avoid modifying the original input const currentPipeline = deepCopy(initialPipeline); diff --git a/x-pack/plugins/integration_assistant/server/util/samples.ts b/x-pack/plugins/integration_assistant/server/util/samples.ts index 23782c7a02d24..8b306213fd3fd 100644 --- a/x-pack/plugins/integration_assistant/server/util/samples.ts +++ b/x-pack/plugins/integration_assistant/server/util/samples.ts @@ -4,11 +4,12 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ +/* eslint-disable @typescript-eslint/no-explicit-any */ import * as yaml from 'js-yaml'; -import { CategorizationState, EcsMappingState, RelatedState } from '../types'; +import type { CategorizationState, EcsMappingState, RelatedState } from '../types'; interface SampleObj { - [key: string]: any; + [key: string]: unknown; } interface NewObj { @@ -43,7 +44,7 @@ export function modifySamples(state: EcsMappingState | CategorizationState | Rel return modifiedSamples; } -function isEmptyValue(value: any): boolean { +function isEmptyValue(value: unknown): boolean { return ( value === null || value === undefined || @@ -52,7 +53,7 @@ function isEmptyValue(value: any): boolean { ); } -function merge(target: Record, source: Record): Record { +function merge(target: Record, source: Record): Record { for (const [key, sourceValue] of Object.entries(source)) { const targetValue = target[key]; if (Array.isArray(sourceValue)) { @@ -76,10 +77,10 @@ function merge(target: Record, source: Record): Record } export function mergeSamples(objects: any[]): string { - let result: Record = {}; + let 
result: Record = {}; for (const obj of objects) { - let sample: Record = obj; + let sample: Record = obj; if (typeof obj === 'string') { sample = JSON.parse(obj); } @@ -90,7 +91,7 @@ export function mergeSamples(objects: any[]): string { } export function formatSamples(samples: string[]): string { - const formattedSamples: any[] = []; + const formattedSamples: unknown[] = []; for (const sample of samples) { const sampleObj = JSON.parse(sample); @@ -100,7 +101,7 @@ export function formatSamples(samples: string[]): string { return JSON.stringify(formattedSamples, null, 2); } -function determineType(value: any): string { +function determineType(value: unknown): string { if (typeof value === 'object' && value !== null) { if (Array.isArray(value)) { return 'group'; @@ -119,7 +120,7 @@ function determineType(value: any): string { return 'keyword'; // Default type for null or other undetermined types } -function recursiveParse(obj: any, path: string[]): Field { +function recursiveParse(obj: unknown, path: string[]): Field { if (typeof obj === 'object' && obj !== null) { if (Array.isArray(obj)) { // Assume list elements are uniform and use the first element as representative diff --git a/x-pack/plugins/integration_assistant/tsconfig.json b/x-pack/plugins/integration_assistant/tsconfig.json index 129cfc4e8e174..95d323dce6d5b 100644 --- a/x-pack/plugins/integration_assistant/tsconfig.json +++ b/x-pack/plugins/integration_assistant/tsconfig.json @@ -8,16 +8,14 @@ "server/**/*.ts", "common/**/*.ts", "__jest__/**/*", - "../../typings/**/*", - ], - "exclude": [ - "target/**/*", + "../../typings/**/*" ], + "exclude": ["target/**/*"], "kbn_references": [ "@kbn/core", "@kbn/config-schema", "@kbn/langchain", "@kbn/core-elasticsearch-server", - "@kbn/actions-plugin", + "@kbn/actions-plugin" ] } From 7cb77997c8648bfa6069d03b114286d7fe69e54f Mon Sep 17 00:00:00 2001 From: Patryk Kopycinski Date: Tue, 4 Jun 2024 20:34:55 +0200 Subject: [PATCH 44/62] Use SimpleChatModel --- package.json 
| 5 - .../language_models/bedrock_chat_model.ts | 132 ---- .../server/language_models/chat_openai.ts | 3 + .../server/language_models/index.ts | 1 - .../language_models/simple_chat_model.ts | 7 +- .../server/language_models/utils_bedrock.ts | 491 ------------- .../integration_assistant/server/config.ts | 2 +- .../integration_assistant/server/plugin.ts | 2 + .../server/routes/categorization_routes.ts | 28 +- .../server/routes/ecs_routes.ts | 28 +- .../server/routes/related_routes.ts | 28 +- .../common/bedrock/constants.ts | 1 - .../stack_connectors/common/bedrock/schema.ts | 12 +- .../server/connector_types/bedrock/bedrock.ts | 78 +- yarn.lock | 677 +----------------- 15 files changed, 81 insertions(+), 1414 deletions(-) delete mode 100644 x-pack/packages/kbn-langchain/server/language_models/bedrock_chat_model.ts delete mode 100644 x-pack/packages/kbn-langchain/server/language_models/utils_bedrock.ts diff --git a/package.json b/package.json index 9138dccdd227f..f4021f8e16d29 100644 --- a/package.json +++ b/package.json @@ -95,9 +95,6 @@ }, "dependencies": { "@appland/sql-parser": "^1.5.1", - "@aws-crypto/sha256-js": "^5.2.0", - "@aws-sdk/credential-provider-node": "^3.583.0", - "@aws-sdk/types": "^3.577.0", "@babel/runtime": "^7.24.4", "@cfworker/json-schema": "^1.12.7", "@dnd-kit/core": "^6.1.0", @@ -950,8 +947,6 @@ "@slack/webhook": "^7.0.1", "@smithy/eventstream-codec": "^3.0.0", "@smithy/eventstream-serde-node": "^3.0.0", - "@smithy/protocol-http": "^4.0.0", - "@smithy/signature-v4": "^3.0.0", "@smithy/types": "^3.0.0", "@smithy/util-utf8": "^3.0.0", "@tanstack/react-query": "^4.29.12", diff --git a/x-pack/packages/kbn-langchain/server/language_models/bedrock_chat_model.ts b/x-pack/packages/kbn-langchain/server/language_models/bedrock_chat_model.ts deleted file mode 100644 index 74582271fda9e..0000000000000 --- a/x-pack/packages/kbn-langchain/server/language_models/bedrock_chat_model.ts +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import { - BedrockChat as _BedrockChat, - convertMessagesToPromptAnthropic, -} from '@langchain/community/chat_models/bedrock/web'; -import { AIMessage, BaseMessage } from '@langchain/core/messages'; -import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager'; -import { ChatResult, ChatGenerationChunk } from '@langchain/core/outputs'; -import { PluginStartContract } from '@kbn/actions-plugin/server/plugin'; -import { BaseChatModelParams } from '@langchain/core/language_models/chat_models'; -import { BaseBedrockInput, BedrockLLMInputOutputAdapter } from './utils_bedrock'; - -export class BedrockChat extends _BedrockChat { - actionsClient: Awaited>; - connectorId: string; - - constructor({ - actionsClient, - connectorId, - ...params - }: { - actionsClient: Awaited>; - connectorId: string; - } & Partial & - BaseChatModelParams) { - // Just to make Langchain BedrockChat happy - super({ ...params, credentials: { accessKeyId: '', secretAccessKey: '' } }); - - this.actionsClient = actionsClient; - this.connectorId = connectorId; - } - - async _generate( - messages: BaseMessage[], - options: this['ParsedCallOptions'], - runManager?: CallbackManagerForLLMRun - ): Promise { - const service = 'bedrock-runtime'; - const endpointHost = this.endpointHost ?? 
`${service}.${this.region}.amazonaws.com`; - const provider = this.model.split('.')[0]; - if (this.streaming) { - const stream = this._streamResponseChunks(messages, options, runManager); - let finalResult: ChatGenerationChunk | undefined; - for await (const chunk of stream) { - if (finalResult === undefined) { - finalResult = chunk; - } else { - finalResult = finalResult.concat(chunk); - } - } - if (finalResult === undefined) { - throw new Error('Could not parse final output from Bedrock streaming call.'); - } - return { - generations: [finalResult], - llmOutput: finalResult.generationInfo, - }; - } - - const response = await this._signedFetch(messages, options, { - bedrockMethod: 'invoke', - endpointHost, - provider, - }); - const json = await response.data.json(); - if (response.status !== 'ok') { - throw new Error(`Error ${response.status}: ${json.message ?? JSON.stringify(json)}`); - } - if (this.usesMessagesApi) { - const outputGeneration = BedrockLLMInputOutputAdapter.prepareMessagesOutput(provider, json); - if (outputGeneration === undefined) { - throw new Error('Failed to parse output generation.'); - } - return { - generations: [outputGeneration], - llmOutput: outputGeneration.generationInfo, - }; - } else { - const text = BedrockLLMInputOutputAdapter.prepareOutput(provider, json); - return { generations: [{ text, message: new AIMessage(text) }] }; - } - } - - async _signedFetch( - messages: BaseMessage[], - options: this['ParsedCallOptions'], - fields: { - bedrockMethod: 'invoke' | 'invoke-with-response-stream'; - endpointHost: string; - provider: string; - } - ) { - const { bedrockMethod, endpointHost, provider } = fields; - const inputBody = this.usesMessagesApi - ? BedrockLLMInputOutputAdapter.prepareMessagesInput( - provider, - messages, - this.maxTokens, - this.temperature, - options.stop ?? 
this.stopSequences, - this.modelKwargs - ) - : BedrockLLMInputOutputAdapter.prepareInput( - provider, - convertMessagesToPromptAnthropic(messages), - this.maxTokens, - this.temperature, - options.stop ?? this.stopSequences, - this.modelKwargs, - fields.bedrockMethod - ); - - return this.actionsClient.execute({ - actionId: this.connectorId, - params: { - subAction: 'runApiRaw', - subActionParams: { - bedrockMethod, - model: this.model, - endpointHost, - body: JSON.stringify(inputBody), - }, - }, - }) as unknown as Promise; - } -} diff --git a/x-pack/packages/kbn-langchain/server/language_models/chat_openai.ts b/x-pack/packages/kbn-langchain/server/language_models/chat_openai.ts index 7675e2442e598..c2dada0dafa3b 100644 --- a/x-pack/packages/kbn-langchain/server/language_models/chat_openai.ts +++ b/x-pack/packages/kbn-langchain/server/language_models/chat_openai.ts @@ -27,6 +27,7 @@ export interface ActionsClientChatOpenAIParams { streaming?: boolean; traceId?: string; maxRetries?: number; + maxTokens?: number; model?: string; temperature?: number; signal?: AbortSignal; @@ -75,9 +76,11 @@ export class ActionsClientChatOpenAI extends ChatOpenAI { streaming = true, temperature, timeout, + maxTokens, }: ActionsClientChatOpenAIParams) { super({ maxRetries, + maxTokens, streaming, // matters only for the LangSmith logs (Metadata > Invocation Params), which are misleading if this is not set modelName: model ?? 
DEFAULT_OPEN_AI_MODEL, diff --git a/x-pack/packages/kbn-langchain/server/language_models/index.ts b/x-pack/packages/kbn-langchain/server/language_models/index.ts index ac42bd2f81c2c..fcde4156e0d02 100644 --- a/x-pack/packages/kbn-langchain/server/language_models/index.ts +++ b/x-pack/packages/kbn-langchain/server/language_models/index.ts @@ -8,4 +8,3 @@ export { ActionsClientChatOpenAI } from './chat_openai'; export { ActionsClientLlm } from './llm'; export { ActionsClientSimpleChatModel } from './simple_chat_model'; -export { BedrockChat } from './bedrock_chat_model'; diff --git a/x-pack/packages/kbn-langchain/server/language_models/simple_chat_model.ts b/x-pack/packages/kbn-langchain/server/language_models/simple_chat_model.ts index f13b0a53611ef..9f6f3a331a7fb 100644 --- a/x-pack/packages/kbn-langchain/server/language_models/simple_chat_model.ts +++ b/x-pack/packages/kbn-langchain/server/language_models/simple_chat_model.ts @@ -35,6 +35,7 @@ export interface CustomChatModelInput extends BaseChatModelParams { temperature?: number; request: KibanaRequest; streaming: boolean; + maxTokens?: number; } export class ActionsClientSimpleChatModel extends SimpleChatModel { @@ -44,6 +45,7 @@ export class ActionsClientSimpleChatModel extends SimpleChatModel { #request: KibanaRequest; #traceId: string; #signal?: AbortSignal; + #maxTokens?: number; llmType: string; streaming: boolean; model?: string; @@ -59,6 +61,7 @@ export class ActionsClientSimpleChatModel extends SimpleChatModel { temperature, signal, streaming, + maxTokens, }: CustomChatModelInput) { super({}); @@ -68,6 +71,7 @@ export class ActionsClientSimpleChatModel extends SimpleChatModel { this.#logger = logger; this.#signal = signal; this.#request = request; + this.#maxTokens = maxTokens; this.llmType = llmType ?? 
'ActionsClientSimpleChatModel'; this.model = model; this.temperature = temperature; @@ -95,7 +99,7 @@ export class ActionsClientSimpleChatModel extends SimpleChatModel { throw new Error('No messages provided.'); } const formattedMessages = []; - if (messages.length === 2) { + if (messages.length >= 2) { messages.forEach((message, i) => { if (typeof message.content !== 'string') { throw new Error('Multimodal messages are not supported.'); @@ -121,6 +125,7 @@ export class ActionsClientSimpleChatModel extends SimpleChatModel { subActionParams: { model: this.model, messages: formattedMessages, + maxTokens: this.#maxTokens, ...getDefaultArguments(this.llmType, this.temperature, options.stop), }, }, diff --git a/x-pack/packages/kbn-langchain/server/language_models/utils_bedrock.ts b/x-pack/packages/kbn-langchain/server/language_models/utils_bedrock.ts deleted file mode 100644 index c14b94809931f..0000000000000 --- a/x-pack/packages/kbn-langchain/server/language_models/utils_bedrock.ts +++ /dev/null @@ -1,491 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -import type { AwsCredentialIdentity, Provider } from '@aws-sdk/types'; -import { AIMessage, AIMessageChunk, BaseMessage } from '@langchain/core/messages'; -import { ChatGeneration, ChatGenerationChunk } from '@langchain/core/outputs'; - -export type CredentialType = AwsCredentialIdentity | Provider; - -function _formatImage(imageUrl: string) { - const regex = /^data:(image\/.+);base64,(.+)$/; - const match = imageUrl.match(regex); - if (match === null) { - throw new Error( - [ - 'Anthropic only supports base64-encoded images currently.', - 'Example: data:image/png;base64,/9j/4AAQSk...', - ].join('\n\n') - ); - } - return { - type: 'base64', - media_type: match[1] ?? '', - data: match[2] ?? 
'', - // eslint-disable-next-line @typescript-eslint/no-explicit-any - } as any; -} - -function formatMessagesForAnthropic(messages: BaseMessage[]): { - system?: string; - messages: Array>; -} { - let system: string | undefined; - if (messages.length > 0 && messages[0]._getType() === 'system') { - if (typeof messages[0].content !== 'string') { - throw new Error('System message content must be a string.'); - } - system = messages[0].content; - } - const conversationMessages = system !== undefined ? messages.slice(1) : messages; - const formattedMessages = conversationMessages.map((message) => { - let role; - if (message._getType() === 'human') { - role = 'user' as const; - } else if (message._getType() === 'ai') { - role = 'assistant' as const; - } else if (message._getType() === 'system') { - throw new Error('System messages are only permitted as the first passed message.'); - } else { - throw new Error(`Message type "${message._getType()}" is not supported.`); - } - if (typeof message.content === 'string') { - return { - role, - content: message.content, - }; - } else { - return { - role, - content: message.content.map((contentPart) => { - if (contentPart.type === 'image_url') { - let source; - if (typeof contentPart.image_url === 'string') { - source = _formatImage(contentPart.image_url); - } else { - source = _formatImage(contentPart.image_url.url); - } - return { - type: 'image' as const, - source, - }; - } else { - return contentPart; - } - }), - }; - } - }); - return { - messages: formattedMessages, - system, - }; -} - -/** - * format messages for Cohere Command-R and CommandR+ via AWS Bedrock. - * - * @param messages messages The base messages to format as a prompt. - * - * @returns The formatted prompt for Cohere. - * - * `system`: user system prompts. Overrides the default preamble for search query generation. 
Has no effect on tool use generations.\ - * `message`: (Required) Text input for the model to respond to.\ - * `chatHistory`: A list of previous messages between the user and the model, meant to give the model conversational context for responding to the user's message.\ - * The following are required fields. - * - `role` - The role for the message. Valid values are USER or CHATBOT.\ - * - `message` – Text contents of the message.\ - * - * The following is example JSON for the chat_history field.\ - * "chat_history": [ - * {"role": "USER", "message": "Who discovered gravity?"}, - * {"role": "CHATBOT", "message": "The man who is widely credited with discovering gravity is Sir Isaac Newton"}]\ - * - * docs: https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-cohere-command-r-plus.html - */ -function formatMessagesForCohere(messages: BaseMessage[]): { - system?: string; - message: string; - chatHistory: Array>; -} { - const systemMessages = messages.filter((system) => system._getType() === 'system'); - - const system = systemMessages - .filter((m) => typeof m.content === 'string') - .map((m) => m.content) - .join('\n\n'); - - const conversationMessages = messages.filter((message) => message._getType() !== 'system'); - - const questionContent = conversationMessages.slice(-1); - - if (!questionContent.length || questionContent[0]._getType() !== 'human') { - throw new Error('question message content must be a human message.'); - } - - if (typeof questionContent[0].content !== 'string') { - throw new Error('question message content must be a string.'); - } - - const formattedMessage = questionContent[0].content; - - const formattedChatHistories = conversationMessages.slice(0, -1).map((message) => { - let role; - switch (message._getType()) { - case 'human': - role = 'USER' as const; - break; - case 'ai': - role = 'CHATBOT' as const; - break; - case 'system': - throw new Error('chat_history can not include system prompts.'); - default: - throw new 
Error(`Message type "${message._getType()}" is not supported.`); - } - - if (typeof message.content !== 'string') { - throw new Error('message content must be a string.'); - } - return { - role, - message: message.content, - }; - }); - - return { - chatHistory: formattedChatHistories, - message: formattedMessage, - system, - }; -} - -/** Bedrock models. - To authenticate, the AWS client uses the following methods to automatically load credentials: - https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html - If a specific credential profile should be used, you must pass the name of the profile from the ~/.aws/credentials file that is to be used. - Make sure the credentials / roles used have the required policies to access the Bedrock service. -*/ -export interface BaseBedrockInput { - /** Model to use. - For example, "amazon.titan-tg1-large", this is equivalent to the modelId property in the list-foundation-models api. - */ - model: string; - - /** The AWS region e.g. `us-west-2`. - Fallback to AWS_DEFAULT_REGION env variable or region specified in ~/.aws/config in case it is not provided here. - */ - region?: string; - - /** AWS Credentials. - If no credentials are provided, the default credentials from `@aws-sdk/credential-provider-node` will be used. - */ - credentials?: CredentialType; - - /** Temperature. */ - temperature?: number; - - /** Max tokens. */ - maxTokens?: number; - - /** A custom fetch function for low-level access to AWS API. Defaults to fetch(). */ - fetchFn?: typeof fetch; - - /** @deprecated Use endpointHost instead Override the default endpoint url. */ - endpointUrl?: string; - - /** Override the default endpoint hostname. */ - endpointHost?: string; - - /** - * Optional additional stop sequences to pass to the model. Currently only supported for Anthropic and AI21. - * @deprecated Use .bind({ "stop": [...] }) instead - * */ - stopSequences?: string[]; - - /** Additional kwargs to pass to the model. 
*/ - modelKwargs?: Record; - - /** Whether or not to stream responses */ - streaming: boolean; -} - -interface Dict { - [key: string]: unknown; -} - -/** - * A helper class used within the `Bedrock` class. It is responsible for - * preparing the input and output for the Bedrock service. It formats the - * input prompt based on the provider (e.g., "anthropic", "ai21", - * "amazon") and extracts the generated text from the service response. - */ -export class BedrockLLMInputOutputAdapter { - /** Adapter class to prepare the inputs from Langchain to a format - that LLM model expects. Also, provides a helper function to extract - the generated text from the model response. */ - - static prepareInput( - provider: string, - prompt: string, - maxTokens = 50, - temperature = 0, - stopSequences: string[] | undefined = undefined, - modelKwargs: Record = {}, - bedrockMethod: 'invoke' | 'invoke-with-response-stream' = 'invoke' - ): Dict { - const inputBody: Dict = {}; - - if (provider === 'anthropic') { - inputBody.prompt = prompt; - inputBody.max_tokens_to_sample = maxTokens; - inputBody.temperature = temperature; - inputBody.stop_sequences = stopSequences; - } else if (provider === 'ai21') { - inputBody.prompt = prompt; - inputBody.maxTokens = maxTokens; - inputBody.temperature = temperature; - inputBody.stopSequences = stopSequences; - } else if (provider === 'meta') { - inputBody.prompt = prompt; - inputBody.max_gen_len = maxTokens; - inputBody.temperature = temperature; - } else if (provider === 'amazon') { - inputBody.inputText = prompt; - inputBody.textGenerationConfig = { - maxTokenCount: maxTokens, - temperature, - }; - } else if (provider === 'cohere') { - inputBody.prompt = prompt; - inputBody.max_tokens = maxTokens; - inputBody.temperature = temperature; - inputBody.stop_sequences = stopSequences; - if (bedrockMethod === 'invoke-with-response-stream') { - inputBody.stream = true; - } - } else if (provider === 'mistral') { - inputBody.prompt = prompt; - 
inputBody.max_tokens = maxTokens; - inputBody.temperature = temperature; - inputBody.stop = stopSequences; - } - return { ...inputBody, ...modelKwargs }; - } - - static prepareMessagesInput( - provider: string, - messages: BaseMessage[], - maxTokens = 1024, - temperature = 0, - stopSequences: string[] | undefined = undefined, - modelKwargs: Record = {} - ): Dict { - const inputBody: Dict = {}; - - if (provider === 'anthropic') { - const { system, messages: formattedMessages } = formatMessagesForAnthropic(messages); - if (system !== undefined) { - inputBody.system = system; - } - inputBody.anthropic_version = 'bedrock-2023-05-31'; - inputBody.messages = formattedMessages; - inputBody.max_tokens = maxTokens; - inputBody.temperature = temperature; - inputBody.stop_sequences = stopSequences; - return { ...inputBody, ...modelKwargs }; - } else if (provider === 'cohere') { - const { - system, - message: formattedMessage, - chatHistory: formattedChatHistories, - } = formatMessagesForCohere(messages); - - if (system !== undefined && system.length > 0) { - inputBody.preamble = system; - } - inputBody.message = formattedMessage; - inputBody.chat_history = formattedChatHistories; - inputBody.max_tokens = maxTokens; - inputBody.temperature = temperature; - inputBody.stop_sequences = stopSequences; - return { ...inputBody, ...modelKwargs }; - } else { - throw new Error('The messages API is currently only supported by Anthropic or Cohere'); - } - } - - /** - * Extracts the generated text from the service response. - * @param provider The provider name. - * @param responseBody The response body from the service. - * @returns The generated text. - */ - // eslint-disable-next-line @typescript-eslint/no-explicit-any - static prepareOutput(provider: string, responseBody: any): string { - if (provider === 'anthropic') { - return responseBody.completion; - } else if (provider === 'ai21') { - return responseBody?.completions?.[0]?.data?.text ?? 
''; - } else if (provider === 'cohere') { - return responseBody?.generations?.[0]?.text ?? responseBody?.text ?? ''; - } else if (provider === 'meta') { - return responseBody.generation; - } else if (provider === 'mistral') { - return responseBody?.outputs?.[0]?.text; - } - - // I haven't been able to get a response with more than one result in it. - return responseBody.results?.[0]?.outputText; - } - - static prepareMessagesOutput( - provider: string, - // eslint-disable-next-line @typescript-eslint/no-explicit-any - response: any - ): ChatGeneration | undefined { - const responseBody = response ?? {}; - if (provider === 'anthropic') { - if (responseBody.type === 'message_start') { - return parseMessage(responseBody.message, true); - } else if ( - responseBody.type === 'content_block_delta' && - responseBody.delta?.type === 'text_delta' && - typeof responseBody.delta?.text === 'string' - ) { - return new ChatGenerationChunk({ - message: new AIMessageChunk({ - content: responseBody.delta.text, - }), - text: responseBody.delta.text, - }); - } else if (responseBody.type === 'message_delta') { - return new ChatGenerationChunk({ - message: new AIMessageChunk({ content: '' }), - text: '', - generationInfo: { - ...responseBody.delta, - usage: responseBody.usage, - }, - }); - } else if ( - responseBody.type === 'message_stop' && - responseBody['amazon-bedrock-invocationMetrics'] !== undefined - ) { - return new ChatGenerationChunk({ - message: new AIMessageChunk({ content: '' }), - text: '', - generationInfo: { - 'amazon-bedrock-invocationMetrics': responseBody['amazon-bedrock-invocationMetrics'], - }, - }); - } else if (responseBody.type === 'message') { - return parseMessage(responseBody); - } else { - return undefined; - } - } else if (provider === 'cohere') { - if (responseBody.event_type === 'stream-start') { - return parseMessageCohere(responseBody.message, true); - } else if ( - responseBody.event_type === 'text-generation' && - typeof responseBody?.text === 
'string' - ) { - return new ChatGenerationChunk({ - message: new AIMessageChunk({ - content: responseBody.text, - }), - text: responseBody.text, - }); - } else if (responseBody.event_type === 'search-queries-generation') { - return parseMessageCohere(responseBody); - } else if ( - responseBody.event_type === 'stream-end' && - responseBody.response !== undefined && - responseBody['amazon-bedrock-invocationMetrics'] !== undefined - ) { - return new ChatGenerationChunk({ - message: new AIMessageChunk({ content: '' }), - text: '', - generationInfo: { - response: responseBody.response, - 'amazon-bedrock-invocationMetrics': responseBody['amazon-bedrock-invocationMetrics'], - }, - }); - } else { - if ( - responseBody.finish_reason === 'COMPLETE' || - responseBody.finish_reason === 'MAX_TOKENS' - ) { - return parseMessageCohere(responseBody); - } else { - return undefined; - } - } - } else { - throw new Error('The messages API is currently only supported by Anthropic or Cohere.'); - } - } -} - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function parseMessage(responseBody: any, asChunk?: boolean): ChatGeneration { - const { content, id, ...generationInfo } = responseBody; - let parsedContent; - if (Array.isArray(content) && content.length === 1 && content[0].type === 'text') { - parsedContent = content[0].text; - } else if (Array.isArray(content) && content.length === 0) { - parsedContent = ''; - } else { - parsedContent = content; - } - if (asChunk) { - return new ChatGenerationChunk({ - message: new AIMessageChunk({ - content: parsedContent, - additional_kwargs: { id }, - }), - text: typeof parsedContent === 'string' ? parsedContent : '', - generationInfo, - }); - } else { - return { - message: new AIMessage({ - content: parsedContent, - additional_kwargs: { id }, - }), - text: typeof parsedContent === 'string' ? 
parsedContent : '', - generationInfo, - }; - } -} - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function parseMessageCohere(responseBody: any, asChunk?: boolean): ChatGeneration { - const { text, ...generationInfo } = responseBody; - let parsedContent = text; - if (typeof text !== 'string') { - parsedContent = ''; - } - if (asChunk) { - return new ChatGenerationChunk({ - message: new AIMessageChunk({ - content: parsedContent, - }), - text: parsedContent, - generationInfo, - }); - } else { - return { - message: new AIMessage({ - content: parsedContent, - }), - text: parsedContent, - generationInfo, - }; - } -} diff --git a/x-pack/plugins/integration_assistant/server/config.ts b/x-pack/plugins/integration_assistant/server/config.ts index c8c81b9f63743..c2490982ca9f6 100644 --- a/x-pack/plugins/integration_assistant/server/config.ts +++ b/x-pack/plugins/integration_assistant/server/config.ts @@ -9,7 +9,7 @@ import { schema, type TypeOf } from '@kbn/config-schema'; import type { PluginConfigDescriptor } from '@kbn/core/server'; export const configSchema = schema.object({ - enabled: schema.boolean({ defaultValue: false }), + enabled: schema.boolean({ defaultValue: true }), }); export type ServerlessSecuritySchema = TypeOf; diff --git a/x-pack/plugins/integration_assistant/server/plugin.ts b/x-pack/plugins/integration_assistant/server/plugin.ts index 87f261de28c3b..4c729bc77a8ec 100644 --- a/x-pack/plugins/integration_assistant/server/plugin.ts +++ b/x-pack/plugins/integration_assistant/server/plugin.ts @@ -22,6 +22,7 @@ export type IntegrationAssistantRouteHandlerContext = CustomRequestHandlerContex getStartServices: CoreSetup<{ actions: ActionsPluginsStart; }>['getStartServices']; + logger: Logger; }; }>; @@ -43,6 +44,7 @@ export class IntegrationAssistantPlugin 'integrationAssistant' >('integrationAssistant', () => ({ getStartServices: core.getStartServices, + logger: this.logger, })); const router = core.http.createRouter(); 
this.logger.debug('integrationAssistant api: Setup'); diff --git a/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts b/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts index 211456a97bcd0..ddb19048de374 100644 --- a/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts @@ -7,7 +7,11 @@ import { IRouter } from '@kbn/core/server'; import { schema } from '@kbn/config-schema'; -import { BedrockChat } from '@kbn/langchain/server/language_models'; +import { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; +import { getRequestAbortedSignal } from '@kbn/data-plugin/server'; import { CATEGORIZATION_GRAPH_PATH } from '../../common'; import type { CategorizationApiRequest, CategorizationApiResponse } from '../../common'; import { getCategorizationGraph } from '../graphs/categorization'; @@ -43,7 +47,7 @@ export function registerCategorizationRoutes( const services = await context.resolve(['core']); const { client } = services.core.elasticsearch; - const { getStartServices } = await context.integrationAssistant; + const { getStartServices, logger } = await context.integrationAssistant; const [, { actions: actionsPlugin }] = await getStartServices(); const actionsClient = await actionsPlugin.getActionsClientWithRequest(req); const connector = req.body.connectorId @@ -52,19 +56,21 @@ export function registerCategorizationRoutes( (connectorItem) => connectorItem.actionTypeId === '.bedrock' )[0]; - const model = new BedrockChat({ - actionsClient, + const abortSignal = getRequestAbortedSignal(req.events.aborted$); + const isOpenAI = connector.actionTypeId === '.gen-ai'; + const llmClass = isOpenAI ? 
ActionsClientChatOpenAI : ActionsClientSimpleChatModel; + + const model = new llmClass({ + actions: actionsPlugin, connectorId: connector.id, + request: req, + logger, + llmType: isOpenAI ? 'openai' : 'bedrock', model: req.body.model || connector.config?.defaultModel, - region: req.body.region || connector.config?.apiUrl.split('.')[1], temperature: 0.05, maxTokens: 4096, - modelKwargs: { - top_k: 200, - temperature: 0.05, - top_p: 0.4, - stop_sequences: ['Human:'], - }, + signal: abortSignal, + streaming: false, }); const graph = await getCategorizationGraph(client, model); diff --git a/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts b/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts index 93cffb02ae187..8a180383ea70b 100644 --- a/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts @@ -7,7 +7,11 @@ import { IRouter } from '@kbn/core/server'; import { schema } from '@kbn/config-schema'; -import { BedrockChat } from '@kbn/langchain/server/language_models'; +import { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; +import { getRequestAbortedSignal } from '@kbn/data-plugin/server'; import { ECS_GRAPH_PATH } from '../../common'; import { EcsMappingApiRequest, EcsMappingApiResponse } from '../../common/types'; import { getEcsGraph } from '../graphs/ecs'; @@ -39,7 +43,7 @@ export function registerEcsRoutes(router: IRouter { const { packageName, dataStreamName, rawSamples, mapping } = req.body as EcsMappingApiRequest; - const { getStartServices } = await context.integrationAssistant; + const { getStartServices, logger } = await context.integrationAssistant; const [, { actions: actionsPlugin }] = await getStartServices(); const actionsClient = await actionsPlugin.getActionsClientWithRequest(req); const connector = req.body.connectorId @@ -48,19 +52,21 @@ export function registerEcsRoutes(router: 
IRouter connectorItem.actionTypeId === '.bedrock' )[0]; - const model = new BedrockChat({ - actionsClient, + const abortSignal = getRequestAbortedSignal(req.events.aborted$); + const isOpenAI = connector.actionTypeId === '.gen-ai'; + const llmClass = isOpenAI ? ActionsClientChatOpenAI : ActionsClientSimpleChatModel; + + const model = new llmClass({ + actions: actionsPlugin, connectorId: connector.id, + request: req, + logger, + llmType: isOpenAI ? 'openai' : 'bedrock', model: req.body.model || connector.config?.defaultModel, - region: req.body.region || connector.config?.apiUrl.split('.')[1], temperature: 0.05, maxTokens: 4096, - modelKwargs: { - top_k: 200, - temperature: 0.05, - top_p: 0.4, - stop_sequences: ['Human:'], - }, + signal: abortSignal, + streaming: false, }); const graph = await getEcsGraph(model); diff --git a/x-pack/plugins/integration_assistant/server/routes/related_routes.ts b/x-pack/plugins/integration_assistant/server/routes/related_routes.ts index 16cacfdd3b869..e039c8cf6df57 100644 --- a/x-pack/plugins/integration_assistant/server/routes/related_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/related_routes.ts @@ -7,7 +7,11 @@ import { IRouter } from '@kbn/core/server'; import { schema } from '@kbn/config-schema'; -import { BedrockChat } from '@kbn/langchain/server/language_models'; +import { getRequestAbortedSignal } from '@kbn/data-plugin/server'; +import { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; import { RELATED_GRAPH_PATH } from '../../common'; import { RelatedApiRequest, RelatedApiResponse } from '../../common/types'; import { getRelatedGraph } from '../graphs/related'; @@ -42,7 +46,7 @@ export function registerRelatedRoutes(router: IRouter connectorItem.actionTypeId === '.bedrock' )[0]; - const model = new BedrockChat({ - actionsClient, + const isOpenAI = connector.actionTypeId === '.gen-ai'; + const llmClass = isOpenAI ? 
ActionsClientChatOpenAI : ActionsClientSimpleChatModel; + const abortSignal = getRequestAbortedSignal(req.events.aborted$); + + const model = new llmClass({ + actions: actionsPlugin, connectorId: connector.id, + request: req, + logger, + llmType: isOpenAI ? 'openai' : 'bedrock', model: req.body.model || connector.config?.defaultModel, - region: req.body.region || connector.config?.apiUrl.split('.')[1], temperature: 0.05, maxTokens: 4096, - modelKwargs: { - top_k: 200, - temperature: 0.05, - top_p: 0.4, - stop_sequences: ['Human:'], - }, + signal: abortSignal, + streaming: false, }); const graph = await getRelatedGraph(client, model); diff --git a/x-pack/plugins/stack_connectors/common/bedrock/constants.ts b/x-pack/plugins/stack_connectors/common/bedrock/constants.ts index 81a4f8bf3aa83..053ca82e0e274 100644 --- a/x-pack/plugins/stack_connectors/common/bedrock/constants.ts +++ b/x-pack/plugins/stack_connectors/common/bedrock/constants.ts @@ -16,7 +16,6 @@ export const BEDROCK_TITLE = i18n.translate( export const BEDROCK_CONNECTOR_ID = '.bedrock'; export enum SUB_ACTION { RUN = 'run', - RUN_RAW = 'runApiRaw', INVOKE_AI = 'invokeAI', INVOKE_STREAM = 'invokeStream', DASHBOARD = 'getDashboard', diff --git a/x-pack/plugins/stack_connectors/common/bedrock/schema.ts b/x-pack/plugins/stack_connectors/common/bedrock/schema.ts index aa1100ba01657..bf35aa6bb8e0d 100644 --- a/x-pack/plugins/stack_connectors/common/bedrock/schema.ts +++ b/x-pack/plugins/stack_connectors/common/bedrock/schema.ts @@ -19,17 +19,6 @@ export const SecretsSchema = schema.object({ secret: schema.string(), }); -export const RunRawActionParamsSchema = schema.object( - { - // body: schema.string(), - // model: schema.maybe(schema.string()), - // // abort signal from client - // signal: schema.maybe(schema.any()), - // timeout: schema.maybe(schema.number()), - }, - { unknowns: 'allow' } -); - export const RunActionParamsSchema = schema.object({ body: schema.string(), model: schema.maybe(schema.string()), 
@@ -49,6 +38,7 @@ export const InvokeAIActionParamsSchema = schema.object({ temperature: schema.maybe(schema.number()), stopSequences: schema.maybe(schema.arrayOf(schema.string())), system: schema.maybe(schema.string()), + maxTokens: schema.maybe(schema.number()), // abort signal from client signal: schema.maybe(schema.any()), timeout: schema.maybe(schema.number()), diff --git a/x-pack/plugins/stack_connectors/server/connector_types/bedrock/bedrock.ts b/x-pack/plugins/stack_connectors/server/connector_types/bedrock/bedrock.ts index 66cbdf68be293..8b05c30a5b0cb 100644 --- a/x-pack/plugins/stack_connectors/server/connector_types/bedrock/bedrock.ts +++ b/x-pack/plugins/stack_connectors/server/connector_types/bedrock/bedrock.ts @@ -5,10 +5,6 @@ * 2.0. */ -import fetch from 'node-fetch'; -import { SignatureV4 } from '@smithy/signature-v4'; -import { HttpRequest } from '@smithy/protocol-http'; -import { Sha256 } from '@aws-crypto/sha256-js'; import { ServiceParams, SubActionConnector } from '@kbn/actions-plugin/server'; import aws from 'aws4'; import { AxiosError, Method } from 'axios'; @@ -22,7 +18,6 @@ import { StreamingResponseSchema, RunActionResponseSchema, RunApiLatestResponseSchema, - RunRawActionParamsSchema, } from '../../../common/bedrock/schema'; import { Config, @@ -84,12 +79,6 @@ export class BedrockConnector extends SubActionConnector { schema: RunActionParamsSchema, }); - this.registerSubAction({ - name: SUB_ACTION.RUN_RAW, - method: 'runApiRaw', - schema: RunRawActionParamsSchema, - }); - this.registerSubAction({ name: SUB_ACTION.INVOKE_AI, method: 'invokeAI', @@ -214,50 +203,6 @@ The Kibana Connector in use may need to be reconfigured with an updated Amazon B }; } - public async runApiRaw({ - body, - bedrockMethod, - model, - signal, - timeout, - endpointHost, - }: RunActionParams): Promise { - const url = new URL(`https://${endpointHost}/model/${model}/${bedrockMethod}`); - - const request = new HttpRequest({ - hostname: url.hostname, - path: 
url.pathname, - protocol: url.protocol, - method: 'POST', // method must be uppercase - body, - query: Object.fromEntries(url.searchParams.entries()), - headers: { - // host is required by AWS Signature V4: https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html - host: url.host, - accept: 'application/json', - 'content-type': 'application/json', - }, - }); - - const signer = new SignatureV4({ - credentials: { - secretAccessKey: this.secrets.secret, - accessKeyId: this.secrets.accessKey, - }, - service: 'bedrock', - region: 'us-west-2', // this.region, - sha256: Sha256, - }); - - const signedRequest = await signer.sign(request); - - return fetch(url, { - headers: signedRequest.headers, - body: signedRequest.body, - method: signedRequest.method, - }); - } - /** * responsible for making a POST request to the external API endpoint and returning the response data * @param body The stringified request body to be sent in the POST request. @@ -361,11 +306,14 @@ The Kibana Connector in use may need to be reconfigured with an updated Amazon B stopSequences, system, temperature, + maxTokens, signal, timeout, }: InvokeAIActionParams): Promise { const res = await this.runApi({ - body: JSON.stringify(formatBedrockBody({ messages, stopSequences, system, temperature })), + body: JSON.stringify( + formatBedrockBody({ messages, stopSequences, system, temperature, maxTokens }) + ), model, signal, timeout, @@ -379,16 +327,18 @@ const formatBedrockBody = ({ stopSequences, temperature = 0, system, + maxTokens = DEFAULT_TOKEN_LIMIT, }: { messages: Array<{ role: string; content: string }>; stopSequences?: string[]; temperature?: number; + maxTokens?: number; // optional system message to be sent to the API system?: string; }) => ({ anthropic_version: 'bedrock-2023-05-31', ...ensureMessageFormat(messages, system), - max_tokens: DEFAULT_TOKEN_LIMIT, + max_tokens: maxTokens, stop_sequences: stopSequences, temperature, }); @@ -407,6 +357,11 @@ const 
ensureMessageFormat = ( const newMessages = messages.reduce((acc: Array<{ role: string; content: string }>, m) => { const lastMessage = acc[acc.length - 1]; + if (m.role === 'system') { + system = `${system.length ? `${system}\n` : ''}${m.content}`; + return acc; + } + if (lastMessage && lastMessage.role === m.role) { // Bedrock only accepts assistant and user roles. // If 2 user or 2 assistant messages are sent in a row, combine the messages into a single message @@ -415,13 +370,12 @@ const ensureMessageFormat = ( { content: `${lastMessage.content}\n${m.content}`, role: m.role }, ]; } - if (m.role === 'system') { - system = `${system.length ? `${system}\n` : ''}${m.content}`; - return acc; - } // force role outside of system to ensure it is either assistant or user - return [...acc, { content: m.content, role: m.role === 'assistant' ? 'assistant' : 'user' }]; + return [ + ...acc, + { content: m.content, role: ['assistant', 'ai'].includes(m.role) ? 'assistant' : 'user' }, + ]; }, []); return system.length ? 
{ system, messages: newMessages } : { messages: newMessages }; }; diff --git a/yarn.lock b/yarn.lock index 5d15224196da7..a541fb815c81f 100644 --- a/yarn.lock +++ b/yarn.lock @@ -86,52 +86,6 @@ "@aws-sdk/types" "^3.222.0" tslib "^1.11.1" -"@aws-crypto/ie11-detection@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@aws-crypto/ie11-detection/-/ie11-detection-3.0.0.tgz#640ae66b4ec3395cee6a8e94ebcd9f80c24cd688" - integrity sha512-341lBBkiY1DfDNKai/wXM3aujNBkXR7tq1URPQDL9wi3AUbI80NR74uF1TXHMm7po1AcnFk8iu2S2IeU/+/A+Q== - dependencies: - tslib "^1.11.1" - -"@aws-crypto/sha256-browser@3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-browser/-/sha256-browser-3.0.0.tgz#05f160138ab893f1c6ba5be57cfd108f05827766" - integrity sha512-8VLmW2B+gjFbU5uMeqtQM6Nj0/F1bro80xQXCW6CQBWgosFWXTx77aeOF5CAIAmbOK64SdMBJdNr6J41yP5mvQ== - dependencies: - "@aws-crypto/ie11-detection" "^3.0.0" - "@aws-crypto/sha256-js" "^3.0.0" - "@aws-crypto/supports-web-crypto" "^3.0.0" - "@aws-crypto/util" "^3.0.0" - "@aws-sdk/types" "^3.222.0" - "@aws-sdk/util-locate-window" "^3.0.0" - "@aws-sdk/util-utf8-browser" "^3.0.0" - tslib "^1.11.1" - -"@aws-crypto/sha256-js@3.0.0", "@aws-crypto/sha256-js@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-js/-/sha256-js-3.0.0.tgz#f06b84d550d25521e60d2a0e2a90139341e007c2" - integrity sha512-PnNN7os0+yd1XvXAy23CFOmTbMaDxgxXtTKHybrJ39Y8kGzBATgBFibWJKH6BhytLI/Zyszs87xCOBNyBig6vQ== - dependencies: - "@aws-crypto/util" "^3.0.0" - "@aws-sdk/types" "^3.222.0" - tslib "^1.11.1" - -"@aws-crypto/sha256-js@^5.2.0": - version "5.2.0" - resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-js/-/sha256-js-5.2.0.tgz#c4fdb773fdbed9a664fc1a95724e206cf3860042" - integrity sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA== - dependencies: - "@aws-crypto/util" "^5.2.0" - "@aws-sdk/types" "^3.222.0" - tslib "^2.6.2" - 
-"@aws-crypto/supports-web-crypto@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@aws-crypto/supports-web-crypto/-/supports-web-crypto-3.0.0.tgz#5d1bf825afa8072af2717c3e455f35cda0103ec2" - integrity sha512-06hBdMwUAb2WFTuGG73LSC0wfPu93xWwo5vL2et9eymgmu3Id5vFAHBbajVWiGhPO37qcsdCap/FqXvJGJWPIg== - dependencies: - tslib "^1.11.1" - "@aws-crypto/util@^3.0.0": version "3.0.0" resolved "https://registry.yarnpkg.com/@aws-crypto/util/-/util-3.0.0.tgz#1c7ca90c29293f0883468ad48117937f0fe5bfb0" @@ -141,229 +95,7 @@ "@aws-sdk/util-utf8-browser" "^3.0.0" tslib "^1.11.1" -"@aws-crypto/util@^5.2.0": - version "5.2.0" - resolved "https://registry.yarnpkg.com/@aws-crypto/util/-/util-5.2.0.tgz#71284c9cffe7927ddadac793c14f14886d3876da" - integrity sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ== - dependencies: - "@aws-sdk/types" "^3.222.0" - "@smithy/util-utf8" "^2.0.0" - tslib "^2.6.2" - -"@aws-sdk/client-sso@3.583.0": - version "3.583.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/client-sso/-/client-sso-3.583.0.tgz#fa18cadd19abe80e0c0378b6cbe6225ed0296595" - integrity sha512-FNJ2MmiBtZZwgkj4+GLVrzqwmD6D8FBptrFZk7PnGkSf7v1Q8txYNI6gY938RRhYJ4lBW4cNbhPvWoDxAl90Hw== - dependencies: - "@aws-crypto/sha256-browser" "3.0.0" - "@aws-crypto/sha256-js" "3.0.0" - "@aws-sdk/core" "3.582.0" - "@aws-sdk/middleware-host-header" "3.577.0" - "@aws-sdk/middleware-logger" "3.577.0" - "@aws-sdk/middleware-recursion-detection" "3.577.0" - "@aws-sdk/middleware-user-agent" "3.583.0" - "@aws-sdk/region-config-resolver" "3.577.0" - "@aws-sdk/types" "3.577.0" - "@aws-sdk/util-endpoints" "3.583.0" - "@aws-sdk/util-user-agent-browser" "3.577.0" - "@aws-sdk/util-user-agent-node" "3.577.0" - "@smithy/config-resolver" "^3.0.0" - "@smithy/core" "^2.0.1" - "@smithy/fetch-http-handler" "^3.0.1" - "@smithy/hash-node" "^3.0.0" - "@smithy/invalid-dependency" "^3.0.0" - "@smithy/middleware-content-length" "^3.0.0" - "@smithy/middleware-endpoint" 
"^3.0.0" - "@smithy/middleware-retry" "^3.0.1" - "@smithy/middleware-serde" "^3.0.0" - "@smithy/middleware-stack" "^3.0.0" - "@smithy/node-config-provider" "^3.0.0" - "@smithy/node-http-handler" "^3.0.0" - "@smithy/protocol-http" "^4.0.0" - "@smithy/smithy-client" "^3.0.1" - "@smithy/types" "^3.0.0" - "@smithy/url-parser" "^3.0.0" - "@smithy/util-base64" "^3.0.0" - "@smithy/util-body-length-browser" "^3.0.0" - "@smithy/util-body-length-node" "^3.0.0" - "@smithy/util-defaults-mode-browser" "^3.0.1" - "@smithy/util-defaults-mode-node" "^3.0.1" - "@smithy/util-endpoints" "^2.0.0" - "@smithy/util-middleware" "^3.0.0" - "@smithy/util-retry" "^3.0.0" - "@smithy/util-utf8" "^3.0.0" - tslib "^2.6.2" - -"@aws-sdk/core@3.582.0": - version "3.582.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/core/-/core-3.582.0.tgz#9ebb295290cba3d68738401fe4e3d51dfb0d1bfc" - integrity sha512-ofmD96IQc9g1dbyqlCyxu5fCG7kIl9p1NoN5+vGBUyLdbmPCV3Pdg99nRHYEJuv2MgGx5AUFGDPMHcqbJpnZIw== - dependencies: - "@smithy/core" "^2.0.1" - "@smithy/protocol-http" "^4.0.0" - "@smithy/signature-v4" "^3.0.0" - "@smithy/smithy-client" "^3.0.1" - "@smithy/types" "^3.0.0" - fast-xml-parser "4.2.5" - tslib "^2.6.2" - -"@aws-sdk/credential-provider-env@3.577.0": - version "3.577.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-env/-/credential-provider-env-3.577.0.tgz#d587ea01a2288840e8483a236516c0f26cb4ba36" - integrity sha512-Jxu255j0gToMGEiqufP8ZtKI8HW90lOLjwJ3LrdlD/NLsAY0tOQf1fWc53u28hWmmNGMxmCrL2p66IOgMDhDUw== - dependencies: - "@aws-sdk/types" "3.577.0" - "@smithy/property-provider" "^3.0.0" - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@aws-sdk/credential-provider-http@3.582.0": - version "3.582.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-http/-/credential-provider-http-3.582.0.tgz#6ea9377461c4ce38d487ea0ae5888155f7c495a6" - integrity sha512-kGOUKw5ryPkDIYB69PjK3SicVLTbWB06ouFN2W1EvqUJpkQGPAUGzYcomKtt3mJaCTf/1kfoaHwARAl6KKSP8Q== - dependencies: - 
"@aws-sdk/types" "3.577.0" - "@smithy/fetch-http-handler" "^3.0.1" - "@smithy/node-http-handler" "^3.0.0" - "@smithy/property-provider" "^3.0.0" - "@smithy/protocol-http" "^4.0.0" - "@smithy/smithy-client" "^3.0.1" - "@smithy/types" "^3.0.0" - "@smithy/util-stream" "^3.0.1" - tslib "^2.6.2" - -"@aws-sdk/credential-provider-ini@3.583.0": - version "3.583.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.583.0.tgz#948ebd3ca257d7d9362d3294259e0be9526cd662" - integrity sha512-8I0oWNg/yps6ctjhEeL/qJ9BIa/+xXP7RPDQqFKZ2zBkWbmLLOoMWXRvl8uKUBD6qCe+DGmcu9skfVXeXSesEQ== - dependencies: - "@aws-sdk/credential-provider-env" "3.577.0" - "@aws-sdk/credential-provider-process" "3.577.0" - "@aws-sdk/credential-provider-sso" "3.583.0" - "@aws-sdk/credential-provider-web-identity" "3.577.0" - "@aws-sdk/types" "3.577.0" - "@smithy/credential-provider-imds" "^3.0.0" - "@smithy/property-provider" "^3.0.0" - "@smithy/shared-ini-file-loader" "^3.0.0" - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@aws-sdk/credential-provider-node@^3.583.0": - version "3.583.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-node/-/credential-provider-node-3.583.0.tgz#8ce316409d91cddca0c85851ca50726ee666cff5" - integrity sha512-yBNypBXny7zJH85SzxDj8s1mbLXv9c/Vbq0qR3R3POj2idZ6ywB/qlIRC1XwBuv49Wvg8kA1wKXk3K3jrpcVIw== - dependencies: - "@aws-sdk/credential-provider-env" "3.577.0" - "@aws-sdk/credential-provider-http" "3.582.0" - "@aws-sdk/credential-provider-ini" "3.583.0" - "@aws-sdk/credential-provider-process" "3.577.0" - "@aws-sdk/credential-provider-sso" "3.583.0" - "@aws-sdk/credential-provider-web-identity" "3.577.0" - "@aws-sdk/types" "3.577.0" - "@smithy/credential-provider-imds" "^3.0.0" - "@smithy/property-provider" "^3.0.0" - "@smithy/shared-ini-file-loader" "^3.0.0" - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@aws-sdk/credential-provider-process@3.577.0": - version "3.577.0" - resolved 
"https://registry.yarnpkg.com/@aws-sdk/credential-provider-process/-/credential-provider-process-3.577.0.tgz#ba35b4f012563762bbd86a71989d366272ee0f07" - integrity sha512-Gin6BWtOiXxIgITrJ3Nwc+Y2P1uVT6huYR4EcbA/DJUPWyO0n9y5UFLewPvVbLkRn15JeEqErBLUrHclkiOKtw== - dependencies: - "@aws-sdk/types" "3.577.0" - "@smithy/property-provider" "^3.0.0" - "@smithy/shared-ini-file-loader" "^3.0.0" - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@aws-sdk/credential-provider-sso@3.583.0": - version "3.583.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.583.0.tgz#468bb6ca9bd7b89370d5ec7865a8e29e98772abc" - integrity sha512-G/1EvL9tBezSiU+06tG4K/kOvFfPjnheT4JSXqjPM7+vjKzgp2jxp1J9MMd69zs4jVWon932zMeGgjrCplzMEg== - dependencies: - "@aws-sdk/client-sso" "3.583.0" - "@aws-sdk/token-providers" "3.577.0" - "@aws-sdk/types" "3.577.0" - "@smithy/property-provider" "^3.0.0" - "@smithy/shared-ini-file-loader" "^3.0.0" - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@aws-sdk/credential-provider-web-identity@3.577.0": - version "3.577.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.577.0.tgz#294fb71fa832d9f55ea1c56678357efa3cd7ca55" - integrity sha512-ZGHGNRaCtJJmszb9UTnC7izNCtRUttdPlLdMkh41KPS32vfdrBDHs1JrpbZijItRj1xKuOXsiYSXLAaHGcLh8Q== - dependencies: - "@aws-sdk/types" "3.577.0" - "@smithy/property-provider" "^3.0.0" - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@aws-sdk/middleware-host-header@3.577.0": - version "3.577.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-host-header/-/middleware-host-header-3.577.0.tgz#a3fc626d409ec850296740478c64ef5806d8b878" - integrity sha512-9ca5MJz455CODIVXs0/sWmJm7t3QO4EUa1zf8pE8grLpzf0J94bz/skDWm37Pli13T3WaAQBHCTiH2gUVfCsWg== - dependencies: - "@aws-sdk/types" "3.577.0" - "@smithy/protocol-http" "^4.0.0" - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@aws-sdk/middleware-logger@3.577.0": - version "3.577.0" 
- resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-logger/-/middleware-logger-3.577.0.tgz#6da3b13ae284fb3930961f0fc8e20b1f6cf8be30" - integrity sha512-aPFGpGjTZcJYk+24bg7jT4XdIp42mFXSuPt49lw5KygefLyJM/sB0bKKqPYYivW0rcuZ9brQ58eZUNthrzYAvg== - dependencies: - "@aws-sdk/types" "3.577.0" - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@aws-sdk/middleware-recursion-detection@3.577.0": - version "3.577.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.577.0.tgz#fff76abc6d4521636f9e654ce5bf2c4c79249417" - integrity sha512-pn3ZVEd2iobKJlR3H+bDilHjgRnNrQ6HMmK9ZzZw89Ckn3Dcbv48xOv4RJvu0aU8SDLl/SNCxppKjeLDTPGBNA== - dependencies: - "@aws-sdk/types" "3.577.0" - "@smithy/protocol-http" "^4.0.0" - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@aws-sdk/middleware-user-agent@3.583.0": - version "3.583.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.583.0.tgz#5554b0de431cb3700368f01eb7425210fd3ee9a9" - integrity sha512-xVNXXXDWvBVI/AeVtSdA9SVumqxiZaESk/JpUn9GMkmtTKfter0Cweap+1iQ9j8bRAO0vNhmIkbcvdB1S4WVUw== - dependencies: - "@aws-sdk/types" "3.577.0" - "@aws-sdk/util-endpoints" "3.583.0" - "@smithy/protocol-http" "^4.0.0" - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@aws-sdk/region-config-resolver@3.577.0": - version "3.577.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/region-config-resolver/-/region-config-resolver-3.577.0.tgz#1fab6dc6c4ec3ad9a0352c1ce1a757464219fb00" - integrity sha512-4ChCFACNwzqx/xjg3zgFcW8Ali6R9C95cFECKWT/7CUM1D0MGvkclSH2cLarmHCmJgU6onKkJroFtWp0kHhgyg== - dependencies: - "@aws-sdk/types" "3.577.0" - "@smithy/node-config-provider" "^3.0.0" - "@smithy/types" "^3.0.0" - "@smithy/util-config-provider" "^3.0.0" - "@smithy/util-middleware" "^3.0.0" - tslib "^2.6.2" - -"@aws-sdk/token-providers@3.577.0": - version "3.577.0" - resolved 
"https://registry.yarnpkg.com/@aws-sdk/token-providers/-/token-providers-3.577.0.tgz#8f9e96ff42994dfd0b5b3692b583644ccda04893" - integrity sha512-0CkIZpcC3DNQJQ1hDjm2bdSy/Xjs7Ny5YvSsacasGOkNfk+FdkiQy6N67bZX3Zbc9KIx+Nz4bu3iDeNSNplnnQ== - dependencies: - "@aws-sdk/types" "3.577.0" - "@smithy/property-provider" "^3.0.0" - "@smithy/shared-ini-file-loader" "^3.0.0" - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@aws-sdk/types@3.577.0", "@aws-sdk/types@^3.222.0", "@aws-sdk/types@^3.577.0": +"@aws-sdk/types@^3.222.0": version "3.577.0" resolved "https://registry.yarnpkg.com/@aws-sdk/types/-/types-3.577.0.tgz#7700784d368ce386745f8c340d9d68cea4716f90" integrity sha512-FT2JZES3wBKN/alfmhlo+3ZOq/XJ0C7QOZcDNrpKjB0kqYoKjhVKZ/Hx6ArR0czkKfHzBBEs6y40ebIHx2nSmA== @@ -371,43 +103,6 @@ "@smithy/types" "^3.0.0" tslib "^2.6.2" -"@aws-sdk/util-endpoints@3.583.0": - version "3.583.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/util-endpoints/-/util-endpoints-3.583.0.tgz#1554d3b4124be21a72a519603e9727d973845504" - integrity sha512-ZC9mb2jq6BFXPYsUsD2tmYcnlmd+9PGNwnFNn8jk4abna5Jjk2wDknN81ybktmBR5ttN9W8ugmktuKtvAMIDCQ== - dependencies: - "@aws-sdk/types" "3.577.0" - "@smithy/types" "^3.0.0" - "@smithy/util-endpoints" "^2.0.0" - tslib "^2.6.2" - -"@aws-sdk/util-locate-window@^3.0.0": - version "3.568.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/util-locate-window/-/util-locate-window-3.568.0.tgz#2acc4b2236af0d7494f7e517401ba6b3c4af11ff" - integrity sha512-3nh4TINkXYr+H41QaPelCceEB2FXP3fxp93YZXB/kqJvX0U9j0N0Uk45gvsjmEPzG8XxkPEeLIfT2I1M7A6Lig== - dependencies: - tslib "^2.6.2" - -"@aws-sdk/util-user-agent-browser@3.577.0": - version "3.577.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.577.0.tgz#d4d2cdb3a2b3d1c8b35f239ee9f7b2c87bee66ea" - integrity sha512-zEAzHgR6HWpZOH7xFgeJLc6/CzMcx4nxeQolZxVZoB5pPaJd3CjyRhZN0xXeZB0XIRCWmb4yJBgyiugXLNMkLA== - dependencies: - "@aws-sdk/types" "3.577.0" - "@smithy/types" "^3.0.0" - bowser 
"^2.11.0" - tslib "^2.6.2" - -"@aws-sdk/util-user-agent-node@3.577.0": - version "3.577.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.577.0.tgz#0215ea10ead622a61b575a7181a4c51ae8e71449" - integrity sha512-XqvtFjbSMtycZTWVwDe8DRWovuoMbA54nhUoZwVU6rW9OSD6NZWGR512BUGHFaWzW0Wg8++Dj10FrKTG2XtqfA== - dependencies: - "@aws-sdk/types" "3.577.0" - "@smithy/node-config-provider" "^3.0.0" - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - "@aws-sdk/util-utf8-browser@^3.0.0": version "3.259.0" resolved "https://registry.yarnpkg.com/@aws-sdk/util-utf8-browser/-/util-utf8-browser-3.259.0.tgz#3275a6f5eb334f96ca76635b961d3c50259fd9ff" @@ -8506,50 +8201,6 @@ "@types/node" ">=18.0.0" axios "^1.6.0" -"@smithy/abort-controller@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/abort-controller/-/abort-controller-3.0.0.tgz#5815f5d4618e14bf8d031bb98a99adabbb831168" - integrity sha512-p6GlFGBt9K4MYLu72YuJ523NVR4A8oHlC5M2JO6OmQqN8kAc/uh1JqLE+FizTokrSJGg0CSvC+BrsmGzKtsZKA== - dependencies: - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@smithy/config-resolver@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/config-resolver/-/config-resolver-3.0.0.tgz#d37b31e3202c5ce54d9bd2406dcde7c7b5073cbd" - integrity sha512-2GzOfADwYLQugYkKQhIyZyQlM05K+tMKvRnc6eFfZcpJGRfKoMUMYdPlBKmqHwQFXQKBrGV6cxL9oymWgDzvFw== - dependencies: - "@smithy/node-config-provider" "^3.0.0" - "@smithy/types" "^3.0.0" - "@smithy/util-config-provider" "^3.0.0" - "@smithy/util-middleware" "^3.0.0" - tslib "^2.6.2" - -"@smithy/core@^2.0.1": - version "2.0.1" - resolved "https://registry.yarnpkg.com/@smithy/core/-/core-2.0.1.tgz#8a7ac8faa0227912ce260bc3f976a5e254323920" - integrity sha512-rcMkjvwxH/bER+oZUPR0yTA0ELD6m3A+d92+CFkdF6HJFCBB1bXo7P5pm21L66XwTN01B6bUhSCQ7cymWRD8zg== - dependencies: - "@smithy/middleware-endpoint" "^3.0.0" - "@smithy/middleware-retry" "^3.0.1" - "@smithy/middleware-serde" "^3.0.0" - "@smithy/protocol-http" 
"^4.0.0" - "@smithy/smithy-client" "^3.0.1" - "@smithy/types" "^3.0.0" - "@smithy/util-middleware" "^3.0.0" - tslib "^2.6.2" - -"@smithy/credential-provider-imds@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/credential-provider-imds/-/credential-provider-imds-3.0.0.tgz#a290eb0224ef045742e5c806685cf63d44a084f3" - integrity sha512-lfmBiFQcA3FsDAPxNfY0L7CawcWtbyWsBOHo34nF095728JLkBX4Y9q/VPPE2r7fqMVK+drmDigqE2/SSQeVRA== - dependencies: - "@smithy/node-config-provider" "^3.0.0" - "@smithy/property-provider" "^3.0.0" - "@smithy/types" "^3.0.0" - "@smithy/url-parser" "^3.0.0" - tslib "^2.6.2" - "@smithy/eventstream-codec@^3.0.0": version "3.0.0" resolved "https://registry.yarnpkg.com/@smithy/eventstream-codec/-/eventstream-codec-3.0.0.tgz#81d30391220f73d41f432f65384b606d67673e46" @@ -8578,42 +8229,6 @@ "@smithy/types" "^3.0.0" tslib "^2.6.2" -"@smithy/fetch-http-handler@^3.0.1": - version "3.0.1" - resolved "https://registry.yarnpkg.com/@smithy/fetch-http-handler/-/fetch-http-handler-3.0.1.tgz#dacfdf6e70d639fac4a0f57c42ce13f0ed14ff22" - integrity sha512-uaH74i5BDj+rBwoQaXioKpI0SHBJFtOVwzrCpxZxphOW0ki5jhj7dXvDMYM2IJem8TpdFvS2iC08sjOblfFGFg== - dependencies: - "@smithy/protocol-http" "^4.0.0" - "@smithy/querystring-builder" "^3.0.0" - "@smithy/types" "^3.0.0" - "@smithy/util-base64" "^3.0.0" - tslib "^2.6.2" - -"@smithy/hash-node@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/hash-node/-/hash-node-3.0.0.tgz#f44b5fff193e241c1cdcc957b296b60f186f0e59" - integrity sha512-84qXstNemP3XS5jcof0el6+bDfjzuvhJPQTEfro3lgtbCtKgzPm3MgiS6ehXVPjeQ5+JS0HqmTz8f/RYfzHVxw== - dependencies: - "@smithy/types" "^3.0.0" - "@smithy/util-buffer-from" "^3.0.0" - "@smithy/util-utf8" "^3.0.0" - tslib "^2.6.2" - -"@smithy/invalid-dependency@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/invalid-dependency/-/invalid-dependency-3.0.0.tgz#21cb6b5203ee15321bfcc751f21f7a19536d4ae8" - integrity 
sha512-F6wBBaEFgJzj0s4KUlliIGPmqXemwP6EavgvDqYwCH40O5Xr2iMHvS8todmGVZtuJCorBkXsYLyTu4PuizVq5g== - dependencies: - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@smithy/is-array-buffer@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@smithy/is-array-buffer/-/is-array-buffer-2.0.0.tgz#8fa9b8040651e7ba0b2f6106e636a91354ff7d34" - integrity sha512-z3PjFjMyZNI98JFRJi/U0nGoLWMSJlDjAW4QUX2WNZLas5C0CmVV6LJ01JI0k90l7FvpmixjWxPFmENSClQ7ug== - dependencies: - tslib "^2.5.0" - "@smithy/is-array-buffer@^3.0.0": version "3.0.0" resolved "https://registry.yarnpkg.com/@smithy/is-array-buffer/-/is-array-buffer-3.0.0.tgz#9a95c2d46b8768946a9eec7f935feaddcffa5e7a" @@ -8621,153 +8236,6 @@ dependencies: tslib "^2.6.2" -"@smithy/middleware-content-length@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/middleware-content-length/-/middleware-content-length-3.0.0.tgz#084b3d22248967885d496eb0b105d9090e8ababd" - integrity sha512-3C4s4d/iGobgCtk2tnWW6+zSTOBg1PRAm2vtWZLdriwTroFbbWNSr3lcyzHdrQHnEXYCC5K52EbpfodaIUY8sg== - dependencies: - "@smithy/protocol-http" "^4.0.0" - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@smithy/middleware-endpoint@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/middleware-endpoint/-/middleware-endpoint-3.0.0.tgz#54c9e1bd8f35b7d004c803eaf3702e61e32b8295" - integrity sha512-aXOAWztw/5qAfp0NcA2OWpv6ZI/E+Dh9mByif7i91D/0iyYNUcKvskmXiowKESFkuZ7PIMd3VOR4fTibZDs2OQ== - dependencies: - "@smithy/middleware-serde" "^3.0.0" - "@smithy/node-config-provider" "^3.0.0" - "@smithy/shared-ini-file-loader" "^3.0.0" - "@smithy/types" "^3.0.0" - "@smithy/url-parser" "^3.0.0" - "@smithy/util-middleware" "^3.0.0" - tslib "^2.6.2" - -"@smithy/middleware-retry@^3.0.1": - version "3.0.1" - resolved "https://registry.yarnpkg.com/@smithy/middleware-retry/-/middleware-retry-3.0.1.tgz#167b75e9b79395f11a799f22030eaaf7d40da410" - integrity 
sha512-hBhSEuL841FhJBK/19WpaGk5YWSzFk/P2UaVjANGKRv3eYNO8Y1lANWgqnuPWjOyCEWMPr58vELFDWpxvRKANw== - dependencies: - "@smithy/node-config-provider" "^3.0.0" - "@smithy/protocol-http" "^4.0.0" - "@smithy/service-error-classification" "^3.0.0" - "@smithy/smithy-client" "^3.0.1" - "@smithy/types" "^3.0.0" - "@smithy/util-middleware" "^3.0.0" - "@smithy/util-retry" "^3.0.0" - tslib "^2.6.2" - uuid "^9.0.1" - -"@smithy/middleware-serde@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/middleware-serde/-/middleware-serde-3.0.0.tgz#786da6a6bc0e5e51d669dac834c19965245dd302" - integrity sha512-I1vKG1foI+oPgG9r7IMY1S+xBnmAn1ISqployvqkwHoSb8VPsngHDTOgYGYBonuOKndaWRUGJZrKYYLB+Ane6w== - dependencies: - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@smithy/middleware-stack@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/middleware-stack/-/middleware-stack-3.0.0.tgz#00f112bae7af5fc3bd37d4fab95ebce0f17a7774" - integrity sha512-+H0jmyfAyHRFXm6wunskuNAqtj7yfmwFB6Fp37enytp2q047/Od9xetEaUbluyImOlGnGpaVGaVfjwawSr+i6Q== - dependencies: - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@smithy/node-config-provider@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/node-config-provider/-/node-config-provider-3.0.0.tgz#4cd5dcf6132c75d6a582fcd6243482dac703865a" - integrity sha512-buqfaSdDh0zo62EPLf8rGDvcpKwGpO5ho4bXS2cdFhlOta7tBkWJt+O5uiaAeICfIOfPclNOndshDNSanX2X9g== - dependencies: - "@smithy/property-provider" "^3.0.0" - "@smithy/shared-ini-file-loader" "^3.0.0" - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@smithy/node-http-handler@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/node-http-handler/-/node-http-handler-3.0.0.tgz#e771ea95d03e259f04b7b37e8aece8a4fffc8cdc" - integrity sha512-3trD4r7NOMygwLbUJo4eodyQuypAWr7uvPnebNJ9a70dQhVn+US8j/lCnvoJS6BXfZeF7PkkkI0DemVJw+n+eQ== - dependencies: - "@smithy/abort-controller" "^3.0.0" - "@smithy/protocol-http" "^4.0.0" - 
"@smithy/querystring-builder" "^3.0.0" - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@smithy/property-provider@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/property-provider/-/property-provider-3.0.0.tgz#ef7a26557c855cc1471b9aa0e05529183e99b978" - integrity sha512-LmbPgHBswdXCrkWWuUwBm9w72S2iLWyC/5jet9/Y9cGHtzqxi+GVjfCfahkvNV4KXEwgnH8EMpcrD9RUYe0eLQ== - dependencies: - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@smithy/protocol-http@^4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@smithy/protocol-http/-/protocol-http-4.0.0.tgz#04df3b5674b540323f678e7c4113e8abd8b26432" - integrity sha512-qOQZOEI2XLWRWBO9AgIYuHuqjZ2csyr8/IlgFDHDNuIgLAMRx2Bl8ck5U5D6Vh9DPdoaVpuzwWMa0xcdL4O/AQ== - dependencies: - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@smithy/querystring-builder@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/querystring-builder/-/querystring-builder-3.0.0.tgz#48a9aa7b700e8409368c21bc0adf7564e001daea" - integrity sha512-bW8Fi0NzyfkE0TmQphDXr1AmBDbK01cA4C1Z7ggwMAU5RDz5AAv/KmoRwzQAS0kxXNf/D2ALTEgwK0U2c4LtRg== - dependencies: - "@smithy/types" "^3.0.0" - "@smithy/util-uri-escape" "^3.0.0" - tslib "^2.6.2" - -"@smithy/querystring-parser@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/querystring-parser/-/querystring-parser-3.0.0.tgz#fa1ed0cee408cd4d622070fa874bc50ac1a379b7" - integrity sha512-UzHwthk0UEccV4dHzPySnBy34AWw3V9lIqUTxmozQ+wPDAO9csCWMfOLe7V9A2agNYy7xE+Pb0S6K/J23JSzfQ== - dependencies: - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@smithy/service-error-classification@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/service-error-classification/-/service-error-classification-3.0.0.tgz#06a45cb91b15b8b0d5f3b1df2b3743d2ca42f5c4" - integrity sha512-3BsBtOUt2Gsnc3X23ew+r2M71WwtpHfEDGhHYHSDg6q1t8FrWh15jT25DLajFV1H+PpxAJ6gqe9yYeRUsmSdFA== - dependencies: - "@smithy/types" "^3.0.0" - -"@smithy/shared-ini-file-loader@^3.0.0": - 
version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.0.0.tgz#8739b7cd24f55fb4e276a74f00f0c2bb4e3f25d8" - integrity sha512-REVw6XauXk8xE4zo5aGL7Rz4ywA8qNMUn8RtWeTRQsgAlmlvbJ7CEPBcaXU2NDC3AYBgYAXrGyWD8XrN8UGDog== - dependencies: - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@smithy/signature-v4@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/signature-v4/-/signature-v4-3.0.0.tgz#f536d0abebfeeca8e9aab846a4042658ca07d3b7" - integrity sha512-kXFOkNX+BQHe2qnLxpMEaCRGap9J6tUGLzc3A9jdn+nD4JdMwCKTJ+zFwQ20GkY+mAXGatyTw3HcoUlR39HwmA== - dependencies: - "@smithy/is-array-buffer" "^3.0.0" - "@smithy/types" "^3.0.0" - "@smithy/util-hex-encoding" "^3.0.0" - "@smithy/util-middleware" "^3.0.0" - "@smithy/util-uri-escape" "^3.0.0" - "@smithy/util-utf8" "^3.0.0" - tslib "^2.6.2" - -"@smithy/smithy-client@^3.0.1": - version "3.0.1" - resolved "https://registry.yarnpkg.com/@smithy/smithy-client/-/smithy-client-3.0.1.tgz#c440473f6fb5dfbe86eaf015565fc56f66533bb4" - integrity sha512-KAiFY4Y4jdHxR+4zerH/VBhaFKM8pbaVmJZ/CWJRwtM/CmwzTfXfvYwf6GoUwiHepdv+lwiOXCuOl6UBDUEINw== - dependencies: - "@smithy/middleware-endpoint" "^3.0.0" - "@smithy/middleware-stack" "^3.0.0" - "@smithy/protocol-http" "^4.0.0" - "@smithy/types" "^3.0.0" - "@smithy/util-stream" "^3.0.1" - tslib "^2.6.2" - "@smithy/types@^3.0.0": version "3.0.0" resolved "https://registry.yarnpkg.com/@smithy/types/-/types-3.0.0.tgz#00231052945159c64ffd8b91e8909d8d3006cb7e" @@ -8775,46 +8243,6 @@ dependencies: tslib "^2.6.2" -"@smithy/url-parser@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/url-parser/-/url-parser-3.0.0.tgz#5fdc77cd22051c1aac6531be0315bfcba0fa705d" - integrity sha512-2XLazFgUu+YOGHtWihB3FSLAfCUajVfNBXGGYjOaVKjLAuAxx3pSBY3hBgLzIgB17haf59gOG3imKqTy8mcrjw== - dependencies: - "@smithy/querystring-parser" "^3.0.0" - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@smithy/util-base64@^3.0.0": - version 
"3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-base64/-/util-base64-3.0.0.tgz#f7a9a82adf34e27a72d0719395713edf0e493017" - integrity sha512-Kxvoh5Qtt0CDsfajiZOCpJxgtPHXOKwmM+Zy4waD43UoEMA+qPxxa98aE/7ZhdnBFZFXMOiBR5xbcaMhLtznQQ== - dependencies: - "@smithy/util-buffer-from" "^3.0.0" - "@smithy/util-utf8" "^3.0.0" - tslib "^2.6.2" - -"@smithy/util-body-length-browser@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-body-length-browser/-/util-body-length-browser-3.0.0.tgz#86ec2f6256310b4845a2f064e2f571c1ca164ded" - integrity sha512-cbjJs2A1mLYmqmyVl80uoLTJhAcfzMOyPgjwAYusWKMdLeNtzmMz9YxNl3/jRLoxSS3wkqkf0jwNdtXWtyEBaQ== - dependencies: - tslib "^2.6.2" - -"@smithy/util-body-length-node@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-body-length-node/-/util-body-length-node-3.0.0.tgz#99a291bae40d8932166907fe981d6a1f54298a6d" - integrity sha512-Tj7pZ4bUloNUP6PzwhN7K386tmSmEET9QtQg0TgdNOnxhZvCssHji+oZTUIuzxECRfG8rdm2PMw2WCFs6eIYkA== - dependencies: - tslib "^2.6.2" - -"@smithy/util-buffer-from@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-buffer-from/-/util-buffer-from-2.0.0.tgz#7eb75d72288b6b3001bc5f75b48b711513091deb" - integrity sha512-/YNnLoHsR+4W4Vf2wL5lGv0ksg8Bmk3GEGxn2vEQt52AQaPSCuaO5PM5VM7lP1K9qHRKHwrPGktqVoAHKWHxzw== - dependencies: - "@smithy/is-array-buffer" "^2.0.0" - tslib "^2.5.0" - "@smithy/util-buffer-from@^3.0.0": version "3.0.0" resolved "https://registry.yarnpkg.com/@smithy/util-buffer-from/-/util-buffer-from-3.0.0.tgz#559fc1c86138a89b2edaefc1e6677780c24594e3" @@ -8823,46 +8251,6 @@ "@smithy/is-array-buffer" "^3.0.0" tslib "^2.6.2" -"@smithy/util-config-provider@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-config-provider/-/util-config-provider-3.0.0.tgz#62c6b73b22a430e84888a8f8da4b6029dd5b8efe" - integrity sha512-pbjk4s0fwq3Di/ANL+rCvJMKM5bzAQdE5S/6RL5NXgMExFAi6UgQMPOm5yPaIWPpr+EOXKXRonJ3FoxKf4mCJQ== - 
dependencies: - tslib "^2.6.2" - -"@smithy/util-defaults-mode-browser@^3.0.1": - version "3.0.1" - resolved "https://registry.yarnpkg.com/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-3.0.1.tgz#0ba33ec90f6dd311599bed3a3dd604f3adba9acd" - integrity sha512-nW5kEzdJn1Bn5TF+gOPHh2rcPli8JU9vSSXLbfg7uPnfR1TMRQqs9zlYRhIb87NeSxIbpdXOI94tvXSy+fvDYg== - dependencies: - "@smithy/property-provider" "^3.0.0" - "@smithy/smithy-client" "^3.0.1" - "@smithy/types" "^3.0.0" - bowser "^2.11.0" - tslib "^2.6.2" - -"@smithy/util-defaults-mode-node@^3.0.1": - version "3.0.1" - resolved "https://registry.yarnpkg.com/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-3.0.1.tgz#71242a6978240a6f559445d4cc26f2cce91c90e1" - integrity sha512-TFk+Qb+elLc/MOhtSp+50fstyfZ6avQbgH2d96xUBpeScu+Al9elxv+UFAjaTHe0HQe5n+wem8ZLpXvU8lwV6Q== - dependencies: - "@smithy/config-resolver" "^3.0.0" - "@smithy/credential-provider-imds" "^3.0.0" - "@smithy/node-config-provider" "^3.0.0" - "@smithy/property-provider" "^3.0.0" - "@smithy/smithy-client" "^3.0.1" - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@smithy/util-endpoints@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-endpoints/-/util-endpoints-2.0.0.tgz#5a16a723c1220f536a9b1b3e01787e69e77b6f12" - integrity sha512-+exaXzEY3DNt2qtA2OtRNSDlVrE4p32j1JSsQkzA5AdP0YtJNjkYbYhJxkFmPYcjI1abuwopOZCwUmv682QkiQ== - dependencies: - "@smithy/node-config-provider" "^3.0.0" - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - "@smithy/util-hex-encoding@^3.0.0": version "3.0.0" resolved "https://registry.yarnpkg.com/@smithy/util-hex-encoding/-/util-hex-encoding-3.0.0.tgz#32938b33d5bf2a15796cd3f178a55b4155c535e6" @@ -8870,52 +8258,6 @@ dependencies: tslib "^2.6.2" -"@smithy/util-middleware@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-middleware/-/util-middleware-3.0.0.tgz#64d775628b99a495ca83ce982f5c83aa45f1e894" - integrity 
sha512-q5ITdOnV2pXHSVDnKWrwgSNTDBAMHLptFE07ua/5Ty5WJ11bvr0vk2a7agu7qRhrCFRQlno5u3CneU5EELK+DQ== - dependencies: - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@smithy/util-retry@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-retry/-/util-retry-3.0.0.tgz#8a0c47496aab74e1dfde4905d462ad636a8824bb" - integrity sha512-nK99bvJiziGv/UOKJlDvFF45F00WgPLKVIGUfAK+mDhzVN2hb/S33uW2Tlhg5PVBoqY7tDVqL0zmu4OxAHgo9g== - dependencies: - "@smithy/service-error-classification" "^3.0.0" - "@smithy/types" "^3.0.0" - tslib "^2.6.2" - -"@smithy/util-stream@^3.0.1": - version "3.0.1" - resolved "https://registry.yarnpkg.com/@smithy/util-stream/-/util-stream-3.0.1.tgz#3cf527bcd3fec82c231c38d47dd75f3364747edb" - integrity sha512-7F7VNNhAsfMRA8I986YdOY5fE0/T1/ZjFF6OLsqkvQVNP3vZ/szYDfGCyphb7ioA09r32K/0qbSFfNFU68aSzA== - dependencies: - "@smithy/fetch-http-handler" "^3.0.1" - "@smithy/node-http-handler" "^3.0.0" - "@smithy/types" "^3.0.0" - "@smithy/util-base64" "^3.0.0" - "@smithy/util-buffer-from" "^3.0.0" - "@smithy/util-hex-encoding" "^3.0.0" - "@smithy/util-utf8" "^3.0.0" - tslib "^2.6.2" - -"@smithy/util-uri-escape@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-uri-escape/-/util-uri-escape-3.0.0.tgz#e43358a78bf45d50bb736770077f0f09195b6f54" - integrity sha512-LqR7qYLgZTD7nWLBecUi4aqolw8Mhza9ArpNEQ881MJJIU2sE5iHCK6TdyqqzcDLy0OPe10IY4T8ctVdtynubg== - dependencies: - tslib "^2.6.2" - -"@smithy/util-utf8@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-utf8/-/util-utf8-2.0.0.tgz#b4da87566ea7757435e153799df9da717262ad42" - integrity sha512-rctU1VkziY84n5OXe3bPNpKR001ZCME2JCaBBFgtiM2hfKbHFudc/BkMuPab8hRbLd0j3vbnBTTZ1igBf0wgiQ== - dependencies: - "@smithy/util-buffer-from" "^2.0.0" - tslib "^2.5.0" - "@smithy/util-utf8@^3.0.0": version "3.0.0" resolved "https://registry.yarnpkg.com/@smithy/util-utf8/-/util-utf8-3.0.0.tgz#1a6a823d47cbec1fd6933e5fc87df975286d9d6a" @@ -13761,11 +13103,6 @@ 
bowser@^1.7.3: resolved "https://registry.yarnpkg.com/bowser/-/bowser-1.9.4.tgz#890c58a2813a9d3243704334fa81b96a5c150c9a" integrity sha512-9IdMmj2KjigRq6oWhmwv1W36pDuA4STQZ8q6YO9um+x07xgYNCD3Oou+WP/3L1HNz7iqythGet3/p4wvc8AAwQ== -bowser@^2.11.0: - version "2.11.0" - resolved "https://registry.yarnpkg.com/bowser/-/bowser-2.11.0.tgz#5ca3c35757a7aa5771500c70a73a9f91ef420a8f" - integrity sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA== - boxen@^5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/boxen/-/boxen-5.1.2.tgz#788cb686fc83c1f486dfa8a40c68fc2b831d2b50" @@ -18246,13 +17583,6 @@ fast-stream-to-buffer@^1.0.0: dependencies: end-of-stream "^1.4.1" -fast-xml-parser@4.2.5: - version "4.2.5" - resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-4.2.5.tgz#a6747a09296a6cb34f2ae634019bf1738f3b421f" - integrity sha512-B9/wizE4WngqQftFPmdaMYlXoJlJOYxGQOanC77fq9k8+Z0v5dDSVh+3glErdIROP//s/jgb7ZuxKfB8nVyo0g== - dependencies: - strnum "^1.0.5" - fastest-levenshtein@^1.0.12: version "1.0.12" resolved "https://registry.yarnpkg.com/fastest-levenshtein/-/fastest-levenshtein-1.0.12.tgz#9990f7d3a88cc5a9ffd1f1745745251700d497e2" @@ -29990,11 +29320,6 @@ strip-json-comments@~2.0.1: resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a" integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo= -strnum@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/strnum/-/strnum-1.0.5.tgz#5c4e829fe15ad4ff0d20c3db5ac97b73c9b072db" - integrity sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA== - style-loader@^1.1.3, style-loader@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-1.3.0.tgz#828b4a3b3b7e7aa5847ce7bae9e874512114249e" From e32fc9d75bb1d24f5b58264f571a2c66dcc4d2c6 Mon Sep 17 00:00:00 2001 From: kibanamachine 
<42973632+kibanamachine@users.noreply.github.com> Date: Tue, 4 Jun 2024 18:50:22 +0000 Subject: [PATCH 45/62] [CI] Auto-commit changed files from 'node scripts/lint_ts_projects --fix' --- x-pack/plugins/integration_assistant/tsconfig.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/x-pack/plugins/integration_assistant/tsconfig.json b/x-pack/plugins/integration_assistant/tsconfig.json index 95d323dce6d5b..67a763a73f7f3 100644 --- a/x-pack/plugins/integration_assistant/tsconfig.json +++ b/x-pack/plugins/integration_assistant/tsconfig.json @@ -16,6 +16,7 @@ "@kbn/config-schema", "@kbn/langchain", "@kbn/core-elasticsearch-server", - "@kbn/actions-plugin" + "@kbn/actions-plugin", + "@kbn/data-plugin" ] } From c5678e38c4bd66747c86a78a599352c221fcd20a Mon Sep 17 00:00:00 2001 From: kibanamachine <42973632+kibanamachine@users.noreply.github.com> Date: Tue, 4 Jun 2024 19:54:31 +0000 Subject: [PATCH 46/62] [CI] Auto-commit changed files from 'yarn openapi:generate' --- .../rule_management/crud/create_rule/create_rule_route.gen.ts | 2 +- .../rule_management/crud/patch_rule/patch_rule_route.gen.ts | 2 +- .../rule_management/crud/update_rule/update_rule_route.gen.ts | 2 +- .../rule_management/read_tags/read_tags_route.gen.ts | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/create_rule/create_rule_route.gen.ts b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/create_rule/create_rule_route.gen.ts index b11a300523966..4f0af497e4986 100644 --- a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/create_rule/create_rule_route.gen.ts +++ b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/create_rule/create_rule_route.gen.ts @@ -5,7 +5,7 @@ * 2.0. */ -import type { z } from 'zod'; +import { z } from 'zod'; /* * NOTICE: Do not edit this file manually. 
diff --git a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/patch_rule/patch_rule_route.gen.ts b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/patch_rule/patch_rule_route.gen.ts index a404eb652988a..1d1025d39ff04 100644 --- a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/patch_rule/patch_rule_route.gen.ts +++ b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/patch_rule/patch_rule_route.gen.ts @@ -5,7 +5,7 @@ * 2.0. */ -import type { z } from 'zod'; +import { z } from 'zod'; /* * NOTICE: Do not edit this file manually. diff --git a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/update_rule/update_rule_route.gen.ts b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/update_rule/update_rule_route.gen.ts index faa285a8c62f1..2cab78d432a32 100644 --- a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/update_rule/update_rule_route.gen.ts +++ b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/update_rule/update_rule_route.gen.ts @@ -5,7 +5,7 @@ * 2.0. */ -import type { z } from 'zod'; +import { z } from 'zod'; /* * NOTICE: Do not edit this file manually. diff --git a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/read_tags/read_tags_route.gen.ts b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/read_tags/read_tags_route.gen.ts index 403653a5beda0..0f997aaae5ef4 100644 --- a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/read_tags/read_tags_route.gen.ts +++ b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/read_tags/read_tags_route.gen.ts @@ -5,7 +5,7 @@ * 2.0. */ -import type { z } from 'zod'; +import { z } from 'zod'; /* * NOTICE: Do not edit this file manually. 
From bce115deffd166f81a301dec7a6884325dcf78df Mon Sep 17 00:00:00 2001 From: Patryk Kopycinski Date: Tue, 4 Jun 2024 23:38:53 +0200 Subject: [PATCH 47/62] fix --- .../connector_types.test.ts.snap | 26 +++++++++++++++++++ .../server/graphs/categorization/graph.ts | 10 +++++-- .../server/graphs/ecs/graph.ts | 7 +++-- .../server/graphs/related/graph.ts | 10 +++++-- 4 files changed, 47 insertions(+), 6 deletions(-) diff --git a/x-pack/plugins/actions/server/integration_tests/__snapshots__/connector_types.test.ts.snap b/x-pack/plugins/actions/server/integration_tests/__snapshots__/connector_types.test.ts.snap index b41e9ef70df78..7897f6492fbc5 100644 --- a/x-pack/plugins/actions/server/integration_tests/__snapshots__/connector_types.test.ts.snap +++ b/x-pack/plugins/actions/server/integration_tests/__snapshots__/connector_types.test.ts.snap @@ -212,6 +212,19 @@ Object { "presence": "optional", }, "keys": Object { + "maxTokens": Object { + "flags": Object { + "default": [Function], + "error": [Function], + "presence": "optional", + }, + "metas": Array [ + Object { + "x-oas-optional": true, + }, + ], + "type": "number", + }, "messages": Object { "flags": Object { "error": [Function], @@ -399,6 +412,19 @@ Object { "presence": "optional", }, "keys": Object { + "maxTokens": Object { + "flags": Object { + "default": [Function], + "error": [Function], + "presence": "optional", + }, + "metas": Array [ + Object { + "x-oas-optional": true, + }, + ], + "type": "number", + }, "messages": Object { "flags": Object { "error": [Function], diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts index 2d4cba2426c87..4a9ac93ab65a1 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts @@ -7,7 +7,10 @@ import type { IScopedClusterClient } from 
'@kbn/core-elasticsearch-server'; import type { StateGraphArgs } from '@langchain/langgraph'; import { StateGraph, END, START } from '@langchain/langgraph'; -import type { BedrockChat } from '@kbn/langchain/server/language_models'; +import type { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; import type { CategorizationState } from '../../types'; import { modifySamples, formatSamples } from '../../util/samples'; import { handleCategorization } from './categorization'; @@ -145,7 +148,10 @@ function chainRouter(state: CategorizationState): string { return END; } -export async function getCategorizationGraph(client: IScopedClusterClient, model: BedrockChat) { +export async function getCategorizationGraph( + client: IScopedClusterClient, + model: ActionsClientChatOpenAI | ActionsClientSimpleChatModel +) { const workflow = new StateGraph({ channels: graphState, }) diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts index 6d598d0ebd8a5..8c7347a8b5058 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts @@ -7,7 +7,10 @@ import type { StateGraphArgs } from '@langchain/langgraph'; import { StateGraph, END, START } from '@langchain/langgraph'; -import type { BedrockChat } from '@kbn/langchain/server/language_models'; +import type { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; import { ECS_EXAMPLE_ANSWER, ECS_FIELDS } from './constants'; import { modifySamples, mergeSamples } from '../../util/samples'; import { createPipeline } from './pipeline'; @@ -137,7 +140,7 @@ function chainRouter(state: EcsMappingState): string { return END; } -export async function getEcsGraph(model: BedrockChat) { +export async function getEcsGraph(model: ActionsClientChatOpenAI | 
ActionsClientSimpleChatModel) { const workflow = new StateGraph({ channels: graphState, }) diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts index f1f2bd5acc4b0..8170a145b08d8 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts @@ -7,7 +7,10 @@ import type { IScopedClusterClient } from '@kbn/core-elasticsearch-server'; import type { StateGraphArgs } from '@langchain/langgraph'; import { StateGraph, END, START } from '@langchain/langgraph'; -import type { BedrockChat } from '@kbn/langchain/server/language_models'; +import type { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; import type { RelatedState } from '../../types'; import { modifySamples, formatSamples } from '../../util/samples'; import { handleValidatePipeline } from '../../util/graph'; @@ -133,7 +136,10 @@ function chainRouter(state: RelatedState): string { return END; } -export async function getRelatedGraph(client: IScopedClusterClient, model: BedrockChat) { +export async function getRelatedGraph( + client: IScopedClusterClient, + model: ActionsClientChatOpenAI | ActionsClientSimpleChatModel +) { const workflow = new StateGraph({ channels: graphState }) .addNode('modelInput', modelInput) .addNode('modelOutput', modelOutput) From d42f6fd6bc22fef5cf5c6c9bc37d4f20e32b543b Mon Sep 17 00:00:00 2001 From: Bharat Pasupula Date: Wed, 5 Jun 2024 06:54:15 +0200 Subject: [PATCH 48/62] fix eslint issue --- .../integration_assistant/server/graphs/ecs/constants.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts index 036e269eca11b..607655541ca9d 100644 --- 
a/x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/constants.ts @@ -6,7 +6,7 @@ */ interface EcsFields { - [key: string]: unknown; + [key: string]: string; } export const ECS_TYPES: EcsFields = { From e0b9cd8e2c7bc0ae5f4e07ee3dfd047fd6a4dd4f Mon Sep 17 00:00:00 2001 From: Bharat Pasupula Date: Fri, 7 Jun 2024 08:45:21 +0200 Subject: [PATCH 49/62] Add versioned routes and make the APIs internal --- .../server/routes/build_integration_routes.ts | 95 ++++++------ .../server/routes/categorization_routes.ts | 129 ++++++++-------- .../server/routes/ecs_routes.ts | 138 +++++++++--------- .../server/routes/pipeline_routes.ts | 73 +++++---- .../server/routes/related_routes.ts | 127 ++++++++-------- 5 files changed, 302 insertions(+), 260 deletions(-) diff --git a/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts b/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts index 319ee8f7cb55c..191f189bacf8d 100644 --- a/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/build_integration_routes.ts @@ -5,61 +5,68 @@ * 2.0. 
*/ -import type { IRouter } from '@kbn/core/server'; import { schema } from '@kbn/config-schema'; +import type { IRouter } from '@kbn/core/server'; +import type { BuildIntegrationApiRequest } from '../../common'; import { INTEGRATION_BUILDER_PATH } from '../../common'; import { buildPackage } from '../integration_builder'; -import type { BuildIntegrationApiRequest } from '../../common'; import type { IntegrationAssistantRouteHandlerContext } from '../plugin'; export function registerIntegrationBuilderRoutes( router: IRouter ) { - router.post( - { - path: `${INTEGRATION_BUILDER_PATH}`, - validate: { - body: schema.object({ - integration: schema.object({ - name: schema.string(), - title: schema.string(), - description: schema.string(), - logo: schema.maybe(schema.string()), - dataStreams: schema.arrayOf( - schema.object({ + router.versioned + .post({ + path: INTEGRATION_BUILDER_PATH, + access: 'internal', + }) + .addVersion( + { + version: '1', + validate: { + request: { + body: schema.object({ + integration: schema.object({ name: schema.string(), title: schema.string(), description: schema.string(), - inputTypes: schema.arrayOf(schema.string()), - rawSamples: schema.arrayOf(schema.string()), - pipeline: schema.object({ - name: schema.maybe(schema.string()), - description: schema.maybe(schema.string()), - version: schema.maybe(schema.number()), - processors: schema.arrayOf( - schema.recordOf(schema.string(), schema.object({}, { unknowns: 'allow' })) - ), - on_failure: schema.maybe( - schema.arrayOf( - schema.recordOf(schema.string(), schema.object({}, { unknowns: 'allow' })) - ) - ), - }), - docs: schema.arrayOf(schema.object({}, { unknowns: 'allow' })), - }) - ), - }), - }), + logo: schema.maybe(schema.string()), + dataStreams: schema.arrayOf( + schema.object({ + name: schema.string(), + title: schema.string(), + description: schema.string(), + inputTypes: schema.arrayOf(schema.string()), + rawSamples: schema.arrayOf(schema.string()), + pipeline: schema.object({ + 
name: schema.maybe(schema.string()), + description: schema.maybe(schema.string()), + version: schema.maybe(schema.number()), + processors: schema.arrayOf( + schema.recordOf(schema.string(), schema.object({}, { unknowns: 'allow' })) + ), + on_failure: schema.maybe( + schema.arrayOf( + schema.recordOf(schema.string(), schema.object({}, { unknowns: 'allow' })) + ) + ), + }), + docs: schema.arrayOf(schema.object({}, { unknowns: 'allow' })), + }) + ), + }), + }), + }, + }, }, - }, - async (_, req, res) => { - const { integration } = req.body as BuildIntegrationApiRequest; - try { - const zippedIntegration = await buildPackage(integration); - return res.custom({ statusCode: 200, body: zippedIntegration }); - } catch (e) { - return res.customError({ statusCode: 500, body: e }); + async (_, request, response) => { + const { integration } = request.body as BuildIntegrationApiRequest; + try { + const zippedIntegration = await buildPackage(integration); + return response.custom({ statusCode: 200, body: zippedIntegration }); + } catch (e) { + return response.customError({ statusCode: 500, body: e }); + } } - } - ); + ); } diff --git a/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts b/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts index 3bb3b697d47c2..4feb1c8b3bd8f 100644 --- a/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/categorization_routes.ts @@ -5,88 +5,95 @@ * 2.0. 
*/ -import type { IRouter } from '@kbn/core/server'; import { schema } from '@kbn/config-schema'; +import type { IRouter } from '@kbn/core/server'; +import { getRequestAbortedSignal } from '@kbn/data-plugin/server'; import { ActionsClientChatOpenAI, ActionsClientSimpleChatModel, } from '@kbn/langchain/server/language_models'; -import { getRequestAbortedSignal } from '@kbn/data-plugin/server'; -import { CATEGORIZATION_GRAPH_PATH } from '../../common'; import type { CategorizationApiRequest, CategorizationApiResponse } from '../../common'; -import { getCategorizationGraph } from '../graphs/categorization'; +import { CATEGORIZATION_GRAPH_PATH } from '../../common'; import { ROUTE_HANDLER_TIMEOUT } from '../constants'; +import { getCategorizationGraph } from '../graphs/categorization'; import type { IntegrationAssistantRouteHandlerContext } from '../plugin'; export function registerCategorizationRoutes( router: IRouter ) { - router.post( - { - path: `${CATEGORIZATION_GRAPH_PATH}`, + router.versioned + .post({ + path: CATEGORIZATION_GRAPH_PATH, + access: 'internal', options: { timeout: { idleSocket: ROUTE_HANDLER_TIMEOUT, }, }, - validate: { - body: schema.object({ - packageName: schema.string(), - dataStreamName: schema.string(), - rawSamples: schema.arrayOf(schema.string()), - currentPipeline: schema.any(), - connectorId: schema.maybe(schema.string()), - model: schema.maybe(schema.string()), - region: schema.maybe(schema.string()), - }), + }) + .addVersion( + { + version: '1', + validate: { + request: { + body: schema.object({ + packageName: schema.string(), + dataStreamName: schema.string(), + rawSamples: schema.arrayOf(schema.string()), + currentPipeline: schema.any(), + connectorId: schema.maybe(schema.string()), + model: schema.maybe(schema.string()), + region: schema.maybe(schema.string()), + }), + }, + }, }, - }, - async (context, req, res) => { - const { packageName, dataStreamName, rawSamples, currentPipeline } = - req.body as CategorizationApiRequest; + async 
(context, req, res) => { + const { packageName, dataStreamName, rawSamples, currentPipeline } = + req.body as CategorizationApiRequest; - const services = await context.resolve(['core']); - const { client } = services.core.elasticsearch; - const { getStartServices, logger } = await context.integrationAssistant; - const [, { actions: actionsPlugin }] = await getStartServices(); - const actionsClient = await actionsPlugin.getActionsClientWithRequest(req); - const connector = req.body.connectorId - ? await actionsClient.get({ id: req.body.connectorId }) - : (await actionsClient.getAll()).filter( - (connectorItem) => connectorItem.actionTypeId === '.bedrock' - )[0]; + const services = await context.resolve(['core']); + const { client } = services.core.elasticsearch; + const { getStartServices, logger } = await context.integrationAssistant; + const [, { actions: actionsPlugin }] = await getStartServices(); + const actionsClient = await actionsPlugin.getActionsClientWithRequest(req); + const connector = req.body.connectorId + ? await actionsClient.get({ id: req.body.connectorId }) + : (await actionsClient.getAll()).filter( + (connectorItem) => connectorItem.actionTypeId === '.bedrock' + )[0]; - const abortSignal = getRequestAbortedSignal(req.events.aborted$); - const isOpenAI = connector.actionTypeId === '.gen-ai'; - const llmClass = isOpenAI ? ActionsClientChatOpenAI : ActionsClientSimpleChatModel; + const abortSignal = getRequestAbortedSignal(req.events.aborted$); + const isOpenAI = connector.actionTypeId === '.gen-ai'; + const llmClass = isOpenAI ? ActionsClientChatOpenAI : ActionsClientSimpleChatModel; - const model = new llmClass({ - actions: actionsPlugin, - connectorId: connector.id, - request: req, - logger, - llmType: isOpenAI ? 
'openai' : 'bedrock', - model: req.body.model || connector.config?.defaultModel, - temperature: 0.05, - maxTokens: 4096, - signal: abortSignal, - streaming: false, - }); + const model = new llmClass({ + actions: actionsPlugin, + connectorId: connector.id, + request: req, + logger, + llmType: isOpenAI ? 'openai' : 'bedrock', + model: req.body.model || connector.config?.defaultModel, + temperature: 0.05, + maxTokens: 4096, + signal: abortSignal, + streaming: false, + }); - const graph = await getCategorizationGraph(client, model); - let results = { results: { docs: {}, pipeline: {} } }; - try { - results = (await graph.invoke({ - packageName, - dataStreamName, - rawSamples, - currentPipeline, - })) as CategorizationApiResponse; - } catch (e) { - return res.badRequest({ body: e }); - } + const graph = await getCategorizationGraph(client, model); + let results = { results: { docs: {}, pipeline: {} } }; + try { + results = (await graph.invoke({ + packageName, + dataStreamName, + rawSamples, + currentPipeline, + })) as CategorizationApiResponse; + } catch (e) { + return res.badRequest({ body: e }); + } - return res.ok({ body: results }); - } - ); + return res.ok({ body: results }); + } + ); } diff --git a/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts b/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts index 0a7e18f138850..d62e31389af46 100644 --- a/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/ecs_routes.ts @@ -5,91 +5,99 @@ * 2.0. 
*/ -import type { IRouter } from '@kbn/core/server'; import { schema } from '@kbn/config-schema'; +import type { IRouter } from '@kbn/core/server'; +import { getRequestAbortedSignal } from '@kbn/data-plugin/server'; import { ActionsClientChatOpenAI, ActionsClientSimpleChatModel, } from '@kbn/langchain/server/language_models'; -import { getRequestAbortedSignal } from '@kbn/data-plugin/server'; import { ECS_GRAPH_PATH } from '../../common'; import type { EcsMappingApiRequest, EcsMappingApiResponse } from '../../common/types'; -import { getEcsGraph } from '../graphs/ecs'; import { ROUTE_HANDLER_TIMEOUT } from '../constants'; +import { getEcsGraph } from '../graphs/ecs'; import type { IntegrationAssistantRouteHandlerContext } from '../plugin'; export function registerEcsRoutes(router: IRouter) { - router.post( - { - path: `${ECS_GRAPH_PATH}`, + router.versioned + .post({ + path: ECS_GRAPH_PATH, + access: 'internal', options: { timeout: { idleSocket: ROUTE_HANDLER_TIMEOUT, }, }, - validate: { - body: schema.object({ - packageName: schema.string(), - dataStreamName: schema.string(), - rawSamples: schema.arrayOf(schema.string()), - // TODO: This is a single nested object of any key or shape, any better schema? - mapping: schema.maybe(schema.any()), - connectorId: schema.maybe(schema.string()), - region: schema.maybe(schema.string()), - model: schema.maybe(schema.string()), - }), + }) + .addVersion( + { + version: '1', + validate: { + request: { + body: schema.object({ + packageName: schema.string(), + dataStreamName: schema.string(), + rawSamples: schema.arrayOf(schema.string()), + // TODO: This is a single nested object of any key or shape, any better schema? 
+ mapping: schema.maybe(schema.any()), + connectorId: schema.maybe(schema.string()), + region: schema.maybe(schema.string()), + model: schema.maybe(schema.string()), + }), + }, + }, }, - }, - async (context, req, res) => { - const { packageName, dataStreamName, rawSamples, mapping } = req.body as EcsMappingApiRequest; + async (context, req, res) => { + const { packageName, dataStreamName, rawSamples, mapping } = + req.body as EcsMappingApiRequest; - const { getStartServices, logger } = await context.integrationAssistant; - const [, { actions: actionsPlugin }] = await getStartServices(); - const actionsClient = await actionsPlugin.getActionsClientWithRequest(req); - const connector = req.body.connectorId - ? await actionsClient.get({ id: req.body.connectorId }) - : (await actionsClient.getAll()).filter( - (connectorItem) => connectorItem.actionTypeId === '.bedrock' - )[0]; + const { getStartServices, logger } = await context.integrationAssistant; + const [, { actions: actionsPlugin }] = await getStartServices(); + const actionsClient = await actionsPlugin.getActionsClientWithRequest(req); + const connector = req.body.connectorId + ? await actionsClient.get({ id: req.body.connectorId }) + : (await actionsClient.getAll()).filter( + (connectorItem) => connectorItem.actionTypeId === '.bedrock' + )[0]; - const abortSignal = getRequestAbortedSignal(req.events.aborted$); - const isOpenAI = connector.actionTypeId === '.gen-ai'; - const llmClass = isOpenAI ? ActionsClientChatOpenAI : ActionsClientSimpleChatModel; + const abortSignal = getRequestAbortedSignal(req.events.aborted$); + const isOpenAI = connector.actionTypeId === '.gen-ai'; + const llmClass = isOpenAI ? ActionsClientChatOpenAI : ActionsClientSimpleChatModel; - const model = new llmClass({ - actions: actionsPlugin, - connectorId: connector.id, - request: req, - logger, - llmType: isOpenAI ? 
'openai' : 'bedrock', - model: req.body.model || connector.config?.defaultModel, - temperature: 0.05, - maxTokens: 4096, - signal: abortSignal, - streaming: false, - }); + const model = new llmClass({ + actions: actionsPlugin, + connectorId: connector.id, + request: req, + logger, + llmType: isOpenAI ? 'openai' : 'bedrock', + model: req.body.model || connector.config?.defaultModel, + temperature: 0.05, + maxTokens: 4096, + signal: abortSignal, + streaming: false, + }); - const graph = await getEcsGraph(model); - let results = { results: { mapping: {}, pipeline: {} } }; - try { - if (req.body?.mapping) { - results = (await graph.invoke({ - packageName, - dataStreamName, - rawSamples, - mapping, - })) as EcsMappingApiResponse; - } else - results = (await graph.invoke({ - packageName, - dataStreamName, - rawSamples, - })) as EcsMappingApiResponse; - } catch (e) { - return res.badRequest({ body: e }); - } + const graph = await getEcsGraph(model); + let results = { results: { mapping: {}, pipeline: {} } }; + try { + if (req.body?.mapping) { + results = (await graph.invoke({ + packageName, + dataStreamName, + rawSamples, + mapping, + })) as EcsMappingApiResponse; + } else + results = (await graph.invoke({ + packageName, + dataStreamName, + rawSamples, + })) as EcsMappingApiResponse; + } catch (e) { + return res.badRequest({ body: e }); + } - return res.ok({ body: results }); - } - ); + return res.ok({ body: results }); + } + ); } diff --git a/x-pack/plugins/integration_assistant/server/routes/pipeline_routes.ts b/x-pack/plugins/integration_assistant/server/routes/pipeline_routes.ts index a3fc5387ffad4..17e4f667a6df5 100644 --- a/x-pack/plugins/integration_assistant/server/routes/pipeline_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/pipeline_routes.ts @@ -5,43 +5,56 @@ * 2.0. 
*/ -import type { IRouter } from '@kbn/core/server'; import { schema } from '@kbn/config-schema'; +import type { IRouter } from '@kbn/core/server'; import { TEST_PIPELINE_PATH } from '../../common'; -import { testPipeline } from '../util/pipeline'; import type { TestPipelineApiRequest, TestPipelineApiResponse } from '../../common/types'; +import { ROUTE_HANDLER_TIMEOUT } from '../constants'; import type { IntegrationAssistantRouteHandlerContext } from '../plugin'; +import { testPipeline } from '../util/pipeline'; export function registerPipelineRoutes(router: IRouter) { - router.post( - { - path: `${TEST_PIPELINE_PATH}`, - validate: { - body: schema.object({ - pipeline: schema.any(), - rawSamples: schema.arrayOf(schema.string()), - }), + router.versioned + .post({ + path: TEST_PIPELINE_PATH, + access: 'internal', + options: { + timeout: { + idleSocket: ROUTE_HANDLER_TIMEOUT, + }, }, - }, - async (context, req, res) => { - const { rawSamples, currentPipeline } = req.body as TestPipelineApiRequest; - const services = await context.resolve(['core']); - const { client } = services.core.elasticsearch; - let results: TestPipelineApiResponse = { pipelineResults: [], errors: [] }; - try { - results = (await testPipeline( - rawSamples, - currentPipeline, - client - )) as TestPipelineApiResponse; - if (results?.errors && results.errors.length > 0) { - return res.badRequest({ body: JSON.stringify(results.errors) }); + }) + .addVersion( + { + version: '1', + validate: { + request: { + body: schema.object({ + pipeline: schema.any(), + rawSamples: schema.arrayOf(schema.string()), + }), + }, + }, + }, + async (context, req, res) => { + const { rawSamples, currentPipeline } = req.body as TestPipelineApiRequest; + const services = await context.resolve(['core']); + const { client } = services.core.elasticsearch; + let results: TestPipelineApiResponse = { pipelineResults: [], errors: [] }; + try { + results = (await testPipeline( + rawSamples, + currentPipeline, + client + )) as 
TestPipelineApiResponse; + if (results?.errors && results.errors.length > 0) { + return res.badRequest({ body: JSON.stringify(results.errors) }); + } + } catch (e) { + return res.badRequest({ body: e }); } - } catch (e) { - return res.badRequest({ body: e }); - } - return res.ok({ body: results }); - } - ); + return res.ok({ body: results }); + } + ); } diff --git a/x-pack/plugins/integration_assistant/server/routes/related_routes.ts b/x-pack/plugins/integration_assistant/server/routes/related_routes.ts index bd34dc799218f..934b9711027ee 100644 --- a/x-pack/plugins/integration_assistant/server/routes/related_routes.ts +++ b/x-pack/plugins/integration_assistant/server/routes/related_routes.ts @@ -5,8 +5,8 @@ * 2.0. */ -import type { IRouter } from '@kbn/core/server'; import { schema } from '@kbn/config-schema'; +import type { IRouter } from '@kbn/core/server'; import { getRequestAbortedSignal } from '@kbn/data-plugin/server'; import { ActionsClientChatOpenAI, @@ -14,78 +14,85 @@ import { } from '@kbn/langchain/server/language_models'; import { RELATED_GRAPH_PATH } from '../../common'; import type { RelatedApiRequest, RelatedApiResponse } from '../../common/types'; -import { getRelatedGraph } from '../graphs/related'; import { ROUTE_HANDLER_TIMEOUT } from '../constants'; +import { getRelatedGraph } from '../graphs/related'; import type { IntegrationAssistantRouteHandlerContext } from '../plugin'; export function registerRelatedRoutes(router: IRouter) { - router.post( - { - path: `${RELATED_GRAPH_PATH}`, + router.versioned + .post({ + path: RELATED_GRAPH_PATH, + access: 'internal', options: { timeout: { idleSocket: ROUTE_HANDLER_TIMEOUT, }, }, - validate: { - body: schema.object({ - packageName: schema.string(), - dataStreamName: schema.string(), - rawSamples: schema.arrayOf(schema.string()), - // TODO: This is a single nested object of any key or shape, any better schema? 
- currentPipeline: schema.maybe(schema.any()), - connectorId: schema.maybe(schema.string()), - region: schema.maybe(schema.string()), - model: schema.maybe(schema.string()), - }), + }) + .addVersion( + { + version: '1', + validate: { + request: { + body: schema.object({ + packageName: schema.string(), + dataStreamName: schema.string(), + rawSamples: schema.arrayOf(schema.string()), + // TODO: This is a single nested object of any key or shape, any better schema? + currentPipeline: schema.maybe(schema.any()), + connectorId: schema.maybe(schema.string()), + region: schema.maybe(schema.string()), + model: schema.maybe(schema.string()), + }), + }, + }, }, - }, - async (context, req, res) => { - const { packageName, dataStreamName, rawSamples, currentPipeline } = - req.body as RelatedApiRequest; + async (context, req, res) => { + const { packageName, dataStreamName, rawSamples, currentPipeline } = + req.body as RelatedApiRequest; - const services = await context.resolve(['core']); - const { client } = services.core.elasticsearch; - const { getStartServices, logger } = await context.integrationAssistant; - const [, { actions: actionsPlugin }] = await getStartServices(); - const actionsClient = await actionsPlugin.getActionsClientWithRequest(req); - const connector = req.body.connectorId - ? await actionsClient.get({ id: req.body.connectorId }) - : (await actionsClient.getAll()).filter( - (connectorItem) => connectorItem.actionTypeId === '.bedrock' - )[0]; + const services = await context.resolve(['core']); + const { client } = services.core.elasticsearch; + const { getStartServices, logger } = await context.integrationAssistant; + const [, { actions: actionsPlugin }] = await getStartServices(); + const actionsClient = await actionsPlugin.getActionsClientWithRequest(req); + const connector = req.body.connectorId + ? 
await actionsClient.get({ id: req.body.connectorId }) + : (await actionsClient.getAll()).filter( + (connectorItem) => connectorItem.actionTypeId === '.bedrock' + )[0]; - const isOpenAI = connector.actionTypeId === '.gen-ai'; - const llmClass = isOpenAI ? ActionsClientChatOpenAI : ActionsClientSimpleChatModel; - const abortSignal = getRequestAbortedSignal(req.events.aborted$); + const isOpenAI = connector.actionTypeId === '.gen-ai'; + const llmClass = isOpenAI ? ActionsClientChatOpenAI : ActionsClientSimpleChatModel; + const abortSignal = getRequestAbortedSignal(req.events.aborted$); - const model = new llmClass({ - actions: actionsPlugin, - connectorId: connector.id, - request: req, - logger, - llmType: isOpenAI ? 'openai' : 'bedrock', - model: req.body.model || connector.config?.defaultModel, - temperature: 0.05, - maxTokens: 4096, - signal: abortSignal, - streaming: false, - }); + const model = new llmClass({ + actions: actionsPlugin, + connectorId: connector.id, + request: req, + logger, + llmType: isOpenAI ? 
'openai' : 'bedrock', + model: req.body.model || connector.config?.defaultModel, + temperature: 0.05, + maxTokens: 4096, + signal: abortSignal, + streaming: false, + }); - const graph = await getRelatedGraph(client, model); - let results = { results: { docs: {}, pipeline: {} } }; - try { - results = (await graph.invoke({ - packageName, - dataStreamName, - rawSamples, - currentPipeline, - })) as RelatedApiResponse; - } catch (e) { - return res.badRequest({ body: e }); - } + const graph = await getRelatedGraph(client, model); + let results = { results: { docs: {}, pipeline: {} } }; + try { + results = (await graph.invoke({ + packageName, + dataStreamName, + rawSamples, + currentPipeline, + })) as RelatedApiResponse; + } catch (e) { + return res.badRequest({ body: e }); + } - return res.ok({ body: results }); - } - ); + return res.ok({ body: results }); + } + ); } From 2367be855494e450c115680c3e3b1953930b19a1 Mon Sep 17 00:00:00 2001 From: Bharat Pasupula Date: Fri, 7 Jun 2024 10:00:38 +0200 Subject: [PATCH 50/62] Remove unnecessary addition in package.json --- package.json | 1 - 1 file changed, 1 deletion(-) diff --git a/package.json b/package.json index 410f55924c5c9..c1efd489857b5 100644 --- a/package.json +++ b/package.json @@ -1104,7 +1104,6 @@ "pdfmake": "^0.2.7", "peggy": "^1.2.0", "polished": "^3.7.2", - "prettier-eslint": "^16.3.0", "pretty-ms": "6.0.0", "prop-types": "^15.8.1", "proxy-from-env": "1.0.0", From 0cadcc07fa074763ae89605b1a1e7850b2628efc Mon Sep 17 00:00:00 2001 From: Bharat Pasupula Date: Fri, 7 Jun 2024 11:05:01 +0200 Subject: [PATCH 51/62] update yarn.lock --- yarn.lock | 141 +++++++----------------------------------------------- 1 file changed, 18 insertions(+), 123 deletions(-) diff --git a/yarn.lock b/yarn.lock index 92facd21a0844..66ad154c6a60d 100644 --- a/yarn.lock +++ b/yarn.lock @@ -11256,17 +11256,6 @@ "@typescript-eslint/typescript-estree" "5.62.0" debug "^4.3.4" -"@typescript-eslint/parser@^6.7.5": - version "6.21.0" - resolved 
"https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-6.21.0.tgz#af8fcf66feee2edc86bc5d1cf45e33b0630bf35b" - integrity sha512-tbsV1jPne5CkFQCgPBcDOt30ItF7aJoZL997JSF7MhGQqOeT3svWRYxiqlfA5RUdlHN6Fi+EI9bxqbdyAUZjYQ== - dependencies: - "@typescript-eslint/scope-manager" "6.21.0" - "@typescript-eslint/types" "6.21.0" - "@typescript-eslint/typescript-estree" "6.21.0" - "@typescript-eslint/visitor-keys" "6.21.0" - debug "^4.3.4" - "@typescript-eslint/scope-manager@5.62.0": version "5.62.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.62.0.tgz#d9457ccc6a0b8d6b37d0eb252a23022478c5460c" @@ -11275,14 +11264,6 @@ "@typescript-eslint/types" "5.62.0" "@typescript-eslint/visitor-keys" "5.62.0" -"@typescript-eslint/scope-manager@6.21.0": - version "6.21.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-6.21.0.tgz#ea8a9bfc8f1504a6ac5d59a6df308d3a0630a2b1" - integrity sha512-OwLUIWZJry80O99zvqXVEioyniJMa+d2GrqpUTqi5/v5D5rOrppJVBPa0yKCblcigC0/aYAzxxqQ1B+DS2RYsg== - dependencies: - "@typescript-eslint/types" "6.21.0" - "@typescript-eslint/visitor-keys" "6.21.0" - "@typescript-eslint/type-utils@5.62.0": version "5.62.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-5.62.0.tgz#286f0389c41681376cdad96b309cedd17d70346a" @@ -11298,11 +11279,6 @@ resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.62.0.tgz#258607e60effa309f067608931c3df6fed41fd2f" integrity sha512-87NVngcbVXUahrRTqIK27gD2t5Cu1yuCXxbLcFtCzZGlfyVWWh8mLHkoxzjsB6DDNnvdL+fW8MiwPEJyGJQDgQ== -"@typescript-eslint/types@6.21.0": - version "6.21.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-6.21.0.tgz#205724c5123a8fef7ecd195075fa6e85bac3436d" - integrity sha512-1kFmZ1rOm5epu9NZEZm1kckCDGj5UJEf7P1kliH4LKu/RkwpsfqqGmY2OOcUs18lSlQBKLDYBOGxRVtrMN5lpg== - "@typescript-eslint/typescript-estree@5.62.0", "@typescript-eslint/typescript-estree@^5.59.5", 
"@typescript-eslint/typescript-estree@^5.62.0": version "5.62.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.62.0.tgz#7d17794b77fabcac615d6a48fb143330d962eb9b" @@ -11316,20 +11292,6 @@ semver "^7.3.7" tsutils "^3.21.0" -"@typescript-eslint/typescript-estree@6.21.0": - version "6.21.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-6.21.0.tgz#c47ae7901db3b8bddc3ecd73daff2d0895688c46" - integrity sha512-6npJTkZcO+y2/kr+z0hc4HwNfrrP4kNYh57ek7yCNlrBjWQ1Y0OS7jiZTkgumrvkX5HkEKXFZkkdFNkaW2wmUQ== - dependencies: - "@typescript-eslint/types" "6.21.0" - "@typescript-eslint/visitor-keys" "6.21.0" - debug "^4.3.4" - globby "^11.1.0" - is-glob "^4.0.3" - minimatch "9.0.3" - semver "^7.5.4" - ts-api-utils "^1.0.1" - "@typescript-eslint/utils@5.62.0", "@typescript-eslint/utils@^5.10.0", "@typescript-eslint/utils@^6.18.1": version "5.62.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-5.62.0.tgz#141e809c71636e4a75daa39faed2fb5f4b10df86" @@ -11352,14 +11314,6 @@ "@typescript-eslint/types" "5.62.0" eslint-visitor-keys "^3.3.0" -"@typescript-eslint/visitor-keys@6.21.0": - version "6.21.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-6.21.0.tgz#87a99d077aa507e20e238b11d56cc26ade45fe47" - integrity sha512-JJtkDduxLi9bivAB+cYOVMtbkqdPOhZ+ZI5LC47MIRrDV4Yn2o+ZnW10Nkmr28xRpSpdJ6Sm42Hjf2+REYXm0A== - dependencies: - "@typescript-eslint/types" "6.21.0" - eslint-visitor-keys "^3.4.1" - "@ungap/structured-clone@^1.2.0": version "1.2.0" resolved "https://registry.yarnpkg.com/@ungap/structured-clone/-/structured-clone-1.2.0.tgz#756641adb587851b5ccb3e095daf27ae581c8406" @@ -13736,7 +13690,7 @@ chalk@2.4.2, chalk@^2.3.0, chalk@^2.4.1, chalk@^2.4.2: escape-string-regexp "^1.0.5" supports-color "^5.3.0" -chalk@^1.0.0, chalk@^1.1.3: +chalk@^1.0.0: version "1.1.3" resolved 
"https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= @@ -14313,7 +14267,7 @@ common-path-prefix@^3.0.0: resolved "https://registry.yarnpkg.com/common-path-prefix/-/common-path-prefix-3.0.0.tgz#7d007a7e07c58c4b4d5f433131a19141b29f11e0" integrity sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w== -common-tags@^1.4.0, common-tags@^1.8.0: +common-tags@^1.8.0: version "1.8.2" resolved "https://registry.yarnpkg.com/common-tags/-/common-tags-1.8.2.tgz#94ebb3c076d26032745fd54face7f688ef5ac9c6" integrity sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA== @@ -15933,11 +15887,6 @@ discontinuous-range@1.0.0: resolved "https://registry.yarnpkg.com/discontinuous-range/-/discontinuous-range-1.0.0.tgz#e38331f0844bba49b9a9cb71c771585aab1bc65a" integrity sha1-44Mx8IRLukm5qctxx3FYWqsbxlo= -dlv@^1.1.0: - version "1.1.3" - resolved "https://registry.yarnpkg.com/dlv/-/dlv-1.1.3.tgz#5c198a8a11453596e751494d49874bc7732f2e79" - integrity sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA== - dns-equal@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/dns-equal/-/dns-equal-1.0.0.tgz#b39e7f1da6eb0a75ba9c17324b34753c47e0654d" @@ -17024,7 +16973,7 @@ eslint-scope@^4.0.3: esrecurse "^4.1.0" estraverse "^4.1.1" -eslint-scope@^7.1.1, eslint-scope@^7.2.2: +eslint-scope@^7.2.2: version "7.2.2" resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.2.2.tgz#deb4f92563390f32006894af62a22dba1c46423f" integrity sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg== @@ -17066,7 +17015,7 @@ eslint-visitor-keys@^3.3.0, eslint-visitor-keys@^3.4.1, eslint-visitor-keys@^3.4 resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz#0cd72fe8550e3c2eae156a96a4dddcd1c8ac5800" integrity 
sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag== -eslint@^8.57.0, eslint@^8.7.0: +eslint@^8.57.0: version "8.57.0" resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.57.0.tgz#c786a6fd0e0b68941aaf624596fb987089195668" integrity sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ== @@ -17120,7 +17069,7 @@ esniff@^2.0.1: event-emitter "^0.3.5" type "^2.7.2" -espree@^9.3.1, espree@^9.6.0, espree@^9.6.1: +espree@^9.6.0, espree@^9.6.1: version "9.6.1" resolved "https://registry.yarnpkg.com/espree/-/espree-9.6.1.tgz#a2a17b8e434690a5432f2f8018ce71d331a48c6f" integrity sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ== @@ -17139,7 +17088,7 @@ esprima@~1.0.4: resolved "https://registry.yarnpkg.com/esprima/-/esprima-1.0.4.tgz#9f557e08fc3b4d26ece9dd34f8fbf476b62585ad" integrity sha1-n1V+CPw7TSbs6d00+Pv0drYlha0= -esquery@^1.4.0, esquery@^1.4.2: +esquery@^1.4.2: version "1.5.0" resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.5.0.tgz#6ce17738de8577694edd7361c57182ac8cb0db0b" integrity sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg== @@ -22072,7 +22021,7 @@ lodash.memoize@^4.1.2: resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" integrity sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4= -lodash.merge@4.6.2, lodash.merge@^4.6.0, lodash.merge@^4.6.2: +lodash.merge@4.6.2, lodash.merge@^4.6.2: version "4.6.2" resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== @@ -22159,20 +22108,12 @@ logform@^2.3.2, logform@^2.4.0: safe-stable-stringify "^2.3.1" triple-beam "^1.3.0" -loglevel-colored-level-prefix@^1.0.0: - version "1.0.0" - resolved 
"https://registry.yarnpkg.com/loglevel-colored-level-prefix/-/loglevel-colored-level-prefix-1.0.0.tgz#6a40218fdc7ae15fc76c3d0f3e676c465388603e" - integrity sha512-u45Wcxxc+SdAlh4yeF/uKlC1SPUPCy0gullSNKXod5I4bmifzk+Q4lSLExNEVn19tGaJipbZ4V4jbFn79/6mVA== - dependencies: - chalk "^1.1.3" - loglevel "^1.4.1" - loglevel-plugin-prefix@^0.8.4: version "0.8.4" resolved "https://registry.yarnpkg.com/loglevel-plugin-prefix/-/loglevel-plugin-prefix-0.8.4.tgz#2fe0e05f1a820317d98d8c123e634c1bd84ff644" integrity sha512-WpG9CcFAOjz/FtNht+QJeGpvVl/cdR6P0z6OcXSkr8wFJOsV2GRj2j10JLfjuA4aYkcKCNIEqRGCyTife9R8/g== -loglevel@^1.4.1, loglevel@^1.6.0: +loglevel@^1.6.0: version "1.9.1" resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.9.1.tgz#d63976ac9bcd03c7c873116d41c2a85bafff1be7" integrity sha512-hP3I3kCrDIMuRwAwHltphhDM1r8i55H33GgqjXbrisuJhF4kRhW1dNuxsRklp4bXl8DSdLaNLuiL4A/LWRfxvg== @@ -22947,13 +22888,6 @@ minimatch@5.0.1: dependencies: brace-expansion "^2.0.1" -minimatch@9.0.3, minimatch@^9.0.1: - version "9.0.3" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.3.tgz#a6e00c3de44c3a542bfaae70abfc22420a6da825" - integrity sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg== - dependencies: - brace-expansion "^2.0.1" - minimatch@^3.0.2, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2: version "3.1.2" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" @@ -22968,6 +22902,13 @@ minimatch@^5.0.1, minimatch@^5.1.0: dependencies: brace-expansion "^2.0.1" +minimatch@^9.0.1: + version "9.0.3" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.3.tgz#a6e00c3de44c3a542bfaae70abfc22420a6da825" + integrity sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg== + dependencies: + brace-expansion "^2.0.1" + minimist-options@4.1.0: version "4.1.0" resolved 
"https://registry.yarnpkg.com/minimist-options/-/minimist-options-4.1.0.tgz#c0655713c53a8a2ebd77ffa247d342c40f010619" @@ -25553,24 +25494,6 @@ prelude-ls@~1.1.2: resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= -prettier-eslint@^16.3.0: - version "16.3.0" - resolved "https://registry.yarnpkg.com/prettier-eslint/-/prettier-eslint-16.3.0.tgz#8f7bbc863f35939948e386eafe72ffd653b2d80b" - integrity sha512-Lh102TIFCr11PJKUMQ2kwNmxGhTsv/KzUg9QYF2Gkw259g/kPgndZDWavk7/ycbRvj2oz4BPZ1gCU8bhfZH/Xg== - dependencies: - "@typescript-eslint/parser" "^6.7.5" - common-tags "^1.4.0" - dlv "^1.1.0" - eslint "^8.7.0" - indent-string "^4.0.0" - lodash.merge "^4.6.0" - loglevel-colored-level-prefix "^1.0.0" - prettier "^3.0.1" - pretty-format "^29.7.0" - require-relative "^0.8.7" - typescript "^5.2.2" - vue-eslint-parser "^9.1.0" - prettier-linter-helpers@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz#d23d41fe1375646de2d0104d3454a3008802cf7b" @@ -25588,11 +25511,6 @@ prettier@^2.0.0, prettier@^2.7.1: resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.7.1.tgz#e235806850d057f97bb08368a4f7d899f7760c64" integrity sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g== -prettier@^3.0.1: - version "3.3.0" - resolved "https://registry.yarnpkg.com/prettier/-/prettier-3.3.0.tgz#d173ea0524a691d4c0b1181752f2b46724328cdf" - integrity sha512-J9odKxERhCQ10OC2yb93583f6UnYutOeiV5i0zEDS7UGTdUt0u+y8erxl3lBKvwo/JHyyoEdXjwp4dke9oyZ/g== - pretty-bytes@^5.6.0: version "5.6.0" resolved "https://registry.yarnpkg.com/pretty-bytes/-/pretty-bytes-5.6.0.tgz#356256f643804773c82f64723fe78c92c62beaeb" @@ -25625,7 +25543,7 @@ pretty-format@^27.0.2: ansi-styles "^5.0.0" react-is "^17.0.1" -pretty-format@^29.0.0, pretty-format@^29.6.1, pretty-format@^29.7.0: +pretty-format@^29.0.0, 
pretty-format@^29.6.1: version "29.7.0" resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-29.7.0.tgz#ca42c758310f365bfa71a0bda0a807160b776812" integrity sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ== @@ -27420,11 +27338,6 @@ require-main-filename@^2.0.0: resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== -require-relative@^0.8.7: - version "0.8.7" - resolved "https://registry.yarnpkg.com/require-relative/-/require-relative-0.8.7.tgz#7999539fc9e047a37928fa196f8e1563dabd36de" - integrity sha512-AKGr4qvHiryxRb19m3PsLRGuKVAbJLUD7E6eOaHkfKhwc+vSgVOCY5xNvm9EkolBKTOf0GrQAZKLimOCz81Khg== - requireindex@~1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/requireindex/-/requireindex-1.2.0.tgz#3463cdb22ee151902635aa6c9535d4de9c2ef1ef" @@ -28097,7 +28010,7 @@ semver@^6.0.0, semver@^6.1.0, semver@^6.1.2, semver@^6.3.0, semver@^6.3.1: resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== -semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.6, semver@^7.3.7, semver@^7.5.0, semver@^7.5.2, semver@^7.5.3, semver@^7.5.4: +semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.5.0, semver@^7.5.2, semver@^7.5.3, semver@^7.5.4: version "7.6.2" resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.2.tgz#1e3b34759f896e8f14d6134732ce798aeb0c6e13" integrity sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w== @@ -30268,11 +30181,6 @@ ts-algebra@^1.2.0: resolved "https://registry.yarnpkg.com/ts-algebra/-/ts-algebra-1.2.0.tgz#f91c481207a770f0d14d055c376cbee040afdfc9" integrity 
sha512-kMuJJd8B2N/swCvIvn1hIFcIOrLGbWl9m/J6O3kHx9VRaevh00nvgjPiEGaRee7DRaAczMYR2uwWvXU22VFltw== -ts-api-utils@^1.0.1: - version "1.3.0" - resolved "https://registry.yarnpkg.com/ts-api-utils/-/ts-api-utils-1.3.0.tgz#4b490e27129f1e8e686b45cc4ab63714dc60eea1" - integrity sha512-UQMIo7pb8WRomKR1/+MFVLTroIvDVtMX3K6OUir8ynLyzB8Jeriont2bTAtmNPa1ekAgN7YPDyf6V+ygrdU+eQ== - ts-debounce@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/ts-debounce/-/ts-debounce-4.0.0.tgz#33440ef64fab53793c3d546a8ca6ae539ec15841" @@ -30552,7 +30460,7 @@ typescript-tuple@^2.2.1: dependencies: typescript-compare "^0.0.2" -typescript@4.9.5, typescript@5, typescript@^3.3.3333, typescript@^5.0.4, typescript@^5.2.2: +typescript@4.9.5, typescript@5, typescript@^3.3.3333, typescript@^5.0.4: version "4.9.5" resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.9.5.tgz#095979f9bcc0d09da324d58d03ce8f8374cbe65a" integrity sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g== @@ -31655,19 +31563,6 @@ vt-pbf@^3.1.3: "@mapbox/vector-tile" "^1.3.1" pbf "^3.2.1" -vue-eslint-parser@^9.1.0: - version "9.4.3" - resolved "https://registry.yarnpkg.com/vue-eslint-parser/-/vue-eslint-parser-9.4.3.tgz#9b04b22c71401f1e8bca9be7c3e3416a4bde76a8" - integrity sha512-2rYRLWlIpaiN8xbPiDyXZXRgLGOtWxERV7ND5fFAv5qo1D2N9Fu9MNajBNc6o13lZ+24DAWCkQCvj4klgmcITg== - dependencies: - debug "^4.3.4" - eslint-scope "^7.1.1" - eslint-visitor-keys "^3.3.0" - espree "^9.3.1" - esquery "^1.4.0" - lodash "^4.17.21" - semver "^7.3.6" - w3c-xmlserializer@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/w3c-xmlserializer/-/w3c-xmlserializer-3.0.0.tgz#06cdc3eefb7e4d0b20a560a5a3aeb0d2d9a65923" From 4ce28774846be26a823c437a9af66c49ca6cef52 Mon Sep 17 00:00:00 2001 From: Bharat Pasupula Date: Mon, 10 Jun 2024 15:20:46 +0200 Subject: [PATCH 52/62] Revert unnecessary changes --- .../rule_management/crud/create_rule/create_rule_route.gen.ts | 2 +- 
.../rule_management/crud/patch_rule/patch_rule_route.gen.ts | 2 +- .../rule_management/crud/update_rule/update_rule_route.gen.ts | 4 ++-- .../rule_management/export_rules/export_rules_route.gen.ts | 2 +- .../rule_management/read_tags/read_tags_route.gen.ts | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/create_rule/create_rule_route.gen.ts b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/create_rule/create_rule_route.gen.ts index 4f0af497e4986..b11a300523966 100644 --- a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/create_rule/create_rule_route.gen.ts +++ b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/create_rule/create_rule_route.gen.ts @@ -5,7 +5,7 @@ * 2.0. */ -import { z } from 'zod'; +import type { z } from 'zod'; /* * NOTICE: Do not edit this file manually. diff --git a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/patch_rule/patch_rule_route.gen.ts b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/patch_rule/patch_rule_route.gen.ts index 1d1025d39ff04..a404eb652988a 100644 --- a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/patch_rule/patch_rule_route.gen.ts +++ b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/patch_rule/patch_rule_route.gen.ts @@ -5,7 +5,7 @@ * 2.0. */ -import { z } from 'zod'; +import type { z } from 'zod'; /* * NOTICE: Do not edit this file manually. 
diff --git a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/update_rule/update_rule_route.gen.ts b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/update_rule/update_rule_route.gen.ts index 2cab78d432a32..10339db0026ab 100644 --- a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/update_rule/update_rule_route.gen.ts +++ b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/update_rule/update_rule_route.gen.ts @@ -5,7 +5,7 @@ * 2.0. */ -import { z } from 'zod'; +import type { z } from 'zod'; /* * NOTICE: Do not edit this file manually. @@ -16,7 +16,7 @@ import { z } from 'zod'; * version: 2023-10-31 */ -import { RuleUpdateProps, RuleResponse } from '../../../model/rule_schema/rule_schemas.gen'; +import { RuleResponse, RuleUpdateProps } from '../../../model/rule_schema/rule_schemas.gen'; export type UpdateRuleRequestBody = z.infer; export const UpdateRuleRequestBody = RuleUpdateProps; diff --git a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/export_rules/export_rules_route.gen.ts b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/export_rules/export_rules_route.gen.ts index d1c99bdf096cc..04145ac7d7a88 100644 --- a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/export_rules/export_rules_route.gen.ts +++ b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/export_rules/export_rules_route.gen.ts @@ -5,8 +5,8 @@ * 2.0. */ -import { z } from 'zod'; import { BooleanFromString } from '@kbn/zod-helpers'; +import { z } from 'zod'; /* * NOTICE: Do not edit this file manually. 
diff --git a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/read_tags/read_tags_route.gen.ts b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/read_tags/read_tags_route.gen.ts index 0f997aaae5ef4..403653a5beda0 100644 --- a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/read_tags/read_tags_route.gen.ts +++ b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/read_tags/read_tags_route.gen.ts @@ -5,7 +5,7 @@ * 2.0. */ -import { z } from 'zod'; +import type { z } from 'zod'; /* * NOTICE: Do not edit this file manually. From 4e4779caa518c8c32ee0eaa198aa1fdd60558914 Mon Sep 17 00:00:00 2001 From: kibanamachine <42973632+kibanamachine@users.noreply.github.com> Date: Mon, 10 Jun 2024 14:04:24 +0000 Subject: [PATCH 53/62] [CI] Auto-commit changed files from 'yarn openapi:generate' --- .../rule_management/crud/create_rule/create_rule_route.gen.ts | 2 +- .../rule_management/crud/patch_rule/patch_rule_route.gen.ts | 2 +- .../rule_management/crud/update_rule/update_rule_route.gen.ts | 4 ++-- .../rule_management/export_rules/export_rules_route.gen.ts | 2 +- .../rule_management/read_tags/read_tags_route.gen.ts | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/create_rule/create_rule_route.gen.ts b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/create_rule/create_rule_route.gen.ts index b11a300523966..4f0af497e4986 100644 --- a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/create_rule/create_rule_route.gen.ts +++ b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/create_rule/create_rule_route.gen.ts @@ -5,7 +5,7 @@ * 2.0. */ -import type { z } from 'zod'; +import { z } from 'zod'; /* * NOTICE: Do not edit this file manually. 
diff --git a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/patch_rule/patch_rule_route.gen.ts b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/patch_rule/patch_rule_route.gen.ts index a404eb652988a..1d1025d39ff04 100644 --- a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/patch_rule/patch_rule_route.gen.ts +++ b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/patch_rule/patch_rule_route.gen.ts @@ -5,7 +5,7 @@ * 2.0. */ -import type { z } from 'zod'; +import { z } from 'zod'; /* * NOTICE: Do not edit this file manually. diff --git a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/update_rule/update_rule_route.gen.ts b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/update_rule/update_rule_route.gen.ts index 10339db0026ab..2cab78d432a32 100644 --- a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/update_rule/update_rule_route.gen.ts +++ b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/update_rule/update_rule_route.gen.ts @@ -5,7 +5,7 @@ * 2.0. */ -import type { z } from 'zod'; +import { z } from 'zod'; /* * NOTICE: Do not edit this file manually. 
@@ -16,7 +16,7 @@ import type { z } from 'zod'; * version: 2023-10-31 */ -import { RuleResponse, RuleUpdateProps } from '../../../model/rule_schema/rule_schemas.gen'; +import { RuleUpdateProps, RuleResponse } from '../../../model/rule_schema/rule_schemas.gen'; export type UpdateRuleRequestBody = z.infer; export const UpdateRuleRequestBody = RuleUpdateProps; diff --git a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/export_rules/export_rules_route.gen.ts b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/export_rules/export_rules_route.gen.ts index 04145ac7d7a88..d1c99bdf096cc 100644 --- a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/export_rules/export_rules_route.gen.ts +++ b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/export_rules/export_rules_route.gen.ts @@ -5,8 +5,8 @@ * 2.0. */ -import { BooleanFromString } from '@kbn/zod-helpers'; import { z } from 'zod'; +import { BooleanFromString } from '@kbn/zod-helpers'; /* * NOTICE: Do not edit this file manually. diff --git a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/read_tags/read_tags_route.gen.ts b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/read_tags/read_tags_route.gen.ts index 403653a5beda0..0f997aaae5ef4 100644 --- a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/read_tags/read_tags_route.gen.ts +++ b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/read_tags/read_tags_route.gen.ts @@ -5,7 +5,7 @@ * 2.0. */ -import type { z } from 'zod'; +import { z } from 'zod'; /* * NOTICE: Do not edit this file manually. 
From b37688fcebc75fd0d4ec94498327881777249255 Mon Sep 17 00:00:00 2001 From: Bharat Pasupula Date: Mon, 10 Jun 2024 18:40:49 +0200 Subject: [PATCH 54/62] Revert eslint changes --- .eslintrc.js | 2 +- .../rule_management/crud/create_rule/create_rule_route.gen.ts | 2 +- .../rule_management/crud/patch_rule/patch_rule_route.gen.ts | 2 +- .../rule_management/crud/update_rule/update_rule_route.gen.ts | 2 +- .../rule_management/read_tags/read_tags_route.gen.ts | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.eslintrc.js b/.eslintrc.js index c3564ea9d5f6c..09c7428599ca2 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -1272,7 +1272,7 @@ module.exports = { files: [ 'x-pack/packages/security-solution/features/**/*.{js,mjs,ts,tsx}', 'x-pack/packages/security-solution/navigation/**/*.{js,mjs,ts,tsx}', - 'x-pack/plugins/integration_assistant/**/*.{js,mjs,ts,tsx}', + 'x-pack/plugins/security_solution/**/*.{js,mjs,ts,tsx}', 'x-pack/plugins/security_solution_ess/**/*.{js,mjs,ts,tsx}', 'x-pack/plugins/security_solution_serverless/**/*.{js,mjs,ts,tsx}', 'x-pack/plugins/cases/**/*.{js,mjs,ts,tsx}', diff --git a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/create_rule/create_rule_route.gen.ts b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/create_rule/create_rule_route.gen.ts index 4f0af497e4986..b11a300523966 100644 --- a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/create_rule/create_rule_route.gen.ts +++ b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/create_rule/create_rule_route.gen.ts @@ -5,7 +5,7 @@ * 2.0. */ -import { z } from 'zod'; +import type { z } from 'zod'; /* * NOTICE: Do not edit this file manually. 
diff --git a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/patch_rule/patch_rule_route.gen.ts b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/patch_rule/patch_rule_route.gen.ts index 1d1025d39ff04..a404eb652988a 100644 --- a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/patch_rule/patch_rule_route.gen.ts +++ b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/patch_rule/patch_rule_route.gen.ts @@ -5,7 +5,7 @@ * 2.0. */ -import { z } from 'zod'; +import type { z } from 'zod'; /* * NOTICE: Do not edit this file manually. diff --git a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/update_rule/update_rule_route.gen.ts b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/update_rule/update_rule_route.gen.ts index 2cab78d432a32..faa285a8c62f1 100644 --- a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/update_rule/update_rule_route.gen.ts +++ b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/crud/update_rule/update_rule_route.gen.ts @@ -5,7 +5,7 @@ * 2.0. */ -import { z } from 'zod'; +import type { z } from 'zod'; /* * NOTICE: Do not edit this file manually. diff --git a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/read_tags/read_tags_route.gen.ts b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/read_tags/read_tags_route.gen.ts index 0f997aaae5ef4..403653a5beda0 100644 --- a/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/read_tags/read_tags_route.gen.ts +++ b/x-pack/plugins/security_solution/common/api/detection_engine/rule_management/read_tags/read_tags_route.gen.ts @@ -5,7 +5,7 @@ * 2.0. */ -import { z } from 'zod'; +import type { z } from 'zod'; /* * NOTICE: Do not edit this file manually. 
From 40408ff0c01cba2d6987fc846daadb22269ec7d7 Mon Sep 17 00:00:00 2001 From: Bharat Pasupula Date: Tue, 11 Jun 2024 08:29:53 +0200 Subject: [PATCH 55/62] Use SimpleChatModel --- .../server/graphs/categorization/categorization.ts | 10 ++++++++-- .../server/graphs/categorization/errors.ts | 10 ++++++++-- .../server/graphs/categorization/invalid.ts | 10 ++++++++-- .../server/graphs/categorization/review.ts | 10 ++++++++-- .../server/graphs/ecs/duplicates.ts | 12 +++++++++--- .../server/graphs/ecs/invalid.ts | 12 +++++++++--- .../server/graphs/ecs/mapping.ts | 12 +++++++++--- .../server/graphs/ecs/missing.ts | 12 +++++++++--- .../server/graphs/related/errors.ts | 10 ++++++++-- .../server/graphs/related/related.ts | 10 ++++++++-- .../server/graphs/related/review.ts | 10 ++++++++-- 11 files changed, 92 insertions(+), 26 deletions(-) diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts index b6ec3f0f3dfe3..ed1a88c3a1cfd 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.ts @@ -4,14 +4,20 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -import type { BedrockChat } from '@kbn/langchain/server/language_models'; +import type { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; import { JsonOutputParser } from '@langchain/core/output_parsers'; import type { ESProcessorItem, Pipeline } from '../../../common'; import type { CategorizationState } from '../../types'; import { combineProcessors } from '../../util/processors'; import { CATEGORIZATION_MAIN_PROMPT } from './prompts'; -export async function handleCategorization(state: CategorizationState, model: BedrockChat) { +export async function handleCategorization( + state: CategorizationState, + model: ActionsClientChatOpenAI | ActionsClientSimpleChatModel +) { const categorizationMainPrompt = CATEGORIZATION_MAIN_PROMPT; const outputParser = new JsonOutputParser(); const categorizationMainGraph = categorizationMainPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts index 15e1fece549de..d8cb7beedc9bf 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.ts @@ -4,14 +4,20 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -import type { BedrockChat } from '@kbn/langchain/server/language_models'; +import type { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; import { JsonOutputParser } from '@langchain/core/output_parsers'; import type { ESProcessorItem, Pipeline } from '../../../common'; import type { CategorizationState } from '../../types'; import { combineProcessors } from '../../util/processors'; import { CATEGORIZATION_ERROR_PROMPT } from './prompts'; -export async function handleErrors(state: CategorizationState, model: BedrockChat) { +export async function handleErrors( + state: CategorizationState, + model: ActionsClientChatOpenAI | ActionsClientSimpleChatModel +) { const categorizationErrorPrompt = CATEGORIZATION_ERROR_PROMPT; const outputParser = new JsonOutputParser(); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts index 5d0b081e06fab..413694b594518 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.ts @@ -4,7 +4,10 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -import type { BedrockChat } from '@kbn/langchain/server/language_models'; +import type { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; import { JsonOutputParser } from '@langchain/core/output_parsers'; import type { ESProcessorItem, Pipeline } from '../../../common'; import type { CategorizationState } from '../../types'; @@ -12,7 +15,10 @@ import { combineProcessors } from '../../util/processors'; import { ECS_EVENT_TYPES_PER_CATEGORY } from './constants'; import { CATEGORIZATION_VALIDATION_PROMPT } from './prompts'; -export async function handleInvalidCategorization(state: CategorizationState, model: BedrockChat) { +export async function handleInvalidCategorization( + state: CategorizationState, + model: ActionsClientChatOpenAI | ActionsClientSimpleChatModel +) { const categorizationInvalidPrompt = CATEGORIZATION_VALIDATION_PROMPT; const outputParser = new JsonOutputParser(); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts index 6f8d12bc40c05..12b3880737237 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/review.ts @@ -4,7 +4,10 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -import type { BedrockChat } from '@kbn/langchain/server/language_models'; +import type { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; import { JsonOutputParser } from '@langchain/core/output_parsers'; import { CATEGORIZATION_REVIEW_PROMPT } from './prompts'; @@ -13,7 +16,10 @@ import type { CategorizationState } from '../../types'; import { combineProcessors } from '../../util/processors'; import { ECS_EVENT_TYPES_PER_CATEGORY } from './constants'; -export async function handleReview(state: CategorizationState, model: BedrockChat) { +export async function handleReview( + state: CategorizationState, + model: ActionsClientChatOpenAI | ActionsClientSimpleChatModel +) { const categorizationReviewPrompt = CATEGORIZATION_REVIEW_PROMPT; const outputParser = new JsonOutputParser(); const categorizationReview = categorizationReviewPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts index f1c2f8ad2ce67..fd11a660e75ab 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.ts @@ -4,12 +4,18 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ +import type { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; import { JsonOutputParser } from '@langchain/core/output_parsers'; -import type { BedrockChat } from '@kbn/langchain/server/language_models'; -import { ECS_DUPLICATES_PROMPT } from './prompts'; import type { EcsMappingState } from '../../types'; +import { ECS_DUPLICATES_PROMPT } from './prompts'; -export async function handleDuplicates(state: EcsMappingState, model: BedrockChat) { +export async function handleDuplicates( + state: EcsMappingState, + model: ActionsClientChatOpenAI | ActionsClientSimpleChatModel +) { const ecsDuplicatesPrompt = ECS_DUPLICATES_PROMPT; const outputParser = new JsonOutputParser(); const ecsDuplicatesGraph = ecsDuplicatesPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts index c3ff0e12bf44c..dcbba0ebe9d13 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.ts @@ -4,12 +4,18 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ +import type { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; import { JsonOutputParser } from '@langchain/core/output_parsers'; -import type { BedrockChat } from '@kbn/langchain/server/language_models'; -import { ECS_INVALID_PROMPT } from './prompts'; import type { EcsMappingState } from '../../types'; +import { ECS_INVALID_PROMPT } from './prompts'; -export async function handleInvalidEcs(state: EcsMappingState, model: BedrockChat) { +export async function handleInvalidEcs( + state: EcsMappingState, + model: ActionsClientChatOpenAI | ActionsClientSimpleChatModel +) { const ecsInvalidEcsPrompt = ECS_INVALID_PROMPT; const outputParser = new JsonOutputParser(); const ecsInvalidEcsGraph = ecsInvalidEcsPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts index dc88e4f390102..7ecb108659f45 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.ts @@ -4,12 +4,18 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ +import type { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; import { JsonOutputParser } from '@langchain/core/output_parsers'; -import type { BedrockChat } from '@kbn/langchain/server/language_models'; -import { ECS_MAIN_PROMPT } from './prompts'; import type { EcsMappingState } from '../../types'; +import { ECS_MAIN_PROMPT } from './prompts'; -export async function handleEcsMapping(state: EcsMappingState, model: BedrockChat) { +export async function handleEcsMapping( + state: EcsMappingState, + model: ActionsClientChatOpenAI | ActionsClientSimpleChatModel +) { const ecsMainPrompt = ECS_MAIN_PROMPT; const outputParser = new JsonOutputParser(); const ecsMainGraph = ecsMainPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts index ffb23c875f278..d7f1f65b2b4ea 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.ts @@ -4,12 +4,18 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ +import type { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; import { JsonOutputParser } from '@langchain/core/output_parsers'; -import type { BedrockChat } from '@kbn/langchain/server/language_models'; -import { ECS_MISSING_KEYS_PROMPT } from './prompts'; import type { EcsMappingState } from '../../types'; +import { ECS_MISSING_KEYS_PROMPT } from './prompts'; -export async function handleMissingKeys(state: EcsMappingState, model: BedrockChat) { +export async function handleMissingKeys( + state: EcsMappingState, + model: ActionsClientChatOpenAI | ActionsClientSimpleChatModel +) { const ecsMissingPrompt = ECS_MISSING_KEYS_PROMPT; const outputParser = new JsonOutputParser(); const ecsMissingGraph = ecsMissingPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts b/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts index 9ff0443078c6e..025422008c4dc 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/errors.ts @@ -4,14 +4,20 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -import type { BedrockChat } from '@kbn/langchain/server/language_models'; +import type { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; import { JsonOutputParser } from '@langchain/core/output_parsers'; import type { ESProcessorItem, Pipeline } from '../../../common'; import type { RelatedState } from '../../types'; import { combineProcessors } from '../../util/processors'; import { RELATED_ERROR_PROMPT } from './prompts'; -export async function handleErrors(state: RelatedState, model: BedrockChat) { +export async function handleErrors( + state: RelatedState, + model: ActionsClientChatOpenAI | ActionsClientSimpleChatModel +) { const relatedErrorPrompt = RELATED_ERROR_PROMPT; const outputParser = new JsonOutputParser(); const relatedErrorGraph = relatedErrorPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/related.ts b/x-pack/plugins/integration_assistant/server/graphs/related/related.ts index f25acf1b22cac..2c98381510d9b 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/related.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/related.ts @@ -4,14 +4,20 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -import type { BedrockChat } from '@kbn/langchain/server/language_models'; +import type { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; import { JsonOutputParser } from '@langchain/core/output_parsers'; import type { ESProcessorItem, Pipeline } from '../../../common'; import type { RelatedState } from '../../types'; import { combineProcessors } from '../../util/processors'; import { RELATED_MAIN_PROMPT } from './prompts'; -export async function handleRelated(state: RelatedState, model: BedrockChat) { +export async function handleRelated( + state: RelatedState, + model: ActionsClientChatOpenAI | ActionsClientSimpleChatModel +) { const relatedMainPrompt = RELATED_MAIN_PROMPT; const outputParser = new JsonOutputParser(); const relatedMainGraph = relatedMainPrompt.pipe(model).pipe(outputParser); diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/review.ts b/x-pack/plugins/integration_assistant/server/graphs/related/review.ts index 517b81eac73dc..6c07079e18f48 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/review.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/review.ts @@ -4,14 +4,20 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -import type { BedrockChat } from '@kbn/langchain/server/language_models'; +import type { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; import { JsonOutputParser } from '@langchain/core/output_parsers'; import type { ESProcessorItem, Pipeline } from '../../../common'; import type { RelatedState } from '../../types'; import { combineProcessors } from '../../util/processors'; import { RELATED_REVIEW_PROMPT } from './prompts'; -export async function handleReview(state: RelatedState, model: BedrockChat) { +export async function handleReview( + state: RelatedState, + model: ActionsClientChatOpenAI | ActionsClientSimpleChatModel +) { const relatedReviewPrompt = RELATED_REVIEW_PROMPT; const outputParser = new JsonOutputParser(); const relatedReviewGraph = relatedReviewPrompt.pipe(model).pipe(outputParser); From ad8417f6da6e10ebb69e0728678a2b751755d025 Mon Sep 17 00:00:00 2001 From: Bharat Pasupula Date: Tue, 11 Jun 2024 08:33:29 +0200 Subject: [PATCH 56/62] Update yarn.lock --- yarn.lock | 39 ++++++++++++++++++++++++++++++++------- 1 file changed, 32 insertions(+), 7 deletions(-) diff --git a/yarn.lock b/yarn.lock index 88cf9ade1a24c..31ca09b91573a 100644 --- a/yarn.lock +++ b/yarn.lock @@ -12486,7 +12486,7 @@ async@^1.4.2: resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" integrity sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo= -async@^3.2.0, async@^3.2.3: +async@^3.2.0, async@^3.2.3, async@^3.2.4: version "3.2.4" resolved "https://registry.yarnpkg.com/async/-/async-3.2.4.tgz#2d22e00f8cddeb5fde5dd33522b56d1cf569a81c" integrity sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ== @@ -14100,16 +14100,16 @@ clone-stats@^1.0.0: resolved "https://registry.yarnpkg.com/clone-stats/-/clone-stats-1.0.0.tgz#b3782dff8bb5474e18b9b6bf0fdfe782f8777680" integrity sha1-s3gt/4u1R04Yuba/D9/ngvh3doA= +clone@2.x, clone@^2.1.1, clone@^2.1.2, 
clone@~2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" + integrity sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18= + clone@^1.0.2, clone@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e" integrity sha1-2jCcwmPfFZlMaIypAheco8fNfH4= -clone@^2.1.1, clone@^2.1.2, clone@~2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" - integrity sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18= - cloneable-readable@^1.0.0: version "1.1.2" resolved "https://registry.yarnpkg.com/cloneable-readable/-/cloneable-readable-1.1.2.tgz#d591dee4a8f8bc15da43ce97dceeba13d43e2a65" @@ -21694,6 +21694,11 @@ latest-version@^7.0.0: dependencies: package-json "^8.1.0" +launchdarkly-eventsource@1.4.4: + version "1.4.4" + resolved "https://registry.yarnpkg.com/launchdarkly-eventsource/-/launchdarkly-eventsource-1.4.4.tgz#fa595af8602e487c61520787170376c6a1104459" + integrity sha512-GL+r2Y3WccJlhFyL2buNKel+9VaMnYpbE/FfCkOST5jSNSFodahlxtGyrE8o7R+Qhobyq0Ree4a7iafJDQi9VQ== + launchdarkly-eventsource@2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/launchdarkly-eventsource/-/launchdarkly-eventsource-2.0.3.tgz#8a7b8da5538153f438f7d452b1c87643d900f984" @@ -21716,6 +21721,19 @@ launchdarkly-js-sdk-common@5.2.0: fast-deep-equal "^2.0.1" uuid "^8.0.0" +launchdarkly-node-server-sdk@^7.0.3: + version "7.0.3" + resolved "https://registry.yarnpkg.com/launchdarkly-node-server-sdk/-/launchdarkly-node-server-sdk-7.0.3.tgz#d7a8b996d992b0ca5d4972db5df1ae49332b094c" + integrity sha512-uSkBezAiQ9nwv8N6CmI7OmyJ9e3xpueJzYOso8+5vMf7VtBtPjz6RRsUkUsSzUDo7siclmW8USjCwqn9aX2EbQ== + dependencies: + async "^3.2.4" + launchdarkly-eventsource "1.4.4" + lru-cache "^6.0.0" + node-cache "^5.1.0" + semver "^7.5.4" + tunnel "0.0.6" + uuid "^8.3.2" + lazy-ass@^1.6.0: version "1.6.0" resolved 
"https://registry.yarnpkg.com/lazy-ass/-/lazy-ass-1.6.0.tgz#7999655e8646c17f089fdd187d150d3324d54513" @@ -23626,6 +23644,13 @@ node-addon-api@^6.1.0: resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-6.1.0.tgz#ac8470034e58e67d0c6f1204a18ae6995d9c0d76" integrity sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA== +node-cache@^5.1.0: + version "5.1.2" + resolved "https://registry.yarnpkg.com/node-cache/-/node-cache-5.1.2.tgz#f264dc2ccad0a780e76253a694e9fd0ed19c398d" + integrity sha512-t1QzWwnk4sjLWaQAS8CHgOJ+RAfmHpxFWmc36IWTiWHQfs0w5JDMBS1b1ZxQteo0vVVuWJvIUKHDkkeK7vIGCg== + dependencies: + clone "2.x" + node-dir@^0.1.10: version "0.1.17" resolved "https://registry.yarnpkg.com/node-dir/-/node-dir-0.1.17.tgz#5f5665d93351335caabef8f1c554516cf5f1e4e5" @@ -30318,7 +30343,7 @@ tunnel-agent@^0.6.0: dependencies: safe-buffer "^5.0.1" -tunnel@^0.0.6: +tunnel@0.0.6, tunnel@^0.0.6: version "0.0.6" resolved "https://registry.yarnpkg.com/tunnel/-/tunnel-0.0.6.tgz#72f1314b34a5b192db012324df2cc587ca47f92c" integrity sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg== From a10dce03bb4cf5286e0e37dde993b784204fbcd1 Mon Sep 17 00:00:00 2001 From: Bharat Pasupula Date: Wed, 12 Jun 2024 14:50:42 +0200 Subject: [PATCH 57/62] Disable the feature by default --- x-pack/plugins/integration_assistant/server/config.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugins/integration_assistant/server/config.ts b/x-pack/plugins/integration_assistant/server/config.ts index c2490982ca9f6..c8c81b9f63743 100644 --- a/x-pack/plugins/integration_assistant/server/config.ts +++ b/x-pack/plugins/integration_assistant/server/config.ts @@ -9,7 +9,7 @@ import { schema, type TypeOf } from '@kbn/config-schema'; import type { PluginConfigDescriptor } from '@kbn/core/server'; export const configSchema = schema.object({ - enabled: schema.boolean({ defaultValue: true }), + 
enabled: schema.boolean({ defaultValue: false }), }); export type ServerlessSecuritySchema = TypeOf; From df8dbc59f1021487d2b13cdc49b0bfb84787d769 Mon Sep 17 00:00:00 2001 From: Patryk Kopycinski Date: Wed, 12 Jun 2024 21:49:21 +0200 Subject: [PATCH 58/62] test --- package.json | 2 +- tsconfig.base.json | 3 ++- typings/@langchain/langgraph/index.d.ts | 9 +++++++++ .../server/graphs/categorization/categorization.test.ts | 6 +++++- .../server/graphs/categorization/errors.test.ts | 6 +++++- .../server/graphs/categorization/graph.test.ts | 6 +++++- .../server/graphs/categorization/graph.ts | 4 +++- .../server/graphs/categorization/invalid.test.ts | 6 +++++- .../server/graphs/categorization/review.test.ts | 6 +++++- .../server/graphs/ecs/duplicates.test.ts | 6 +++++- .../server/graphs/ecs/graph.test.ts | 6 +++++- .../integration_assistant/server/graphs/ecs/graph.ts | 4 +++- .../server/graphs/ecs/invalid.test.ts | 8 ++++++-- .../server/graphs/ecs/mapping.test.ts | 8 ++++++-- .../server/graphs/ecs/missing.test.ts | 8 ++++++-- .../server/graphs/related/errors.test.ts | 6 +++++- .../server/graphs/related/graph.test.ts | 6 +++++- .../integration_assistant/server/graphs/related/graph.ts | 4 +++- .../server/graphs/related/related.test.ts | 6 +++++- .../server/graphs/related/review.test.ts | 6 +++++- .../server/integration_builder/fields.ts | 8 ++------ yarn.lock | 8 ++++---- 22 files changed, 100 insertions(+), 32 deletions(-) create mode 100644 typings/@langchain/langgraph/index.d.ts diff --git a/package.json b/package.json index 51659cbd2f707..0417d6dbac3dd 100644 --- a/package.json +++ b/package.json @@ -927,7 +927,7 @@ "@kbn/zod-helpers": "link:packages/kbn-zod-helpers", "@langchain/community": "^0.2.4", "@langchain/core": "0.2.3", - "@langchain/langgraph": "^0.0.21", + "@langchain/langgraph": "^0.0.22", "@langchain/openai": "^0.0.34", "@langtrase/trace-attributes": "^3.0.8", "@langtrase/typescript-sdk": "^2.2.1", diff --git a/tsconfig.base.json b/tsconfig.base.json index 
9b935f0de5c75..29a9eafddc58f 100644 --- a/tsconfig.base.json +++ b/tsconfig.base.json @@ -1862,7 +1862,8 @@ "@kbn/zod-helpers/*": ["packages/kbn-zod-helpers/*"], // END AUTOMATED PACKAGE LISTING // Allows for importing from `kibana` package for the exported types. - "@emotion/core": ["typings/@emotion"] + "@emotion/core": ["typings/@emotion"], + "@langchain/langgraph": ["typings/@langchain/langgraph"] }, // Support .tsx files and transform JSX into calls to React.createElement "jsx": "react", diff --git a/typings/@langchain/langgraph/index.d.ts b/typings/@langchain/langgraph/index.d.ts new file mode 100644 index 0000000000000..eed5051d6a275 --- /dev/null +++ b/typings/@langchain/langgraph/index.d.ts @@ -0,0 +1,9 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +declare module '@langchain/langgraph'; diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.test.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.test.ts index f425941a90d9e..3ad0926297bbc 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/categorization.test.ts @@ -13,10 +13,14 @@ import { categorizationMockProcessors, categorizationExpectedHandlerResponse, } from '../../../__jest__/fixtures/categorization'; +import { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; const mockLlm = new FakeLLM({ response: JSON.stringify(categorizationMockProcessors, null, 2), -}); +}) as unknown as ActionsClientChatOpenAI | ActionsClientSimpleChatModel; const testState: CategorizationState = categorizationTestState; diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.test.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.test.ts index 38b83d4d3add8..18d8c1842080a 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/errors.test.ts @@ -13,10 +13,14 @@ import { categorizationMockProcessors, categorizationExpectedHandlerResponse, } from '../../../__jest__/fixtures/categorization'; +import { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; const mockLlm = new FakeLLM({ response: JSON.stringify(categorizationMockProcessors, null, 2), -}); +}) as unknown as ActionsClientChatOpenAI | ActionsClientSimpleChatModel; const testState: CategorizationState = categorizationTestState; diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts 
b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts index 15d52b13bd3cd..4122d4540dbc0 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.test.ts @@ -25,10 +25,14 @@ import { handleCategorization } from './categorization'; import { handleErrors } from './errors'; import { handleInvalidCategorization } from './invalid'; import { testPipeline, combineProcessors } from '../../util'; +import { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; const mockLlm = new FakeLLM({ response: "I'll callback later.", -}); +}) as unknown as ActionsClientChatOpenAI | ActionsClientSimpleChatModel; jest.mock('./errors'); jest.mock('./review'); diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts index 4a9ac93ab65a1..4bd99b784b76e 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts @@ -4,8 +4,9 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ + import type { IScopedClusterClient } from '@kbn/core-elasticsearch-server'; -import type { StateGraphArgs } from '@langchain/langgraph'; +// import type { StateGraphArgs } from '@langchain/langgraph'; import { StateGraph, END, START } from '@langchain/langgraph'; import type { ActionsClientChatOpenAI, @@ -21,6 +22,7 @@ import { handleErrors } from './errors'; import { handleReview } from './review'; import { CATEGORIZATION_EXAMPLE_ANSWER, ECS_CATEGORIES, ECS_TYPES } from './constants'; +// @ts-expect-error remove after bump to TS@5 const graphState: StateGraphArgs['channels'] = { lastExecutedChain: { value: (x: string, y?: string) => y ?? 
x, diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.test.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.test.ts index a9ddbe1404cb0..10560137093d8 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/invalid.test.ts @@ -13,10 +13,14 @@ import { categorizationMockProcessors, categorizationExpectedHandlerResponse, } from '../../../__jest__/fixtures/categorization'; +import { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; const mockLlm = new FakeLLM({ response: JSON.stringify(categorizationMockProcessors, null, 2), -}); +}) as unknown as ActionsClientChatOpenAI | ActionsClientSimpleChatModel; const testState: CategorizationState = categorizationTestState; diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/review.test.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/review.test.ts index 71a5dfe281dc0..7775b69c5b6a8 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/review.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/review.test.ts @@ -13,10 +13,14 @@ import { categorizationMockProcessors, categorizationExpectedHandlerResponse, } from '../../../__jest__/fixtures/categorization'; +import { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; const mockLlm = new FakeLLM({ response: JSON.stringify(categorizationMockProcessors, null, 2), -}); +}) as unknown as ActionsClientChatOpenAI | ActionsClientSimpleChatModel; const testState: CategorizationState = categorizationTestState; diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.test.ts index 
cf4629aebcd71..9270b2453e261 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/duplicates.test.ts @@ -9,10 +9,14 @@ import { FakeLLM } from '@langchain/core/utils/testing'; import { handleDuplicates } from './duplicates'; import type { EcsMappingState } from '../../types'; import { ecsTestState } from '../../../__jest__/fixtures/ecs_mapping'; +import { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; const mockLlm = new FakeLLM({ response: '{ "message": "ll callback later."}', -}); +}) as unknown as ActionsClientChatOpenAI | ActionsClientSimpleChatModel; const testState: EcsMappingState = ecsTestState; diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts index 34427d423054f..0ae626924c349 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.test.ts @@ -19,10 +19,14 @@ import { handleEcsMapping } from './mapping'; import { handleDuplicates } from './duplicates'; import { handleMissingKeys } from './missing'; import { handleInvalidEcs } from './invalid'; +import { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; const mockLlm = new FakeLLM({ response: "I'll callback later.", -}); +}) as unknown as ActionsClientChatOpenAI | ActionsClientSimpleChatModel; jest.mock('./mapping'); jest.mock('./duplicates'); diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts index 8c7347a8b5058..173cacbdb51d7 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts @@ -5,7 +5,7 @@ * 2.0. 
*/ -import type { StateGraphArgs } from '@langchain/langgraph'; +// import type { StateGraphArgs } from '@langchain/langgraph'; import { StateGraph, END, START } from '@langchain/langgraph'; import type { ActionsClientChatOpenAI, @@ -21,6 +21,8 @@ import { handleInvalidEcs } from './invalid'; import { handleValidateMappings } from './validate'; import type { EcsMappingState } from '../../types'; +// @ts-expect-error remove after bump to TS@5 + const graphState: StateGraphArgs['channels'] = { ecs: { value: (x: string, y?: string) => y ?? x, diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts index 4e2c3b27469b5..7f252a46709a0 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts @@ -9,10 +9,14 @@ import { FakeLLM } from '@langchain/core/utils/testing'; import { handleInvalidEcs } from './invalid'; import type { EcsMappingState } from '../../types'; import { ecsTestState } from '../../../__jest__/fixtures/ecs_mapping'; +import { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; const mockLlm = new FakeLLM({ - response: '{ "message": "ll callback later."}', -}); + response: "I'll callback later.", +}) as unknown as ActionsClientChatOpenAI | ActionsClientSimpleChatModel; const testState: EcsMappingState = ecsTestState; diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts index 038c5711ab2cb..cd82c899b34fa 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts @@ -9,10 +9,14 @@ import { FakeLLM } from '@langchain/core/utils/testing'; import { handleEcsMapping } from './mapping'; import type { 
EcsMappingState } from '../../types'; import { ecsTestState } from '../../../__jest__/fixtures/ecs_mapping'; +import { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; const mockLlm = new FakeLLM({ - response: '{ "message": "ll callback later."}', -}); + response: "I'll callback later.", +}) as unknown as ActionsClientChatOpenAI | ActionsClientSimpleChatModel; const testState: EcsMappingState = ecsTestState; diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts index 38861cfc702b3..704d81c928146 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts @@ -9,10 +9,14 @@ import { FakeLLM } from '@langchain/core/utils/testing'; import { handleMissingKeys } from './missing'; import type { EcsMappingState } from '../../types'; import { ecsTestState } from '../../../__jest__/fixtures/ecs_mapping'; +import { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; const mockLlm = new FakeLLM({ - response: '{ "message": "ll callback later."}', -}); + response: "I'll callback later.", +}) as unknown as ActionsClientChatOpenAI | ActionsClientSimpleChatModel; const testState: EcsMappingState = ecsTestState; diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/errors.test.ts b/x-pack/plugins/integration_assistant/server/graphs/related/errors.test.ts index 97d249455cafd..24dc4365dcbff 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/errors.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/errors.test.ts @@ -13,10 +13,14 @@ import { relatedMockProcessors, relatedExpectedHandlerResponse, } from '../../../__jest__/fixtures/related'; +import { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} 
from '@kbn/langchain/server/language_models'; const mockLlm = new FakeLLM({ response: JSON.stringify(relatedMockProcessors, null, 2), -}); +}) as unknown as ActionsClientChatOpenAI | ActionsClientSimpleChatModel; const testState: RelatedState = relatedTestState; diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/graph.test.ts b/x-pack/plugins/integration_assistant/server/graphs/related/graph.test.ts index eade32fbd8ab8..40989e9733800 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/graph.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/graph.test.ts @@ -22,10 +22,14 @@ import { handleReview } from './review'; import { handleRelated } from './related'; import { handleErrors } from './errors'; import { testPipeline, combineProcessors } from '../../util'; +import { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; const mockLlm = new FakeLLM({ response: "I'll callback later.", -}); +}) as unknown as ActionsClientChatOpenAI | ActionsClientSimpleChatModel; jest.mock('./errors'); jest.mock('./review'); diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts index 8170a145b08d8..3912fd492b827 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts @@ -4,8 +4,9 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ + import type { IScopedClusterClient } from '@kbn/core-elasticsearch-server'; -import type { StateGraphArgs } from '@langchain/langgraph'; +// import type { StateGraphArgs } from '@langchain/langgraph'; import { StateGraph, END, START } from '@langchain/langgraph'; import type { ActionsClientChatOpenAI, @@ -19,6 +20,7 @@ import { handleErrors } from './errors'; import { handleReview } from './review'; import { RELATED_ECS_FIELDS, RELATED_EXAMPLE_ANSWER } from './constants'; +// @ts-expect-error remove after bump to TS@5 const graphState: StateGraphArgs['channels'] = { lastExecutedChain: { value: (x: string, y?: string) => y ?? x, diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/related.test.ts b/x-pack/plugins/integration_assistant/server/graphs/related/related.test.ts index bcae2194ad889..3a741020fb530 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/related.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/related.test.ts @@ -13,10 +13,14 @@ import { relatedMockProcessors, relatedExpectedHandlerResponse, } from '../../../__jest__/fixtures/related'; +import { + ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; const mockLlm = new FakeLLM({ response: JSON.stringify(relatedMockProcessors, null, 2), -}); +}) as unknown as ActionsClientChatOpenAI | ActionsClientSimpleChatModel; const testState: RelatedState = relatedTestState; diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/review.test.ts b/x-pack/plugins/integration_assistant/server/graphs/related/review.test.ts index 570a0e3157bfe..475f0d72b988d 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/review.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/review.test.ts @@ -13,10 +13,14 @@ import { relatedMockProcessors, relatedExpectedHandlerResponse, } from '../../../__jest__/fixtures/related'; +import { + 
ActionsClientChatOpenAI, + ActionsClientSimpleChatModel, +} from '@kbn/langchain/server/language_models'; const mockLlm = new FakeLLM({ response: JSON.stringify(relatedMockProcessors, null, 2), -}); +}) as unknown as ActionsClientChatOpenAI | ActionsClientSimpleChatModel; const testState: RelatedState = relatedTestState; diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts b/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts index 60878fb82fbcb..c95a15cbe871d 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/fields.ts @@ -9,15 +9,11 @@ import nunjucks from 'nunjucks'; import { createSync, generateFields, mergeSamples } from '../util'; -interface Doc { - [key: string]: unknown; -} - export function createFieldMapping( packageName: string, dataStreamName: string, specificDataStreamDir: string, - docs: Doc[] + docs: object[] ): void { createBaseFields(specificDataStreamDir, packageName, dataStreamName); createCustomFields(specificDataStreamDir, docs); @@ -37,7 +33,7 @@ function createBaseFields( createSync(`${specificDataStreamDir}/base-fields.yml`, baseFields); } -function createCustomFields(specificDataStreamDir: string, pipelineResults: Doc[]): void { +function createCustomFields(specificDataStreamDir: string, pipelineResults: object[]): void { const mergedResults = mergeSamples(pipelineResults); const fieldKeys = generateFields(mergedResults); createSync(`${specificDataStreamDir}/fields/fields.yml`, fieldKeys); diff --git a/yarn.lock b/yarn.lock index 31ca09b91573a..0f76f99b0e808 100644 --- a/yarn.lock +++ b/yarn.lock @@ -6920,10 +6920,10 @@ zod "^3.22.4" zod-to-json-schema "^3.22.3" -"@langchain/langgraph@^0.0.21": - version "0.0.21" - resolved "https://registry.yarnpkg.com/@langchain/langgraph/-/langgraph-0.0.21.tgz#5037597a954abad9ed5f0a1742226f5fcf27e7d7" - integrity 
sha512-7jtVZFAwvxSbIribYNzGXYIRrsAXV7YF4u1Xcpd8MYNz8sD3h8+rpIOJcYF1AdFh6laajnz0Gv8abPBHHQ2QiQ== +"@langchain/langgraph@^0.0.22": + version "0.0.22" + resolved "https://registry.yarnpkg.com/@langchain/langgraph/-/langgraph-0.0.22.tgz#2426f37ffa62df729d02961a4cefd82ae15e777a" + integrity sha512-VdWUDRo/CXe1SjR34WxtbIwxIykSKjbdduKaNxCIPCZYxhfeL+NY3xi3F8ES6RTQV9gNYrl6ODuuXQtACQpK7g== dependencies: "@langchain/core" ">0.1.61 <0.3.0" uuid "^9.0.1" From fc15ed1c4e21e502372a95300ab7c2d3347e361f Mon Sep 17 00:00:00 2001 From: Patryk Kopycinski Date: Wed, 12 Jun 2024 23:44:36 +0200 Subject: [PATCH 59/62] fix --- .../integration_assistant/server/graphs/ecs/invalid.test.ts | 2 +- .../integration_assistant/server/graphs/ecs/mapping.test.ts | 2 +- .../integration_assistant/server/graphs/ecs/missing.test.ts | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts index 7f252a46709a0..ce1f76ce7a721 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/invalid.test.ts @@ -15,7 +15,7 @@ import { } from '@kbn/langchain/server/language_models'; const mockLlm = new FakeLLM({ - response: "I'll callback later.", + response: '{ "message": "ll callback later."}', }) as unknown as ActionsClientChatOpenAI | ActionsClientSimpleChatModel; const testState: EcsMappingState = ecsTestState; diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts index cd82c899b34fa..dbbfc0608d010 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/mapping.test.ts @@ -15,7 +15,7 @@ import { } from '@kbn/langchain/server/language_models'; const mockLlm = new FakeLLM({ - response: 
"I'll callback later.", + response: '{ "message": "ll callback later."}', }) as unknown as ActionsClientChatOpenAI | ActionsClientSimpleChatModel; const testState: EcsMappingState = ecsTestState; diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts index 704d81c928146..b369d28b1e177 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/missing.test.ts @@ -15,7 +15,7 @@ import { } from '@kbn/langchain/server/language_models'; const mockLlm = new FakeLLM({ - response: "I'll callback later.", + response: '{ "message": "ll callback later."}', }) as unknown as ActionsClientChatOpenAI | ActionsClientSimpleChatModel; const testState: EcsMappingState = ecsTestState; From f5b7853218754ecba82a69a080d495eeed4e0339 Mon Sep 17 00:00:00 2001 From: Bharat Pasupula Date: Thu, 13 Jun 2024 15:44:34 +0200 Subject: [PATCH 60/62] Ignore building system tests for custom packages --- .../integration_builder/build_integration.ts | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts b/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts index 26031695bb432..d2e29f769dd9a 100644 --- a/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts +++ b/x-pack/plugins/integration_assistant/server/integration_builder/build_integration.ts @@ -5,17 +5,16 @@ * 2.0. 
*/ -import { join as joinPath } from 'path'; -import { tmpdir } from 'os'; -import nunjucks from 'nunjucks'; import AdmZip from 'adm-zip'; -import type { Integration, DataStream } from '../../common'; -import { createPackageSystemTests } from './dev_folders'; -import { createDatastream } from './data_stream'; +import nunjucks from 'nunjucks'; +import { tmpdir } from 'os'; +import { join as joinPath } from 'path'; +import type { DataStream, Integration } from '../../common'; +import { copySync, createSync, ensureDirSync, generateUniqueId } from '../util'; import { createAgentInput } from './agent'; +import { createDatastream } from './data_stream'; import { createFieldMapping } from './fields'; import { createPipeline } from './pipeline'; -import { generateUniqueId, ensureDirSync, copySync, createSync } from '../util'; export async function buildPackage(integration: Integration): Promise { const templateDir = joinPath(__dirname, '../templates'); @@ -59,7 +58,8 @@ function createPackage(packageDir: string, integration: Integration): void { createChangelog(packageDir); createBuildFile(packageDir); createPackageManifest(packageDir, integration); - createPackageSystemTests(packageDir, integration); + // Skipping creation of system tests temporarily for custom package generation + // createPackageSystemTests(packageDir, integration); createLogo(packageDir, integration); } From 77214b0b84f6c8b935fc6d0be7cb3df476491611 Mon Sep 17 00:00:00 2001 From: Patryk Kopycinski Date: Thu, 13 Jun 2024 21:08:19 +0200 Subject: [PATCH 61/62] bump langgraph --- package.json | 2 +- tsconfig.base.json | 3 +-- typings/@langchain/langgraph/index.d.ts | 9 --------- .../server/graphs/categorization/graph.ts | 3 +-- .../server/graphs/ecs/graph.ts | 4 +--- .../server/graphs/related/graph.ts | 3 +-- .../server/connector_types/bedrock/bedrock.ts | 16 +++++++--------- yarn.lock | 8 ++++---- 8 files changed, 16 insertions(+), 32 deletions(-) delete mode 100644 typings/@langchain/langgraph/index.d.ts 
diff --git a/package.json b/package.json index caa34d116bed5..10a3935391d98 100644 --- a/package.json +++ b/package.json @@ -928,7 +928,7 @@ "@kbn/zod-helpers": "link:packages/kbn-zod-helpers", "@langchain/community": "^0.2.4", "@langchain/core": "0.2.3", - "@langchain/langgraph": "^0.0.22", + "@langchain/langgraph": "^0.0.23", "@langchain/openai": "^0.0.34", "@langtrase/trace-attributes": "^3.0.8", "@langtrase/typescript-sdk": "^2.2.1", diff --git a/tsconfig.base.json b/tsconfig.base.json index d9debbcf6a3f6..465783902a586 100644 --- a/tsconfig.base.json +++ b/tsconfig.base.json @@ -1864,8 +1864,7 @@ "@kbn/zod-helpers/*": ["packages/kbn-zod-helpers/*"], // END AUTOMATED PACKAGE LISTING // Allows for importing from `kibana` package for the exported types. - "@emotion/core": ["typings/@emotion"], - "@langchain/langgraph": ["typings/@langchain/langgraph"] + "@emotion/core": ["typings/@emotion"] }, // Support .tsx files and transform JSX into calls to React.createElement "jsx": "react", diff --git a/typings/@langchain/langgraph/index.d.ts b/typings/@langchain/langgraph/index.d.ts deleted file mode 100644 index eed5051d6a275..0000000000000 --- a/typings/@langchain/langgraph/index.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -declare module '@langchain/langgraph'; diff --git a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts index 4bd99b784b76e..6834fcf892a9e 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/categorization/graph.ts @@ -6,7 +6,7 @@ */ import type { IScopedClusterClient } from '@kbn/core-elasticsearch-server'; -// import type { StateGraphArgs } from '@langchain/langgraph'; +import type { StateGraphArgs } from '@langchain/langgraph'; import { StateGraph, END, START } from '@langchain/langgraph'; import type { ActionsClientChatOpenAI, @@ -22,7 +22,6 @@ import { handleErrors } from './errors'; import { handleReview } from './review'; import { CATEGORIZATION_EXAMPLE_ANSWER, ECS_CATEGORIES, ECS_TYPES } from './constants'; -// @ts-expect-error remove after bump to TS@5 const graphState: StateGraphArgs['channels'] = { lastExecutedChain: { value: (x: string, y?: string) => y ?? x, diff --git a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts index 173cacbdb51d7..8c7347a8b5058 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/ecs/graph.ts @@ -5,7 +5,7 @@ * 2.0. */ -// import type { StateGraphArgs } from '@langchain/langgraph'; +import type { StateGraphArgs } from '@langchain/langgraph'; import { StateGraph, END, START } from '@langchain/langgraph'; import type { ActionsClientChatOpenAI, @@ -21,8 +21,6 @@ import { handleInvalidEcs } from './invalid'; import { handleValidateMappings } from './validate'; import type { EcsMappingState } from '../../types'; -// @ts-expect-error remove after bump to TS@5 - const graphState: StateGraphArgs['channels'] = { ecs: { value: (x: string, y?: string) => y ?? 
x, diff --git a/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts index 3912fd492b827..9b50c05889402 100644 --- a/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts +++ b/x-pack/plugins/integration_assistant/server/graphs/related/graph.ts @@ -6,7 +6,7 @@ */ import type { IScopedClusterClient } from '@kbn/core-elasticsearch-server'; -// import type { StateGraphArgs } from '@langchain/langgraph'; +import type { StateGraphArgs } from '@langchain/langgraph'; import { StateGraph, END, START } from '@langchain/langgraph'; import type { ActionsClientChatOpenAI, @@ -20,7 +20,6 @@ import { handleErrors } from './errors'; import { handleReview } from './review'; import { RELATED_ECS_FIELDS, RELATED_EXAMPLE_ANSWER } from './constants'; -// @ts-expect-error remove after bump to TS@5 const graphState: StateGraphArgs['channels'] = { lastExecutedChain: { value: (x: string, y?: string) => y ?? x, diff --git a/x-pack/plugins/stack_connectors/server/connector_types/bedrock/bedrock.ts b/x-pack/plugins/stack_connectors/server/connector_types/bedrock/bedrock.ts index 8b05c30a5b0cb..c09313ac7c0fd 100644 --- a/x-pack/plugins/stack_connectors/server/connector_types/bedrock/bedrock.ts +++ b/x-pack/plugins/stack_connectors/server/connector_types/bedrock/bedrock.ts @@ -338,7 +338,7 @@ const formatBedrockBody = ({ }) => ({ anthropic_version: 'bedrock-2023-05-31', ...ensureMessageFormat(messages, system), - max_tokens: maxTokens, + max_tokens: DEFAULT_TOKEN_LIMIT, stop_sequences: stopSequences, temperature, }); @@ -357,10 +357,6 @@ const ensureMessageFormat = ( const newMessages = messages.reduce((acc: Array<{ role: string; content: string }>, m) => { const lastMessage = acc[acc.length - 1]; - if (m.role === 'system') { - system = `${system.length ? 
`${system}\n` : ''}${m.content}`; - return acc; - } if (lastMessage && lastMessage.role === m.role) { // Bedrock only accepts assistant and user roles. @@ -371,11 +367,13 @@ const ensureMessageFormat = ( ]; } + if (m.role === 'system') { + system = `${system.length ? `${system}\n` : ''}${m.content}`; + return acc; + } + // force role outside of system to ensure it is either assistant or user - return [ - ...acc, - { content: m.content, role: ['assistant', 'ai'].includes(m.role) ? 'assistant' : 'user' }, - ]; + return [...acc, { content: m.content, role: m.role === 'assistant' ? 'assistant' : 'user' }]; }, []); return system.length ? { system, messages: newMessages } : { messages: newMessages }; }; diff --git a/yarn.lock b/yarn.lock index 9c254c502b2c2..ba5dc9f7f501c 100644 --- a/yarn.lock +++ b/yarn.lock @@ -6924,10 +6924,10 @@ zod "^3.22.4" zod-to-json-schema "^3.22.3" -"@langchain/langgraph@^0.0.22": - version "0.0.22" - resolved "https://registry.yarnpkg.com/@langchain/langgraph/-/langgraph-0.0.22.tgz#2426f37ffa62df729d02961a4cefd82ae15e777a" - integrity sha512-VdWUDRo/CXe1SjR34WxtbIwxIykSKjbdduKaNxCIPCZYxhfeL+NY3xi3F8ES6RTQV9gNYrl6ODuuXQtACQpK7g== +"@langchain/langgraph@^0.0.23": + version "0.0.23" + resolved "https://registry.yarnpkg.com/@langchain/langgraph/-/langgraph-0.0.23.tgz#34b5ad5dc9fe644ee96bcfcf11197ec1d7f9e0e2" + integrity sha512-pXlcsBOseT5xdf9enUqbLQ/59LaZxgMI2dL2vFJ+EpcoK7bQnlzzhRtRPp+vubMyMeEKRoAXlaA9ObwpVi93CA== dependencies: "@langchain/core" ">0.1.61 <0.3.0" uuid "^9.0.1" From 3492c88dce98205776cff77ffcb038de339d95c0 Mon Sep 17 00:00:00 2001 From: Patryk Kopycinski Date: Thu, 13 Jun 2024 22:02:36 +0200 Subject: [PATCH 62/62] revert --- .../server/connector_types/bedrock/bedrock.ts | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/x-pack/plugins/stack_connectors/server/connector_types/bedrock/bedrock.ts b/x-pack/plugins/stack_connectors/server/connector_types/bedrock/bedrock.ts index 
c09313ac7c0fd..8b05c30a5b0cb 100644 --- a/x-pack/plugins/stack_connectors/server/connector_types/bedrock/bedrock.ts +++ b/x-pack/plugins/stack_connectors/server/connector_types/bedrock/bedrock.ts @@ -338,7 +338,7 @@ const formatBedrockBody = ({ }) => ({ anthropic_version: 'bedrock-2023-05-31', ...ensureMessageFormat(messages, system), - max_tokens: DEFAULT_TOKEN_LIMIT, + max_tokens: maxTokens, stop_sequences: stopSequences, temperature, }); @@ -357,6 +357,10 @@ const ensureMessageFormat = ( const newMessages = messages.reduce((acc: Array<{ role: string; content: string }>, m) => { const lastMessage = acc[acc.length - 1]; + if (m.role === 'system') { + system = `${system.length ? `${system}\n` : ''}${m.content}`; + return acc; + } if (lastMessage && lastMessage.role === m.role) { // Bedrock only accepts assistant and user roles. @@ -367,13 +371,11 @@ const ensureMessageFormat = ( ]; } - if (m.role === 'system') { - system = `${system.length ? `${system}\n` : ''}${m.content}`; - return acc; - } - // force role outside of system to ensure it is either assistant or user - return [...acc, { content: m.content, role: m.role === 'assistant' ? 'assistant' : 'user' }]; + return [ + ...acc, + { content: m.content, role: ['assistant', 'ai'].includes(m.role) ? 'assistant' : 'user' }, + ]; }, []); return system.length ? { system, messages: newMessages } : { messages: newMessages }; };