-
Notifications
You must be signed in to change notification settings - Fork 8.3k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge branch 'master' into api-flaky-test
- Loading branch information
Showing
70 changed files
with
2,582 additions
and
1,418 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -12,8 +12,7 @@ pipeline { | |
environment { | ||
BASE_DIR = 'src/github.com/elastic/kibana' | ||
HOME = "${env.WORKSPACE}" | ||
APM_ITS = 'apm-integration-testing' | ||
CYPRESS_DIR = 'x-pack/plugins/apm/e2e' | ||
E2E_DIR = 'x-pack/plugins/apm/e2e' | ||
PIPELINE_LOG_LEVEL = 'DEBUG' | ||
} | ||
options { | ||
|
@@ -43,32 +42,6 @@ pipeline { | |
env.APM_UPDATED = isGitRegionMatch(patterns: regexps) | ||
} | ||
} | ||
dir("${APM_ITS}"){ | ||
git changelog: false, | ||
credentialsId: 'f6c7695a-671e-4f4f-a331-acdce44ff9ba', | ||
poll: false, | ||
url: "[email protected]:elastic/${APM_ITS}.git" | ||
} | ||
} | ||
} | ||
stage('Start services') { | ||
options { skipDefaultCheckout() } | ||
when { | ||
anyOf { | ||
expression { return params.FORCE } | ||
expression { return env.APM_UPDATED != "false" } | ||
} | ||
} | ||
steps { | ||
notifyStatus('Starting services', 'PENDING') | ||
dir("${APM_ITS}"){ | ||
sh './scripts/compose.py start master --no-kibana' | ||
} | ||
} | ||
post { | ||
unsuccessful { | ||
notifyStatus('Environmental issue', 'FAILURE') | ||
} | ||
} | ||
} | ||
stage('Prepare Kibana') { | ||
|
@@ -85,7 +58,7 @@ pipeline { | |
steps { | ||
notifyStatus('Preparing kibana', 'PENDING') | ||
dir("${BASE_DIR}"){ | ||
sh script: "${CYPRESS_DIR}/ci/prepare-kibana.sh" | ||
sh "${E2E_DIR}/ci/prepare-kibana.sh" | ||
} | ||
} | ||
post { | ||
|
@@ -105,24 +78,20 @@ pipeline { | |
steps{ | ||
notifyStatus('Running smoke tests', 'PENDING') | ||
dir("${BASE_DIR}"){ | ||
sh ''' | ||
jobs -l | ||
docker build --tag cypress --build-arg NODE_VERSION=$(cat .node-version) ${CYPRESS_DIR}/ci | ||
docker run --rm -t --user "$(id -u):$(id -g)" \ | ||
-v `pwd`:/app --network="host" \ | ||
--name cypress cypress''' | ||
sh "${E2E_DIR}/ci/run-e2e.sh" | ||
} | ||
} | ||
post { | ||
always { | ||
dir("${BASE_DIR}"){ | ||
archiveArtifacts(allowEmptyArchive: false, artifacts: "${CYPRESS_DIR}/**/screenshots/**,${CYPRESS_DIR}/**/videos/**,${CYPRESS_DIR}/**/test-results/*e2e-tests.xml") | ||
junit(allowEmptyResults: true, testResults: "${CYPRESS_DIR}/**/test-results/*e2e-tests.xml") | ||
} | ||
dir("${APM_ITS}"){ | ||
sh 'docker-compose logs > apm-its.log || true' | ||
sh 'docker-compose down -v || true' | ||
archiveArtifacts(allowEmptyArchive: false, artifacts: 'apm-its.log') | ||
dir("${BASE_DIR}/${E2E_DIR}"){ | ||
archiveArtifacts(allowEmptyArchive: false, artifacts: 'cypress/screenshots/**,cypress/videos/**,cypress/test-results/*e2e-tests.xml') | ||
junit(allowEmptyResults: true, testResults: 'cypress/test-results/*e2e-tests.xml') | ||
dir('tmp/apm-integration-testing'){ | ||
sh 'docker-compose logs > apm-its-docker.log || true' | ||
sh 'docker-compose down -v || true' | ||
archiveArtifacts(allowEmptyArchive: true, artifacts: 'apm-its-docker.log') | ||
} | ||
archiveArtifacts(allowEmptyArchive: true, artifacts: 'tmp/*.log') | ||
} | ||
} | ||
unsuccessful { | ||
|
@@ -137,7 +106,7 @@ pipeline { | |
post { | ||
always { | ||
dir("${BASE_DIR}"){ | ||
archiveArtifacts(allowEmptyArchive: true, artifacts: "${CYPRESS_DIR}/ingest-data.log,kibana.log") | ||
archiveArtifacts(allowEmptyArchive: true, artifacts: "${E2E_DIR}/kibana.log") | ||
} | ||
} | ||
} | ||
|
Validating CODEOWNERS rules …
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added
BIN
+55.2 KB
docs/management/ingest-pipelines/images/ingest-pipeline-privileges.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
144 changes: 144 additions & 0 deletions
144
docs/management/ingest-pipelines/ingest-pipelines.asciidoc
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,144 @@ | ||
[role="xpack"] | ||
[[ingest-node-pipelines]] | ||
== Ingest Node Pipelines | ||
|
||
*Ingest Node Pipelines* enables you to create and manage {es} | ||
pipelines that perform common transformations and | ||
enrichments on your data. For example, you might remove a field, | ||
rename an existing field, or set a new field. | ||
|
||
You’ll find *Ingest Node Pipelines* in *Management > Elasticsearch*. With this feature, you can: | ||
|
||
* View a list of your pipelines and drill down into details. | ||
* Create a pipeline that defines a series of tasks, known as processors. | ||
* Test a pipeline before feeding it with real data to ensure the pipeline works as expected. | ||
* Delete a pipeline that is no longer needed. | ||
|
||
[role="screenshot"] | ||
image:management/ingest-pipelines/images/ingest-pipeline-list.png["Ingest node pipeline list"] | ||
|
||
[float] | ||
=== Required permissions | ||
|
||
The minimum required permissions to access *Ingest Node Pipelines* are | ||
the `manage_pipeline` and `cluster:monitor/nodes/info` cluster privileges. | ||
|
||
You can add these privileges in *Management > Security > Roles*. | ||
|
||
[role="screenshot"] | ||
image:management/ingest-pipelines/images/ingest-pipeline-privileges.png["Privileges required for Ingest Node Pipelines"] | ||
|
||
[float] | ||
[[ingest-node-pipelines-manage]] | ||
=== Manage pipelines | ||
|
||
From the list view, you can drill down into the details of a pipeline. | ||
To | ||
edit, clone, or delete a pipeline, use the *Actions* menu. | ||
|
||
If you don’t have any pipelines, you can create one using the | ||
*Create pipeline* form. You’ll define processors to transform documents | ||
in a specific way. To handle exceptions, you can optionally define | ||
failure processors to execute immediately after a failed processor. | ||
Before creating the pipeline, you can verify it provides the expected output. | ||
|
||
[float] | ||
[[ingest-node-pipelines-example]] | ||
==== Example: Create a pipeline | ||
|
||
In this example, you’ll create a pipeline to handle server logs in the | ||
Common Log Format. The log looks similar to this: | ||
|
||
[source,js] | ||
---------------------------------- | ||
212.87.37.154 - - [05/May/2020:16:21:15 +0000] \"GET /favicon.ico HTTP/1.1\" | ||
200 3638 \"-\" \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) | ||
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36\" | ||
---------------------------------- | ||
|
||
The log contains an IP address, timestamp, and user agent. You want to give | ||
these three items their own field in {es} for fast search and visualization. | ||
You also want to know where the request is coming from. | ||
|
||
. In *Ingest Node Pipelines*, click *Create a pipeline*. | ||
. Provide a name and description for the pipeline. | ||
. Define the processors: | ||
+ | ||
[source,js] | ||
---------------------------------- | ||
[ | ||
{ | ||
"grok": { | ||
"field": "message", | ||
"patterns": ["%{IPORHOST:clientip} %{USER:ident} %{USER:auth} \\[%{HTTPDATE:timestamp}\\] \"%{WORD:verb} %{DATA:request} HTTP/%{NUMBER:httpversion}\" %{NUMBER:response:int} (?:-|%{NUMBER:bytes:int}) %{QS:referrer} %{QS:agent}"] | ||
} | ||
}, | ||
{ | ||
"date": { | ||
"field": "timestamp", | ||
"formats": [ "dd/MMM/YYYY:HH:mm:ss Z" ] | ||
} | ||
}, | ||
{ | ||
"geoip": { | ||
"field": "clientip" | ||
} | ||
}, | ||
{ | ||
"user_agent": { | ||
"field": "agent" | ||
} | ||
} | ||
] | ||
---------------------------------- | ||
+ | ||
This code defines four {ref}/ingest-processors.html[processors] that run sequentially: | ||
{ref}/grok-processor.html[grok], {ref}/date-processor.html[date], | ||
{ref}/geoip-processor.html[geoip], and {ref}/user-agent-processor.html[user_agent]. | ||
Your form should look similar to this: | ||
+ | ||
[role="screenshot"] | ||
image:management/ingest-pipelines/images/ingest-pipeline-processor.png["Processors for Ingest Node Pipelines"] | ||
|
||
. To verify that the pipeline gives the expected outcome, click *Test pipeline*. | ||
|
||
. In the *Document* tab, provide the following sample document for testing: | ||
+ | ||
[source,js] | ||
---------------------------------- | ||
[ | ||
{ | ||
"_source": { | ||
"message": "212.87.37.154 - - [05/May/2020:16:21:15 +0000] \"GET /favicon.ico HTTP/1.1\" 200 3638 \"-\" \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36\"" | ||
} | ||
} | ||
] | ||
---------------------------------- | ||
|
||
. Click *Run the pipeline* and check if the pipeline worked as expected. | ||
+ | ||
You can also | ||
view the verbose output and refresh the output from this view. | ||
|
||
. If everything looks correct, close the panel, and then click *Create pipeline*. | ||
+ | ||
At this point, you’re ready to use the Elasticsearch index API to load | ||
the logs data. | ||
|
||
. In the Kibana Console, index a document with the pipeline | ||
you created. | ||
+ | ||
[source,js] | ||
---------------------------------- | ||
PUT my-index/_doc/1?pipeline=access_logs | ||
{ | ||
"message": "212.87.37.154 - - [05/May/2020:16:21:15 +0000] \"GET /favicon.ico HTTP/1.1\" 200 3638 \"-\" \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36\"" | ||
} | ||
---------------------------------- | ||
|
||
. To verify, run: | ||
+ | ||
[source,js] | ||
---------------------------------- | ||
GET my-index/_doc/1 | ||
---------------------------------- |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,30 @@ | ||
[role="xpack"] | ||
[[uptime-alerting]] | ||
|
||
== Uptime alerting | ||
|
||
The Uptime app integrates with Kibana's {kibana-ref}/alerting-getting-started.html[alerting and actions] | ||
feature. It provides a set of built-in actions and Uptime specific threshold alerts for you to use | ||
and enables central management of all alerts from <<management, Kibana Management>>. | ||
|
||
[float] | ||
=== Monitor status alerts | ||
|
||
To receive alerts when a monitor goes down, use the alerting menu at the top of the | ||
overview page. Use a query in the alert flyout to determine which monitors to check | ||
with your alert. If you already have a query in the overview page search bar, it will | ||
be carried over into this box. | ||
|
||
[role="screenshot"] | ||
image::uptime/images/monitor-status-alert-flyout.png[Create monitor status alert flyout] | ||
|
||
[float] | ||
=== TLS alerts | ||
|
||
Uptime also provides the ability to create an alert that will notify you when one or | ||
more of your monitors have a TLS certificate that will expire within some threshold, | ||
or when its age exceeds a limit. The values for these thresholds are configurable on | ||
the <<uptime-settings, Settings page>>. | ||
|
||
[role="screenshot"] | ||
image::uptime/images/tls-alert-flyout.png[Create TLS alert flyout] |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,15 @@ | ||
[role="xpack"] | ||
[[uptime-certificates]] | ||
|
||
== Certificates | ||
|
||
[role="screenshot"] | ||
image::uptime/images/certificates-page.png[Certificates] | ||
|
||
The certificates page allows you to visualize TLS certificate data in your indices. In addition to the | ||
common name, associated monitors, issuer information, and SHA fingerprints, Uptime also assigns a status | ||
derived from the threshold values in the <<uptime-settings, Settings page>>. | ||
|
||
Several of the columns on this page are sortable. You can use the search bar at the top of the view | ||
to find values in most of the TLS-related fields in your Uptime indices. Additionally, you can | ||
create a TLS alert using the `Alerts` dropdown at the top of the page. |
Binary file not shown.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.