
[logstash] align logstash logs ingestion #4206

Merged · 13 commits · Sep 21, 2022
2 changes: 1 addition & 1 deletion packages/logstash/_dev/build/build.yml
@@ -1,3 +1,3 @@
 dependencies:
   ecs:
-    reference: git@1.12
+    reference: git@8.4
2 changes: 1 addition & 1 deletion packages/logstash/_dev/build/docs/README.md
@@ -4,7 +4,7 @@ The `logstash` package collects metrics and logs of Logstash.

## Compatibility

-The `logstash` package works with Logstash 7.3.0 and later
+The `logstash` package works with Logstash 8.5.0 and later

## Logs

102 changes: 102 additions & 0 deletions packages/logstash/_dev/deploy/docker/config/log4j2.properties
@@ -0,0 +1,102 @@
status = error
name = LogstashPropertiesConfig

appender.console.type = Console
appender.console.name = plain_console
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c]%notEmpty{[%X{pipeline.id}]}%notEmpty{[%X{plugin.id}]} %m%n

appender.json_console.type = Console
appender.json_console.name = json_console
appender.json_console.layout.type = JSONLayout
appender.json_console.layout.compact = true
appender.json_console.layout.eventEol = true

appender.rolling.type = RollingFile
appender.rolling.name = plain_rolling
appender.rolling.fileName = ${sys:ls.logs}/logstash-plain.log
appender.rolling.filePattern = ${sys:ls.logs}/logstash-plain-%d{yyyy-MM-dd}-%i.log.gz
appender.rolling.policies.type = Policies
appender.rolling.policies.time.type = TimeBasedTriggeringPolicy
appender.rolling.policies.time.interval = 1
appender.rolling.policies.time.modulate = true
appender.rolling.layout.type = PatternLayout
appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c]%notEmpty{[%X{pipeline.id}]}%notEmpty{[%X{plugin.id}]} %m%n
appender.rolling.policies.size.type = SizeBasedTriggeringPolicy
appender.rolling.policies.size.size = 100MB
appender.rolling.strategy.type = DefaultRolloverStrategy
appender.rolling.strategy.max = 30

appender.json_rolling.type = RollingFile
appender.json_rolling.name = json_rolling
appender.json_rolling.fileName = ${sys:ls.logs}/logstash-json.log
appender.json_rolling.filePattern = ${sys:ls.logs}/logstash-json-%d{yyyy-MM-dd}-%i.log.gz
appender.json_rolling.policies.type = Policies
appender.json_rolling.policies.time.type = TimeBasedTriggeringPolicy
appender.json_rolling.policies.time.interval = 1
appender.json_rolling.policies.time.modulate = true
appender.json_rolling.layout.type = JSONLayout
appender.json_rolling.layout.compact = true
appender.json_rolling.layout.eventEol = true
appender.json_rolling.policies.size.type = SizeBasedTriggeringPolicy
appender.json_rolling.policies.size.size = 100MB
appender.json_rolling.strategy.type = DefaultRolloverStrategy
appender.json_rolling.strategy.max = 30

rootLogger.level = ${sys:ls.log.level}
rootLogger.appenderRef.console.ref = ${sys:ls.log.format}_console
rootLogger.appenderRef.rolling.ref = ${sys:ls.log.format}_rolling
rootLogger.appenderRef.routing.ref = pipeline_routing_appender

# Slowlog

appender.console_slowlog.type = Console
appender.console_slowlog.name = plain_console_slowlog
appender.console_slowlog.layout.type = PatternLayout
appender.console_slowlog.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %m%n

appender.json_console_slowlog.type = Console
appender.json_console_slowlog.name = json_console_slowlog
appender.json_console_slowlog.layout.type = JSONLayout
appender.json_console_slowlog.layout.compact = true
appender.json_console_slowlog.layout.eventEol = true

appender.rolling_slowlog.type = RollingFile
appender.rolling_slowlog.name = plain_rolling_slowlog
appender.rolling_slowlog.fileName = ${sys:ls.logs}/logstash-slowlog-plain.log
appender.rolling_slowlog.filePattern = ${sys:ls.logs}/logstash-slowlog-plain-%d{yyyy-MM-dd}-%i.log.gz
appender.rolling_slowlog.policies.type = Policies
appender.rolling_slowlog.policies.time.type = TimeBasedTriggeringPolicy
appender.rolling_slowlog.policies.time.interval = 1
appender.rolling_slowlog.policies.time.modulate = true
appender.rolling_slowlog.layout.type = PatternLayout
appender.rolling_slowlog.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %m%n
appender.rolling_slowlog.policies.size.type = SizeBasedTriggeringPolicy
appender.rolling_slowlog.policies.size.size = 100MB
appender.rolling_slowlog.strategy.type = DefaultRolloverStrategy
appender.rolling_slowlog.strategy.max = 30

appender.json_rolling_slowlog.type = RollingFile
appender.json_rolling_slowlog.name = json_rolling_slowlog
appender.json_rolling_slowlog.fileName = ${sys:ls.logs}/logstash-slowlog-json.log
appender.json_rolling_slowlog.filePattern = ${sys:ls.logs}/logstash-slowlog-json-%d{yyyy-MM-dd}-%i.log.gz
appender.json_rolling_slowlog.policies.type = Policies
appender.json_rolling_slowlog.policies.time.type = TimeBasedTriggeringPolicy
appender.json_rolling_slowlog.policies.time.interval = 1
appender.json_rolling_slowlog.policies.time.modulate = true
appender.json_rolling_slowlog.layout.type = JSONLayout
appender.json_rolling_slowlog.layout.compact = true
appender.json_rolling_slowlog.layout.eventEol = true
appender.json_rolling_slowlog.policies.size.type = SizeBasedTriggeringPolicy
appender.json_rolling_slowlog.policies.size.size = 100MB
appender.json_rolling_slowlog.strategy.type = DefaultRolloverStrategy
appender.json_rolling_slowlog.strategy.max = 30

logger.slowlog.name = slowlog
logger.slowlog.level = info
logger.slowlog.appenderRef.console_slowlog.ref = ${sys:ls.log.format}_console_slowlog
logger.slowlog.appenderRef.rolling_slowlog.ref = ${sys:ls.log.format}_rolling_slowlog
logger.slowlog.additivity = false

logger.licensereader.name = logstash.licensechecker.licensereader
logger.licensereader.level = error
6 changes: 6 additions & 0 deletions packages/logstash/_dev/deploy/docker/config/logstash.yml
@@ -1,2 +1,8 @@
 http.host: "0.0.0.0"
 config.reload.automatic: true
+path.logs: /usr/share/logstash/logs
+log.format: json
+slowlog.threshold.warn: 1nanos
+slowlog.threshold.info: 1nanos
+slowlog.threshold.debug: 1nanos
+slowlog.threshold.trace: 1nanos
1 change: 1 addition & 0 deletions packages/logstash/_dev/deploy/docker/docker-compose.yml
@@ -5,5 +5,6 @@ services:
     volumes:
       - "./pipeline:/usr/share/logstash/pipeline"
       - "./config:/usr/share/logstash/config"
+      - ${SERVICE_LOGS_DIR}/logstash:/usr/share/logstash/logs
     ports:
       - "127.0.0.1:9600:9600"
@@ -4,6 +4,13 @@ input {
   }
 }
 
+filter {
+  sleep {
+    time => 1
+    every => 10
+  }
+}
+
 output {
   elasticsearch {
     hosts => ["https://elasticsearch:9200"]
@@ -0,0 +1,3 @@
dynamic_fields:
event.ingested: ".*"
event.created: ".*"
@@ -0,0 +1,4 @@
{"level":"WARN","loggerName":"logstash.outputs.elasticsearch","timeMillis":1663084834955,"thread":"[pipeline-with-memory-queue]-pipeline-manager","logEvent":{"message":"Restored connection to ES instance","url":"https://elastic:xxxxxx@elasticsearch:9200/"}}
{"level":"INFO","loggerName":"logstash.outputs.elasticsearch","timeMillis":1663084834958,"thread":"[pipeline-with-persisted-queue]-pipeline-manager","logEvent":{"message":"Elasticsearch version determined (8.5.0-SNAPSHOT)","es_version":8}}
{"level":"WARN","loggerName":"logstash.outputs.elasticsearch","timeMillis":1663084834961,"thread":"[pipeline-with-persisted-queue]-pipeline-manager","logEvent":{"message":"Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type","es_version":8}}
{"level":"INFO","loggerName":"logstash.outputs.elasticsearch","timeMillis":1663084834963,"thread":"[pipeline-with-memory-queue]-pipeline-manager","logEvent":{"message":"Elasticsearch version determined (8.5.0-SNAPSHOT)","es_version":8}}
@@ -0,0 +1,92 @@
{
"expected": [
{
"@timestamp": "2022-09-13T16:00:34.955Z",
"event": {
"created": "2022-09-13T16:00:34.955Z",
"ingested": "2022-09-20T13:49:39.087356123Z",
"kind": "event",
"type": "info"
},
"log": {
"level": "WARN"
},
"logstash": {
"log": {
"log_event": {
"url": "https://elastic:xxxxxx@elasticsearch:9200/"
},
"module": "logstash.outputs.elasticsearch",
"thread": "[pipeline-with-memory-queue]-pipeline-manager"
}
},
"message": "Restored connection to ES instance"
},
{
"@timestamp": "2022-09-13T16:00:34.958Z",
"event": {
"created": "2022-09-13T16:00:34.958Z",
"ingested": "2022-09-20T13:49:39.087384421Z",
"kind": "event",
"type": "info"
},
"log": {
"level": "INFO"
},
"logstash": {
"log": {
"log_event": {
"es_version": 8
},
"module": "logstash.outputs.elasticsearch",
"thread": "[pipeline-with-persisted-queue]-pipeline-manager"
}
},
"message": "Elasticsearch version determined (8.5.0-SNAPSHOT)"
},
{
"@timestamp": "2022-09-13T16:00:34.961Z",
"event": {
"created": "2022-09-13T16:00:34.961Z",
"ingested": "2022-09-20T13:49:39.087390447Z",
"kind": "event",
"type": "info"
},
"log": {
"level": "WARN"
},
"logstash": {
"log": {
"log_event": {
"es_version": 8
},
"module": "logstash.outputs.elasticsearch",
"thread": "[pipeline-with-persisted-queue]-pipeline-manager"
}
},
"message": "Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type"
},
{
"@timestamp": "2022-09-13T16:00:34.963Z",
"event": {
"created": "2022-09-13T16:00:34.963Z",
"ingested": "2022-09-20T13:49:39.087395138Z",
"kind": "event",
"type": "info"
},
"log": {
"level": "INFO"
},
"logstash": {
"log": {
"log_event": {
"es_version": 8
},
"module": "logstash.outputs.elasticsearch",
"thread": "[pipeline-with-memory-queue]-pipeline-manager"
}
},
"message": "Elasticsearch version determined (8.5.0-SNAPSHOT)"
}
]
}
@@ -0,0 +1,4 @@
[2022-09-14T09:31:20,934][INFO ][logstash.javapipeline ][standalone-pipeline] Pipeline started {"pipeline.id"=>"standalone-pipeline"}
[2022-09-14T09:31:20,934][INFO ][logstash.javapipeline ][pipeline-with-memory-queue] Pipeline started {"pipeline.id"=>"pipeline-with-memory-queue"}
[2022-09-14T09:31:20,936][INFO ][logstash.javapipeline ][pipeline-with-persisted-queue] Pipeline started {"pipeline.id"=>"pipeline-with-persisted-queue"}
[2022-09-14T09:31:20,946][INFO ][logstash.agent ] Pipelines running {:count=>3, :running_pipelines=>[:"pipeline-with-memory-queue", :"standalone-pipeline", :"pipeline-with-persisted-queue"], :non_running_pipelines=>[]}
@@ -0,0 +1,79 @@
{
"expected": [
{
"@timestamp": "2022-09-14T09:31:20.934Z",
"event": {
"created": "2022-09-14T09:31:20.934Z",
"ingested": "2022-09-20T13:49:39.150272446Z",
"kind": "event",
"type": "info"
},
"log": {
"level": "INFO"
},
"logstash": {
"log": {
"module": "logstash.javapipeline",
"pipeline_id": "standalone-pipeline"
}
},
"message": "Pipeline started {\"pipeline.id\"=\u003e\"standalone-pipeline\"}"
},
{
"@timestamp": "2022-09-14T09:31:20.934Z",
"event": {
"created": "2022-09-14T09:31:20.934Z",
"ingested": "2022-09-20T13:49:39.150301050Z",
"kind": "event",
"type": "info"
},
"log": {
"level": "INFO"
},
"logstash": {
"log": {
"module": "logstash.javapipeline",
"pipeline_id": "pipeline-with-memory-queue"
}
},
"message": "Pipeline started {\"pipeline.id\"=\u003e\"pipeline-with-memory-queue\"}"
},
{
"@timestamp": "2022-09-14T09:31:20.936Z",
"event": {
"created": "2022-09-14T09:31:20.936Z",
"ingested": "2022-09-20T13:49:39.150307033Z",
"kind": "event",
"type": "info"
},
"log": {
"level": "INFO"
},
"logstash": {
"log": {
"module": "logstash.javapipeline",
"pipeline_id": "pipeline-with-persisted-queue"
}
},
"message": "Pipeline started {\"pipeline.id\"=\u003e\"pipeline-with-persisted-queue\"}"
},
{
"@timestamp": "2022-09-14T09:31:20.946Z",
"event": {
"created": "2022-09-14T09:31:20.946Z",
"ingested": "2022-09-20T13:49:39.150342613Z",
"kind": "event",
"type": "info"
},
"log": {
"level": "INFO"
},
"logstash": {
"log": {
"module": "logstash.agent"
}
},
"message": "Pipelines running {:count=\u003e3, :running_pipelines=\u003e[:\"pipeline-with-memory-queue\", :\"standalone-pipeline\", :\"pipeline-with-persisted-queue\"], :non_running_pipelines=\u003e[]}"
}
]
}
@@ -4,9 +4,6 @@ processors:
   - set:
       field: event.ingested
       value: '{{_ingest.timestamp}}'
-  - rename:
-      field: '@timestamp'
-      target_field: event.created
   - grok:
       field: message
       patterns:
@@ -19,6 +16,9 @@
   - pipeline:
       if: ctx.first_char == '{'
       name: '{{ IngestPipeline "pipeline-json" }}'
+  - set:
+      copy_from: "@timestamp"
+      field: event.created
   - remove:
       field:
         - first_char
4 changes: 4 additions & 0 deletions packages/logstash/data_stream/log/fields/ecs.yml
@@ -4,3 +4,7 @@
   external: ecs
 - name: log.level
   external: ecs
+- name: ecs.version
+  external: ecs
+- name: log.file.path
+  external: ecs
14 changes: 7 additions & 7 deletions packages/logstash/data_stream/log/fields/fields.yml
@@ -1,3 +1,7 @@
+- name: input.type
+  type: keyword
+- name: log.offset
+  type: long
 - name: logstash.log
   title: Logstash
   type: group
@@ -13,13 +17,9 @@
       multi_fields:
         - name: text
           type: text
-    - name: log_event
-      type: object
-      description: |
-        key and value debugging information.
-    - name: log_event.action
-      type: keyword
     - name: pipeline_id
       type: keyword
+    - name: log_event
+      type: flattened
       description: |
-        The ID of the pipeline.
+        key and value debugging information.
1 change: 0 additions & 1 deletion packages/logstash/data_stream/log/manifest.yml
@@ -11,7 +11,6 @@ streams:
         required: true
         show_user: true
         default:
-          - /var/log/logstash/logstash-plain*.log
           - /var/log/logstash/logstash-json*.log
     template_path: log.yml.hbs
     title: Logstash logs