Comparing changes

This is a direct comparison between two commits made in the elastic/beats repository.

base repository: elastic/beats
base: 75843bba1f807263eb00d0e57434ffbc7882af19
head repository: elastic/beats
compare: 49102e952d52f3568ac41bb50d430c79dcdf9f3a
14 changes: 12 additions & 2 deletions .buildkite/packaging.pipeline.yml
@@ -28,6 +28,7 @@ steps:
- make beats-dashboards
env:
SNAPSHOT: true
DEV: true
artifact_paths:
- build/distributions/**/*

@@ -42,6 +43,9 @@ steps:
commands:
- make build/distributions/dependencies.csv
- make beats-dashboards
env:
SNAPSHOT: false
DEV: false
artifact_paths:
- build/distributions/**/*

@@ -53,6 +57,7 @@ steps:
env:
PLATFORMS: "${PLATFORMS}"
SNAPSHOT: true
DEV: true
command: ".buildkite/scripts/packaging/package-dra.sh {{matrix}}"
agents:
provider: gcp
@@ -82,6 +87,7 @@ steps:
PLATFORMS: "${PLATFORMS_ARM}"
PACKAGES: "docker"
SNAPSHOT: true
DEV: true
command: ".buildkite/scripts/packaging/package-dra.sh {{matrix}}"
agents:
provider: "aws"
@@ -107,6 +113,7 @@ steps:
env:
PLATFORMS: "${PLATFORMS}"
SNAPSHOT: true
DEV: true
command: ".buildkite/scripts/packaging/package-dra.sh x-pack/agentbeat"
agents:
provider: gcp
@@ -125,6 +132,7 @@ steps:
env:
PLATFORMS: "${PLATFORMS}"
SNAPSHOT: false
DEV: false
command: ".buildkite/scripts/packaging/package-dra.sh {{matrix}}"
agents:
provider: gcp
@@ -154,6 +162,7 @@ steps:
PLATFORMS: "${PLATFORMS_ARM}"
PACKAGES: "docker"
SNAPSHOT: false
DEV: false
command: ".buildkite/scripts/packaging/package-dra.sh {{matrix}}"
agents:
provider: "aws"
@@ -179,6 +188,7 @@ steps:
env:
PLATFORMS: "${PLATFORMS}"
SNAPSHOT: false
DEV: false
command: ".buildkite/scripts/packaging/package-dra.sh x-pack/agentbeat"
agents:
provider: gcp
@@ -201,7 +211,7 @@ steps:
- dashboards-snapshot
command: |
buildkite-agent artifact download "build/**/*" .
.buildkite/scripts/packaging/prepare-release-manager.sh
.buildkite/scripts/packaging/prepare-release-manager.sh snapshot
.buildkite/scripts/dra.sh
agents:
provider: gcp
@@ -219,7 +229,7 @@ steps:
- dashboards-staging
command: |
buildkite-agent artifact download "build/**" .
.buildkite/scripts/packaging/prepare-release-manager.sh
.buildkite/scripts/packaging/prepare-release-manager.sh staging
.buildkite/scripts/dra.sh
agents:
provider: gcp
110 changes: 110 additions & 0 deletions .buildkite/pipeline-scheduler.py
@@ -0,0 +1,110 @@
#!/usr/bin/env python3

'''
This script is used by schedule-type pipelines
to automate triggering other pipelines (e.g. Iron Bank validation)
against release branches.
Expects a (comma separated) env var PIPELINES_TO_TRIGGER.
An optional EXCLUDE_BRANCHES (comma separated) env var can also be supplied to skip specific branches (e.g. EXCLUDE_BRANCHES="main").
For background info see:
https://elasticco.atlassian.net/browse/ENGPRD-318 /
https://github.com/elastic/ingest-dev/issues/2664
'''

import json
import os
import sys
import time
import typing
import urllib.request
from ruamel.yaml import YAML


ACTIVE_BRANCHES_URL = "https://storage.googleapis.com/artifacts-api/snapshots/branches.json"


class InputError(Exception):
    """ Exception raised for input errors """


class UrlOpenError(Exception):
    """ Exception raised when hitting errors retrieving content from a URL """


def fail_with_error(msg):
    print(f"""^^^ +++
Error: [{msg}].
Exiting now.
""")
    exit(1)


def parse_csv_env_var(env_var_name: str, is_valid=False) -> typing.List:
    if is_valid and env_var_name not in os.environ.keys():
        fail_with_error(msg=f'Required environment variable [{env_var_name}] is missing.')

    env_var = os.getenv(env_var_name, "")

    if is_valid and env_var.strip() == "":
        fail_with_error(msg=f'Required environment variable [{env_var_name}] is empty.')
    return env_var.split(",")


def get_json_with_retries(uri, retries=3, delay=5) -> typing.Dict:
    for _ in range(retries):
        try:
            with urllib.request.urlopen(uri) as response:
                data = response.read().decode('utf-8')
                return json.loads(data)
        except UrlOpenError as e:
            print(f"Error: [{e}] when downloading from [{uri}]")
            print(f"Retrying in {delay} seconds ...")
            time.sleep(delay)
        except json.JSONDecodeError as e:
            fail_with_error(f"Error [{e}] when deserializing JSON from [{uri}]")
    fail_with_error(f"Failed to retrieve JSON content from [{uri}] after [{retries}] retries")
    return {}  # for IDE typing checks


def get_release_branches() -> typing.List[str]:
    resp = get_json_with_retries(uri=ACTIVE_BRANCHES_URL)
    try:
        release_branches = [branch for branch in resp["branches"]]
    except KeyError:
        fail_with_error(f'''Didn't find the expected structure ["branches"] in the response [{resp}] from [{ACTIVE_BRANCHES_URL}]''')

    return release_branches


def generate_pipeline(pipelines_to_trigger: typing.List[str], branches: typing.List[str]):
    generated_pipeline = {"steps": []}

    for pipeline in pipelines_to_trigger:
        for branch in branches:
            trigger = {
                "trigger": pipeline,
                "label": f":testexecute: Triggering {pipeline} / {branch}",
                "build": {
                    "branch": branch,
                    "message": f":testexecute: Scheduled build for {branch}"
                }
            }
            generated_pipeline["steps"].append(trigger)

    return generated_pipeline


if __name__ == '__main__':
    pipelines_to_trigger = parse_csv_env_var(env_var_name="PIPELINES_TO_TRIGGER", is_valid=True)
    release_branches = get_release_branches()
    exclude_branches = parse_csv_env_var(env_var_name="EXCLUDE_BRANCHES")

    target_branches = sorted(list(set(release_branches).difference(exclude_branches)))
    if len(target_branches) == 0 or target_branches[0].isspace():
        fail_with_error(f"Calculated target branches were empty! You passed EXCLUDE_BRANCHES={exclude_branches} and release branches are {release_branches}, the difference of which results in {target_branches}.")

    pipeline = generate_pipeline(pipelines_to_trigger, branches=target_branches)
    print('# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json')
    YAML().dump(pipeline, sys.stdout)
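
For illustration, here is a minimal sketch (not part of this change) that mirrors generate_pipeline() with made-up inputs, to show the shape of the trigger pipeline the script prints. The pipeline slug and branch names below are hypothetical, and the assumption is that a schedule-type pipeline pipes this output into buildkite-agent pipeline upload.

# Illustrative sketch only: reproduces the trigger-step construction from
# .buildkite/pipeline-scheduler.py for hypothetical inputs.
import sys

from ruamel.yaml import YAML

pipelines_to_trigger = ["beats-ironbank-validation"]  # hypothetical pipeline slug
target_branches = ["8.14", "main"]                    # hypothetical active branches

generated_pipeline = {"steps": []}
for pipeline in pipelines_to_trigger:
    for branch in target_branches:
        generated_pipeline["steps"].append({
            "trigger": pipeline,
            "label": f":testexecute: Triggering {pipeline} / {branch}",
            "build": {
                "branch": branch,
                "message": f":testexecute: Scheduled build for {branch}",
            },
        })

# Emits one trigger step per (pipeline, branch) pair; the real script prepends
# the yaml-language-server schema comment before dumping to stdout.
YAML().dump(generated_pipeline, sys.stdout)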
3 changes: 3 additions & 0 deletions CHANGELOG.next.asciidoc
@@ -96,6 +96,9 @@ https://github.com/elastic/beats/compare/v8.8.1\...main[Check the HEAD diff]
- Set field types to correctly match ECS in sessionmd processor {issue}38955[38955] {pull}38994[38994]
- Keep process info on exited processes, to avoid failing to enrich events in sessionmd processor {pull}39173[39173]

- Prevent a scenario where file events for children of a watched directory could be lost in the recursive fsnotify backend of the auditbeat file integrity module {pull}39133[39133]


*Filebeat*

- [Gcs Input] - Added missing locks for safe concurrency {pull}34914[34914]
7 changes: 6 additions & 1 deletion auditbeat/module/file_integrity/monitor/monitor_test.go
@@ -144,6 +144,11 @@ func TestRecursiveSubdirPermissions(t *testing.T) {
t.Skip("Skipping permissions test on Windows")
}

if os.Getuid() == 0 {
t.Skip("skipping as root can access every file and thus this unittest will fail")
return
}

// Create dir to be watched

dir, err := os.MkdirTemp("", "monitor")
@@ -202,7 +207,7 @@ func TestRecursiveSubdirPermissions(t *testing.T) {
for {
// No event is received
ev, err := readTimeout(t, watcher)
if err == errReadTimeout {
if errors.Is(err, errReadTimeout) {
break
}
assertNoError(t, err)
54 changes: 40 additions & 14 deletions auditbeat/module/file_integrity/monitor/recursive.go
@@ -84,37 +84,63 @@ func (watcher *recursiveWatcher) ErrorChannel() <-chan error {
return watcher.inner.Errors
}

func (watcher *recursiveWatcher) watchFile(path string, info os.FileInfo) error {
var err error
if info == nil {
info, err = os.Lstat(path)
if err != nil {
return err
}
}

if info.IsDir() {
if err = watcher.tree.AddDir(path); err != nil {
return err
}

if err = watcher.inner.Add(path); err != nil {
return err
}

return nil
}

return watcher.tree.AddFile(path)
}

func (watcher *recursiveWatcher) addRecursive(path string) error {
if watcher.isExcludedPath(path) {
return nil
}

if err := watcher.watchFile(path, nil); err != nil {
return fmt.Errorf("failed adding watcher to '%s': %w", path, err)
}

var errs multierror.Errors
err := filepath.Walk(path, func(path string, info os.FileInfo, fnErr error) error {
if watcher.isExcludedPath(path) {
err := filepath.Walk(path, func(walkPath string, info os.FileInfo, fnErr error) error {
if walkPath == path {
return nil
}

if watcher.isExcludedPath(walkPath) {
return nil
}

if fnErr != nil {
errs = append(errs, fmt.Errorf("error walking path '%s': %w", path, fnErr))
errs = append(errs, fmt.Errorf("error walking path '%s': %w", walkPath, fnErr))
// If FileInfo is not nil, the directory entry can be processed
// even if there was some error
if info == nil {
return nil
}
}
var err error
if info.IsDir() {
if err = watcher.tree.AddDir(path); err == nil {
if err = watcher.inner.Add(path); err != nil {
errs = append(errs, fmt.Errorf("failed adding watcher to '%s': %w", path, err))
return nil
}
}
} else {
err = watcher.tree.AddFile(path)

if err := watcher.watchFile(walkPath, info); err != nil {
errs = append(errs, fmt.Errorf("failed adding watcher to '%s': %w", walkPath, err))
}
return err

return nil
})
watcher.log.Debugw("Added recursive watch", "path", path)

6 changes: 3 additions & 3 deletions testing/environments/snapshot.yml
@@ -3,7 +3,7 @@
version: '2.3'
services:
elasticsearch:
image: docker.elastic.co/elasticsearch/elasticsearch:8.15.0-f50da2d9-SNAPSHOT
image: docker.elastic.co/elasticsearch/elasticsearch:8.15.0-eb13af64-SNAPSHOT
# When extend is used it merges healthcheck.tests, see:
# https://github.com/docker/compose/issues/8962
# healthcheck:
@@ -31,7 +31,7 @@ services:
- "./docker/elasticsearch/users_roles:/usr/share/elasticsearch/config/users_roles"

logstash:
image: docker.elastic.co/logstash/logstash:8.15.0-f50da2d9-SNAPSHOT
image: docker.elastic.co/logstash/logstash:8.15.0-eb13af64-SNAPSHOT
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:9600/_node/stats"]
retries: 600
@@ -44,7 +44,7 @@ services:
- 5055:5055

kibana:
image: docker.elastic.co/kibana/kibana:8.15.0-f50da2d9-SNAPSHOT
image: docker.elastic.co/kibana/kibana:8.15.0-eb13af64-SNAPSHOT
environment:
- "ELASTICSEARCH_USERNAME=kibana_system_user"
- "ELASTICSEARCH_PASSWORD=testing"