Merge branch 'master' into gardenlintpacket
efd6 committed Feb 22, 2022
2 parents 4cb3c99 + 128cbf4 commit 45526e7
Showing 118 changed files with 1,030 additions and 516 deletions.
26 changes: 26 additions & 0 deletions CHANGELOG.asciidoc
@@ -3,6 +3,32 @@
:issue: https://github.com/elastic/beats/issues/
:pull: https://github.com/elastic/beats/pull/

[[release-notes-8.0.1]]
=== Beats version 8.0.1
https://github.com/elastic/beats/compare/v8.0.0...v8.0.1[View commits]

==== Bugfixes

*Filebeat*

- tcp/unix input: Stop accepting connections after socket is closed. {pull}29712[29712]
- Fix using log_group_name_prefix in aws-cloudwatch input. {pull}29695[29695]
- Fix multiple instances of the same module configured within `filebeat.modules` in filebeat.yml. {issue}29649[29649] {pull}29952[29952]
- aws-s3: fix race condition in states used by s3-poller. {issue}30123[30123] {pull}30131[30131]
- Fix broken Kafka input. {issue}29746[29746] {pull}30277[30277]
- cisco module: Fix change that broke ASA and FTD configs that used `var.input: syslog`. {pull}30072[30072]

*Heartbeat*
- Fix missing mapping for `service.name`. {pull}30324[30324]

*Winlogbeat*

- Fix run loop when reading from evtx file {pull}30006[30006]


[[release-notes-8.0.0]]
=== Beats version 8.0.0
https://github.com/elastic/beats/compare/v7.17.0...v8.0.0[View commits]
14 changes: 5 additions & 9 deletions CHANGELOG.next.asciidoc
@@ -41,36 +41,32 @@ https://github.com/elastic/beats/compare/v7.0.0-alpha2...main[Check the HEAD diff]
- Fix field names with `add_network_direction` processor. {issue}29747[29747] {pull}29751[29751]
- Fix a logging bug: when `ssl.verification_mode` was set to `full` or `certificate`, the `test output` command incorrectly logged that TLS was disabled.
- Fix the ability for subcommands to be run properly from the Beats containers. {pull}30452[30452]
- Update the docker/distribution dependency library to fix a security issue concerning the OCI Manifest Type Confusion Issue. {pull}30462[30462]

*Auditbeat*

- auditd: Add error.message to events when processing fails. {pull}30009[30009]

*Filebeat*

- tcp/unix input: Stop accepting connections after socket is closed. {pull}29712[29712]
- Fix using log_group_name_prefix in aws-cloudwatch input. {pull}29695[29695]
- Fix multiple instances of the same module configured within `filebeat.modules` in filebeat.yml. {issue}29649[29649] {pull}29952[29952]
- aws-s3: fix race condition in states used by s3-poller. {issue}30123[30123] {pull}30131[30131]
- Fix broken Kafka input {issue}29746[29746] {pull}30277[30277]
- Report the starting offset of the line in `log.offset` when using `filestream` instead of the end to be ECS compliant. {pull}30445[30445]
- auditd: Prevent mapping explosion when truncated EXECVE records are ingested. {pull}30382[30382]
- elasticsearch: fix duplicate ingest when using a common appender configuration {issue}30428[30428] {pull}30440[30440]

*Heartbeat*
- Fix missing mapping for `service.name`. {pull}30324[30324]

*Metricbeat*

- Enhance Metricbeat on OpenShift documentation {pull}30054[30054]
- Fixed missing ZooKeeper metrics due to compatibility issues with versions >= 3.6.0 {pull}30068[30068]
- Fix Docker module: rename fields on dashboards. {pull}30500[30500]

*Packetbeat*


*Winlogbeat*

- Add provider names to Security pipeline conditional check in routing pipeline. {issue}27288[27288] {pull}29781[29781]
- Fix run loop when reading from evtx file {pull}30006[30006]

*Functionbeat*

@@ -135,10 +131,11 @@ https://github.com/elastic/beats/compare/v7.0.0-alpha2...main[Check the HEAD diff]
- Add gcp firestore metricset. {pull}29918[29918]
- Remove strict parsing on RabbitMQ module {pull}30090[30090]
- Add `kubernetes.container.status.last.reason` metric {pull}30306[30306]
- Extend documentation about `orchestrator.cluster` fields {pull}30518[30518]

*Packetbeat*

- Add automated OEM Npcap installation handling. {pull}29112[29112] {pull}30438[30438]
- Add automated OEM Npcap installation handling. {pull}29112[29112] {pull}30438[30438] {pull}30493[30493]
- Add support for capturing TLS random number and OCSP status request details. {issue}29962[29962] {pull}30102[30102]

*Functionbeat*
@@ -171,4 +168,3 @@ https://github.com/elastic/beats/compare/v7.0.0-alpha2...main[Check the HEAD diff]

==== Known Issue

*Journalbeat*
4 changes: 2 additions & 2 deletions NOTICE.txt
@@ -23825,11 +23825,11 @@ SOFTWARE.

--------------------------------------------------------------------------------
Dependency : github.com/docker/distribution
Version: v2.7.1+incompatible
Version: v2.8.0+incompatible
Licence type (autodetected): Apache-2.0
--------------------------------------------------------------------------------

Contents of probable licence file $GOMODCACHE/github.com/docker/distribution@v2.7.1+incompatible/LICENSE:
Contents of probable licence file $GOMODCACHE/github.com/docker/distribution@v2.8.0+incompatible/LICENSE:

Apache License
Version 2.0, January 2004
2 changes: 1 addition & 1 deletion auditbeat/cmd/root.go
@@ -54,7 +54,7 @@ var withECSVersion = processing.WithFields(common.MapStr{

// AuditbeatSettings contains the default settings for auditbeat
func AuditbeatSettings() instance.Settings {
var runFlags = pflag.NewFlagSet(Name, pflag.ExitOnError)
runFlags := pflag.NewFlagSet(Name, pflag.ExitOnError)
return instance.Settings{
RunFlags: runFlags,
Name: Name,
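The root.go change above simply swaps `var runFlags = …` for the equivalent short declaration `runFlags := …`. For context, a minimal standalone sketch of building a `pflag` flag set in that style (the flag name below is hypothetical, not part of this commit):

```go
package main

import (
	"fmt"
	"os"

	"github.com/spf13/pflag"
)

func main() {
	// Short variable declaration (:=) infers the type, matching the
	// style now used in AuditbeatSettings.
	runFlags := pflag.NewFlagSet("auditbeat", pflag.ExitOnError)

	// A hypothetical flag, purely for illustration.
	strict := runFlags.Bool("strict.perms", false, "strict permission checking on config files")

	// ExitOnError makes Parse exit the process on bad input instead of returning.
	if err := runFlags.Parse(os.Args[1:]); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(2)
	}
	fmt.Println("strict.perms =", *strict)
}
```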
2 changes: 1 addition & 1 deletion auditbeat/datastore/datastore.go
@@ -39,7 +39,7 @@ func OpenBucket(name string) (Bucket, error) {
initDatastoreOnce.Do(func() {
ds = &boltDatastore{
path: paths.Resolve(paths.Data, "beat.db"),
mode: 0600,
mode: 0o600,
}
})

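The datastore.go change switches the file mode literal from `0600` to `0o600`, the explicit octal prefix added in Go 1.13; the value is identical. A small sketch illustrating the equivalence (the file name is made up):

```go
package main

import (
	"fmt"
	"os"
)

func main() {
	// 0600 and 0o600 are the same value; 0o just makes the octal base
	// explicit (a literal form available since Go 1.13).
	fmt.Println(0600 == 0o600) // true

	// Owner read/write only, the same mode used for beat.db above.
	if err := os.WriteFile("example.db", []byte("data"), 0o600); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}
```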
19 changes: 9 additions & 10 deletions auditbeat/helper/hasher/hasher.go
@@ -33,7 +33,6 @@ import (
"github.com/cespare/xxhash/v2"
"github.com/dustin/go-humanize"
"github.com/joeshaw/multierror"
"github.com/pkg/errors"
"golang.org/x/crypto/blake2b"
"golang.org/x/crypto/sha3"
"golang.org/x/time/rate"
@@ -143,22 +142,22 @@ func (c *Config) Validate() error {

for _, ht := range c.HashTypes {
if !ht.IsValid() {
errs = append(errs, errors.Errorf("invalid hash_types value '%v'", ht))
errs = append(errs, fmt.Errorf("invalid hash_types value '%v'", ht))
}
}

var err error

c.MaxFileSizeBytes, err = humanize.ParseBytes(c.MaxFileSize)
if err != nil {
errs = append(errs, errors.Wrap(err, "invalid max_file_size value"))
errs = append(errs, fmt.Errorf("invalid max_file_size value: %w", err))
} else if c.MaxFileSizeBytes <= 0 {
errs = append(errs, errors.Errorf("max_file_size value (%v) must be positive", c.MaxFileSize))
errs = append(errs, fmt.Errorf("max_file_size value (%v) must be positive", c.MaxFileSize))
}

c.ScanRateBytesPerSec, err = humanize.ParseBytes(c.ScanRatePerSec)
if err != nil {
errs = append(errs, errors.Wrap(err, "invalid scan_rate_per_sec value"))
errs = append(errs, fmt.Errorf("invalid scan_rate_per_sec value: %w", err))
}

return errs.Err()
@@ -189,22 +188,22 @@ func NewFileHasher(c Config, done <-chan struct{}) (*FileHasher, error) {
func (hasher *FileHasher) HashFile(path string) (map[HashType]Digest, error) {
info, err := os.Stat(path)
if err != nil {
return nil, errors.Wrapf(err, "failed to stat file %v", path)
return nil, fmt.Errorf("failed to stat file %v: %w", path, err)
}

// Throttle reading and hashing rate.
if len(hasher.config.HashTypes) > 0 {
err = hasher.throttle(info.Size())
if err != nil {
return nil, errors.Wrapf(err, "failed to hash file %v", path)
return nil, fmt.Errorf("failed to hash file %v: %w", path, err)
}
}

var hashes []hash.Hash
for _, hashType := range hasher.config.HashTypes {
h, valid := validHashes[hashType]
if !valid {
return nil, errors.Errorf("unknown hash type '%v'", hashType)
return nil, fmt.Errorf("unknown hash type '%v'", hashType)
}

hashes = append(hashes, h())
@@ -213,13 +212,13 @@ func (hasher *FileHasher) HashFile(path string) (map[HashType]Digest, error) {
if len(hashes) > 0 {
f, err := file.ReadOpen(path)
if err != nil {
return nil, errors.Wrap(err, "failed to open file for hashing")
return nil, fmt.Errorf("failed to open file for hashing: %w", err)
}
defer f.Close()

hashWriter := multiWriter(hashes)
if _, err := io.Copy(hashWriter, f); err != nil {
return nil, errors.Wrap(err, "failed to calculate file hashes")
return nil, fmt.Errorf("failed to calculate file hashes: %w", err)
}

nameToHash := make(map[HashType]Digest, len(hashes))
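The hasher.go changes drop `github.com/pkg/errors` in favour of the standard library: `errors.Wrap`/`errors.Wrapf`/`errors.Errorf` become `fmt.Errorf`, with the `%w` verb keeping the wrapped error inspectable via `errors.Is` and `errors.As`. A minimal sketch of that pattern, using `os.Stat` as in `HashFile` (the path is arbitrary):

```go
package main

import (
	"errors"
	"fmt"
	"io/fs"
	"os"
)

// statFile mirrors the wrapping style now used in hasher.HashFile.
func statFile(path string) (os.FileInfo, error) {
	info, err := os.Stat(path)
	if err != nil {
		// %w wraps err so callers can still inspect it with errors.Is / errors.As.
		return nil, fmt.Errorf("failed to stat file %v: %w", path, err)
	}
	return info, nil
}

func main() {
	if _, err := statFile("/no/such/file"); err != nil {
		fmt.Println(err)
		// The wrapped error is still reachable through the chain.
		fmt.Println("not exist:", errors.Is(err, fs.ErrNotExist)) // true
	}
}
```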
8 changes: 4 additions & 4 deletions auditbeat/helper/hasher/hasher_test.go
@@ -18,12 +18,12 @@
package hasher

import (
"errors"
"io/ioutil"
"os"
"path/filepath"
"testing"

"github.com/pkg/errors"
"github.com/stretchr/testify/assert"
)

@@ -35,7 +35,7 @@ func TestHasher(t *testing.T) {
defer os.RemoveAll(dir)

file := filepath.Join(dir, "exe")
if err = ioutil.WriteFile(file, []byte("test exe\n"), 0600); err != nil {
if err = ioutil.WriteFile(file, []byte("test exe\n"), 0o600); err != nil {
t.Fatal(err)
}

@@ -69,7 +69,7 @@ func TestHasherLimits(t *testing.T) {
defer os.RemoveAll(dir)

file := filepath.Join(dir, "exe")
if err = ioutil.WriteFile(file, []byte("test exe\n"), 0600); err != nil {
if err = ioutil.WriteFile(file, []byte("test exe\n"), 0o600); err != nil {
t.Fatal(err)
}

@@ -88,5 +88,5 @@ func TestHasherLimits(t *testing.T) {
hashes, err := hasher.HashFile(file)
assert.Empty(t, hashes)
assert.Error(t, err)
assert.IsType(t, FileTooLargeError{}, errors.Cause(err))
assert.True(t, errors.As(err, &FileTooLargeError{}))
}
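The test now relies on `errors.As` from the standard library instead of `errors.Cause` plus a type assertion, so a typed error is found anywhere in a `%w`-wrapped chain. A self-contained sketch with a stand-in error type (`TooLargeError` here is hypothetical, not the real `hasher.FileTooLargeError`):

```go
package main

import (
	"errors"
	"fmt"
)

// TooLargeError is a hypothetical stand-in for a typed error such as
// hasher.FileTooLargeError.
type TooLargeError struct{ Size uint64 }

func (e TooLargeError) Error() string {
	return fmt.Sprintf("file too large: %d bytes", e.Size)
}

func hashFile() error {
	// Wrap the typed error with %w so it stays discoverable in the chain.
	return fmt.Errorf("failed to hash file: %w", TooLargeError{Size: 1 << 30})
}

func main() {
	err := hashFile()

	// errors.As walks the wrap chain and fills the target on a match,
	// replacing the old errors.Cause + assert.IsType pattern.
	var tooLarge TooLargeError
	if errors.As(err, &tooLarge) {
		fmt.Println("limit exceeded, size =", tooLarge.Size)
	}
}
```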
