Skip to content

Commit

Permalink
Add benchmark for filestream input
Browse files Browse the repository at this point in the history
Now we can quickly compare performance metrics when we make changes to the
filestream implementation without running the whole Filebeat.
  • Loading branch information
rdner committed Dec 6, 2023
1 parent bffffe5 commit 0c78985
Showing 1 changed file with 173 additions and 0 deletions.
173 changes: 173 additions & 0 deletions filebeat/input/filestream/input_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,173 @@
// Licensed to Elasticsearch B.V. under one or more contributor
// license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright
// ownership. Elasticsearch B.V. licenses this file to you under
// the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

package filestream

import (
"context"
"fmt"
"os"
"testing"
"time"

Check failure on line 26 in filebeat/input/filestream/input_test.go

View workflow job for this annotation

GitHub Actions / lint (windows)

File is not `goimports`-ed with -local github.com/elastic (goimports)

Check failure on line 26 in filebeat/input/filestream/input_test.go

View workflow job for this annotation

GitHub Actions / lint (linux)

File is not `goimports`-ed with -local github.com/elastic (goimports)
loginp "github.com/elastic/beats/v7/filebeat/input/filestream/internal/input-logfile"
v2 "github.com/elastic/beats/v7/filebeat/input/v2"
"github.com/elastic/beats/v7/libbeat/beat"
"github.com/elastic/beats/v7/libbeat/statestore"
"github.com/elastic/beats/v7/libbeat/statestore/storetest"
conf "github.com/elastic/elastic-agent-libs/config"
"github.com/elastic/elastic-agent-libs/logp"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

// runFilestreamBenchmark runs the entire filestream input with the in-memory registry and the test pipeline.
// `testID` must be unique for each test run
// `cfg` must be a valid YAML string containing valid filestream configuration
// `expEventCount` is an expected amount of produced events
// runFilestreamBenchmark runs the entire filestream input with the in-memory
// registry and the test pipeline.
// `testID` must be unique for each test run
// `cfg` must be a valid YAML string containing valid filestream configuration
// `expEventCount` is an expected amount of produced events
func runFilestreamBenchmark(b *testing.B, testID string, cfg string, expEventCount int) {
	logger := logp.L()
	c, err := conf.NewConfigWithYAML([]byte(cfg), cfg)
	require.NoError(b, err)

	p := Plugin(logger, createTestStore(b))
	input, err := p.Manager.Create(c)
	require.NoError(b, err)

	ctx, cancel := context.WithCancel(context.Background())
	// FIX: previously this variable was named `context`, shadowing the
	// imported stdlib `context` package for the rest of the function.
	inputCtx := v2.Context{
		Logger:      logger,
		ID:          testID,
		Cancelation: ctx,
	}

	var out []beat.Event
	connector := newTestPipeline(&out)
	done := make(chan struct{})
	go func() {
		// The input appends produced events to `out` via the test pipeline.
		// NOTE(review): `out` is appended to on this goroutine and read by
		// require.Eventually below without synchronization — a `-race` run
		// would likely flag this; consider guarding it with a mutex.
		err := input.Run(inputCtx, connector)
		assert.NoError(b, err)
		done <- struct{}{}
	}()

	// Block until the input has produced the expected number of events
	// (or fail the benchmark after 30s).
	require.Eventually(b, func() bool {
		return len(out) == expEventCount
	}, 30*time.Second, 10*time.Millisecond)

	cancel()
	<-done // for more stable results we should wait until the full shutdown
}

// generateFile creates a temporary log file containing `lineCount`
// generated lines and returns its full path. The file lives in a
// benchmark-scoped temp directory that the testing framework removes
// automatically.
func generateFile(b *testing.B, lineCount int) string {
	b.Helper()
	dir := b.TempDir()
	file, err := os.CreateTemp(dir, "lines.log")
	require.NoError(b, err)

	// FIX: the original issued one unchecked Fprintf (i.e. one write)
	// per line. Build the content in memory instead and flush it with a
	// single, error-checked Write.
	content := make([]byte, 0, lineCount*48)
	for i := 0; i < lineCount; i++ {
		content = fmt.Appendf(content, "rather mediocre log line message - %d\n", i)
	}
	_, err = file.Write(content)
	require.NoError(b, err)

	filename := file.Name()
	err = file.Close()
	require.NoError(b, err)
	return filename
}

// BenchmarkFilestream measures the throughput of the filestream input for
// the default and fingerprint file-identity configurations, reading the
// same 1000-line file end-to-end on every iteration.
func BenchmarkFilestream(b *testing.B) {
	// Discard log output so logging does not skew the measurements.
	// FIX: the error returned by TestingSetup was previously ignored.
	err := logp.TestingSetup(logp.ToDiscardOutput())
	require.NoError(b, err)

	lineCount := 1000
	filename := generateFile(b, lineCount)

	b.Run("filestream default throughput", func(b *testing.B) {
		cfg := `
type: filestream
prospector.scanner.check_interval: 1s
paths:
  - ` + filename + `
`
		for i := 0; i < b.N; i++ {
			// testID must be unique per run so each iteration starts from
			// a clean registry state.
			runFilestreamBenchmark(b, fmt.Sprintf("default-benchmark-%d", i), cfg, lineCount)
		}
	})

	b.Run("filestream fingerprint throughput", func(b *testing.B) {
		cfg := `
type: filestream
prospector.scanner:
  fingerprint.enabled: true
  check_interval: 1s
file_identity.fingerprint: ~
paths:
  - ` + filename + `
`
		for i := 0; i < b.N; i++ {
			runFilestreamBenchmark(b, fmt.Sprintf("fp-benchmark-%d", i), cfg, lineCount)
		}
	})
}

// createTestStore returns a loginp.StateStore backed by an in-memory
// statestore registry, so benchmarks never touch the disk.
func createTestStore(_ *testing.B) loginp.StateStore {
	backend := storetest.NewMemoryStoreBackend()
	return &testStore{registry: statestore.NewRegistry(backend)}
}

// testStore is a minimal loginp.StateStore implementation backed by an
// in-memory statestore registry; used only by the benchmarks in this file.
type testStore struct {
	registry *statestore.Registry
}

// Close releases the underlying in-memory registry.
func (s *testStore) Close() {
	s.registry.Close()
}

// Access opens the benchmark's state store in the in-memory registry.
func (s *testStore) Access() (*statestore.Store, error) {
	store, err := s.registry.Get("filestream-benchmark")
	return store, err
}

// CleanupInterval reports how often removed entries should be purged
// from the store.
func (s *testStore) CleanupInterval() time.Duration {
	const interval = time.Second
	return interval
}

// newTestPipeline builds a beat.PipelineConnector whose clients append
// every published event to the slice pointed to by out.
func newTestPipeline(out *[]beat.Event) beat.PipelineConnector {
	pipeline := testPipeline{out: out}
	return &pipeline
}

// testPipeline is a beat.PipelineConnector that collects published events
// into a caller-provided slice instead of shipping them anywhere.
type testPipeline struct {
	out *[]beat.Event
}

// ConnectWith ignores the client configuration and behaves like Connect.
func (p *testPipeline) ConnectWith(beat.ClientConfig) (beat.Client, error) {
	return p.Connect()
}
// Connect returns a client that appends published events to the
// pipeline's shared output slice. It never fails.
func (p *testPipeline) Connect() (beat.Client, error) {
	client := testClient{out: p.out}
	return &client, nil
}

// testClient is a beat.Client that records every published event into the
// slice shared with the test body.
type testClient struct {
	out *[]beat.Event
}

// Publish appends the event to the shared output slice.
// NOTE(review): this runs on the input's goroutine while the test
// goroutine polls len(*c.out) — there is no synchronization, so a
// `go test -race` run would likely flag it; consider adding a mutex.
func (c *testClient) Publish(event beat.Event) {
	*c.out = append(*c.out, event)
}

// PublishAll publishes every event in order, delegating to Publish.
func (c *testClient) PublishAll(events []beat.Event) {
	for i := range events {
		c.Publish(events[i])
	}
}
// Close is a no-op; the collected events stay available to the test.
func (c *testClient) Close() error {
	return nil
}

0 comments on commit 0c78985

Please sign in to comment.