From b858f59dc0be689f9929cd8eaefe8d99fb0a6aaa Mon Sep 17 00:00:00 2001 From: Tyler Helmuth <12352919+TylerHelmuth@users.noreply.github.com> Date: Thu, 20 Oct 2022 11:26:28 -0600 Subject: [PATCH 01/22] Add new Context-specific configuration options --- processor/transformprocessor/config.go | 69 +++- processor/transformprocessor/config_test.go | 76 +++- processor/transformprocessor/factory.go | 11 +- processor/transformprocessor/factory_test.go | 5 + .../internal/common/config.go | 28 ++ .../internal/common/functions.go | 10 + .../internal/common/processor.go | 330 ++++++++++++++++++ .../internal/logs/processor.go | 44 ++- .../internal/logs/processor_test.go | 166 ++++++++- .../internal/metrics/processor.go | 62 ++-- .../internal/metrics/processor_test.go | 176 +++++++++- .../internal/traces/processor.go | 45 ++- .../internal/traces/processor_test.go | 170 ++++++++- .../transformprocessor/testdata/config.yaml | 95 +++-- 14 files changed, 1192 insertions(+), 95 deletions(-) create mode 100644 processor/transformprocessor/internal/common/config.go create mode 100644 processor/transformprocessor/internal/common/processor.go diff --git a/processor/transformprocessor/config.go b/processor/transformprocessor/config.go index a53bd772cb6f..e32f018e950f 100644 --- a/processor/transformprocessor/config.go +++ b/processor/transformprocessor/config.go @@ -15,11 +15,15 @@ package transformprocessor // import "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor" import ( + "fmt" + "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/config" "go.uber.org/multierr" "go.uber.org/zap" + "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottldatapoints" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottllogs" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottltraces" @@ -31,6 +35,11 @@ import ( type Config struct { config.ProcessorSettings `mapstructure:",squash"` + TraceStatements []common.ContextStatements `mapstructure:"trace_statements"` + MetricStatements []common.ContextStatements `mapstructure:"metric_statements"` + LogStatements []common.ContextStatements `mapstructure:"log_statements"` + + // Deprecated. 
Use TraceStatements, MetricStatements, and LogStatements instead OTTLConfig `mapstructure:",squash"` } @@ -47,24 +56,60 @@ type SignalConfig struct { var _ config.Processor = (*Config)(nil) func (c *Config) Validate() error { + if (len(c.Traces.Statements) > 0 || len(c.Metrics.Statements) > 0 || len(c.Logs.Statements) > 0) && + (len(c.TraceStatements) > 0 || len(c.MetricStatements) > 0 || len(c.LogStatements) > 0) { + return fmt.Errorf("cannot use Traces, Metrics and/or Logs with TraceStatements, MetricStatements and/or LogStatements") + } + var errors error - ottltracesp := ottltraces.NewParser(traces.Functions(), component.TelemetrySettings{Logger: zap.NewNop()}) - _, err := ottltracesp.ParseStatements(c.Traces.Statements) - if err != nil { - errors = multierr.Append(errors, err) + if len(c.Traces.Statements) > 0 { + ottltracesp := ottltraces.NewParser(traces.Functions(), component.TelemetrySettings{Logger: zap.NewNop()}) + _, err := ottltracesp.ParseStatements(c.Traces.Statements) + if err != nil { + errors = multierr.Append(errors, err) + } + } + + if len(c.TraceStatements) > 0 { + pc := common.NewTracesParserCollection(traces.Functions(), component.TelemetrySettings{Logger: zap.NewNop()}) + _, err := pc.ParseContextStatements(c.TraceStatements) + if err != nil { + errors = multierr.Append(errors, err) + } + } + + if len(c.Metrics.Statements) > 0 { + ottlmetricsp := ottldatapoints.NewParser(metrics.Functions(), component.TelemetrySettings{Logger: zap.NewNop()}) + _, err := ottlmetricsp.ParseStatements(c.Metrics.Statements) + if err != nil { + errors = multierr.Append(errors, err) + } } - ottlmetricsp := ottldatapoints.NewParser(metrics.Functions(), component.TelemetrySettings{Logger: zap.NewNop()}) - _, err = ottlmetricsp.ParseStatements(c.Metrics.Statements) - if err != nil { - errors = multierr.Append(errors, err) + if len(c.MetricStatements) > 0 { + pc := common.NewMetricsParserCollection(metrics.Functions(), component.TelemetrySettings{Logger: zap.NewNop()}) + _, err := pc.ParseContextStatements(c.MetricStatements) + if err != nil { + errors = multierr.Append(errors, err) + } } - ottllogsp := ottllogs.NewParser(logs.Functions(), component.TelemetrySettings{Logger: zap.NewNop()}) - _, err = ottllogsp.ParseStatements(c.Logs.Statements) - if err != nil { - errors = multierr.Append(errors, err) + if len(c.Logs.Statements) > 0 { + ottllogsp := ottllogs.NewParser(logs.Functions(), component.TelemetrySettings{Logger: zap.NewNop()}) + _, err := ottllogsp.ParseStatements(c.Logs.Statements) + if err != nil { + errors = multierr.Append(errors, err) + } } + + if len(c.LogStatements) > 0 { + pc := common.NewLogsParserCollection(logs.Functions(), component.TelemetrySettings{Logger: zap.NewNop()}) + _, err := pc.ParseContextStatements(c.LogStatements) + if err != nil { + errors = multierr.Append(errors, err) + } + } + return errors } diff --git a/processor/transformprocessor/config_test.go b/processor/transformprocessor/config_test.go index 410cb5a41c1f..47f4aaa60395 100644 --- a/processor/transformprocessor/config_test.go +++ b/processor/transformprocessor/config_test.go @@ -18,6 +18,8 @@ import ( "path/filepath" "testing" + "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "go.opentelemetry.io/collector/config" @@ -34,6 +36,68 @@ func TestLoadConfig(t *testing.T) { }{ { id: config.NewComponentIDWithName(typeStr, ""), + expected: &Config{ + 
ProcessorSettings: config.NewProcessorSettings(config.NewComponentID(typeStr)), + OTTLConfig: OTTLConfig{ + Traces: SignalConfig{ + Statements: []string{}, + }, + Metrics: SignalConfig{ + Statements: []string{}, + }, + Logs: SignalConfig{ + Statements: []string{}, + }, + }, + TraceStatements: []common.ContextStatements{ + { + Context: "trace", + Statements: []string{ + `set(name, "bear") where attributes["http.path"] == "/animal"`, + `keep_keys(attributes, ["http.method", "http.path"])`, + }, + }, + { + Context: "resource", + Statements: []string{ + `set(attributes["name"], "bear")`, + }, + }, + }, + MetricStatements: []common.ContextStatements{ + { + Context: "datapoint", + Statements: []string{ + `set(metric.name, "bear") where attributes["http.path"] == "/animal"`, + `keep_keys(attributes, ["http.method", "http.path"])`, + }, + }, + { + Context: "resource", + Statements: []string{ + `set(attributes["name"], "bear")`, + }, + }, + }, + LogStatements: []common.ContextStatements{ + { + Context: "log", + Statements: []string{ + `set(body, "bear") where attributes["http.path"] == "/animal"`, + `keep_keys(attributes, ["http.method", "http.path"])`, + }, + }, + { + Context: "resource", + Statements: []string{ + `set(attributes["name"], "bear")`, + }, + }, + }, + }, + }, + { + id: config.NewComponentIDWithName(typeStr, "deprecated_format"), expected: &Config{ ProcessorSettings: config.NewProcessorSettings(config.NewComponentID(typeStr)), OTTLConfig: OTTLConfig{ @@ -56,8 +120,15 @@ func TestLoadConfig(t *testing.T) { }, }, }, + TraceStatements: []common.ContextStatements{}, + MetricStatements: []common.ContextStatements{}, + LogStatements: []common.ContextStatements{}, }, }, + { + id: config.NewComponentIDWithName(typeStr, "using_both_formats"), + errorMessage: "cannot use Traces, Metrics and/or Logs with TraceStatements, MetricStatements and/or LogStatements", + }, { id: config.NewComponentIDWithName(typeStr, "bad_syntax_trace"), errorMessage: "1:18: unexpected token \"where\" (expected \")\")", @@ -66,7 +137,6 @@ func TestLoadConfig(t *testing.T) { id: config.NewComponentIDWithName(typeStr, "unknown_function_trace"), errorMessage: "undefined function not_a_function", }, - { id: config.NewComponentIDWithName(typeStr, "bad_syntax_metric"), errorMessage: "1:18: unexpected token \"where\" (expected \")\")", @@ -83,6 +153,10 @@ func TestLoadConfig(t *testing.T) { id: config.NewComponentIDWithName(typeStr, "unknown_function_log"), errorMessage: "undefined function not_a_function", }, + { + id: config.NewComponentIDWithName(typeStr, "unknown_context"), + errorMessage: "context, test, is not a valid context", + }, } for _, tt := range tests { t.Run(tt.id.String(), func(t *testing.T) { diff --git a/processor/transformprocessor/factory.go b/processor/transformprocessor/factory.go index 0f43f9cd590f..fefe3fce194c 100644 --- a/processor/transformprocessor/factory.go +++ b/processor/transformprocessor/factory.go @@ -18,6 +18,8 @@ import ( "context" "fmt" + "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" + "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/config" "go.opentelemetry.io/collector/consumer" @@ -59,6 +61,9 @@ func createDefaultConfig() config.Processor { Statements: []string{}, }, }, + TraceStatements: []common.ContextStatements{}, + MetricStatements: []common.ContextStatements{}, + LogStatements: []common.ContextStatements{}, } } @@ -70,7 +75,7 @@ func createLogsProcessor( ) (component.LogsProcessor, error) { 
oCfg := cfg.(*Config) - proc, err := logs.NewProcessor(oCfg.Logs.Statements, logs.Functions(), set.TelemetrySettings) + proc, err := logs.NewProcessor(oCfg.Logs.Statements, oCfg.LogStatements, set.TelemetrySettings) if err != nil { return nil, fmt.Errorf("invalid config for \"transform\" processor %w", err) } @@ -91,7 +96,7 @@ func createTracesProcessor( ) (component.TracesProcessor, error) { oCfg := cfg.(*Config) - proc, err := traces.NewProcessor(oCfg.Traces.Statements, traces.Functions(), set.TelemetrySettings) + proc, err := traces.NewProcessor(oCfg.Traces.Statements, oCfg.TraceStatements, set.TelemetrySettings) if err != nil { return nil, fmt.Errorf("invalid config for \"transform\" processor %w", err) } @@ -112,7 +117,7 @@ func createMetricsProcessor( ) (component.MetricsProcessor, error) { oCfg := cfg.(*Config) - proc, err := metrics.NewProcessor(oCfg.Metrics.Statements, metrics.Functions(), set.TelemetrySettings) + proc, err := metrics.NewProcessor(oCfg.Metrics.Statements, oCfg.MetricStatements, set.TelemetrySettings) if err != nil { return nil, fmt.Errorf("invalid config for \"transform\" processor %w", err) } diff --git a/processor/transformprocessor/factory_test.go b/processor/transformprocessor/factory_test.go index 1c3319e9d123..adbb18e851d9 100644 --- a/processor/transformprocessor/factory_test.go +++ b/processor/transformprocessor/factory_test.go @@ -18,6 +18,8 @@ import ( "context" "testing" + "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" + "github.com/stretchr/testify/assert" "go.opentelemetry.io/collector/component/componenttest" "go.opentelemetry.io/collector/config" @@ -49,6 +51,9 @@ func TestFactory_CreateDefaultConfig(t *testing.T) { Statements: []string{}, }, }, + TraceStatements: []common.ContextStatements{}, + MetricStatements: []common.ContextStatements{}, + LogStatements: []common.ContextStatements{}, }) assert.NoError(t, configtest.CheckConfigStruct(cfg)) } diff --git a/processor/transformprocessor/internal/common/config.go b/processor/transformprocessor/internal/common/config.go new file mode 100644 index 000000000000..8106f6512ff1 --- /dev/null +++ b/processor/transformprocessor/internal/common/config.go @@ -0,0 +1,28 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package common // import "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" + +const ( + Resource string = "resource" + Scope string = "scope" + Trace string = "trace" + DataPoint string = "datapoint" + Log string = "log" +) + +type ContextStatements struct { + Context string `mapstructure:"context"` + Statements []string `mapstructure:"statements"` +} diff --git a/processor/transformprocessor/internal/common/functions.go b/processor/transformprocessor/internal/common/functions.go index 7b1e370d71d6..710d52faeed0 100644 --- a/processor/transformprocessor/internal/common/functions.go +++ b/processor/transformprocessor/internal/common/functions.go @@ -15,6 +15,8 @@ package common // import "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" import ( + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottlresource" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottlscope" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/ottlfuncs" ) @@ -38,3 +40,11 @@ func Functions[K any]() map[string]interface{} { "delete_matching_keys": ottlfuncs.DeleteMatchingKeys[K], } } + +func ResourceFunctions() map[string]interface{} { + return Functions[ottlresource.TransformContext]() +} + +func ScopeFunctions() map[string]interface{} { + return Functions[ottlscope.TransformContext]() +} diff --git a/processor/transformprocessor/internal/common/processor.go b/processor/transformprocessor/internal/common/processor.go new file mode 100644 index 000000000000..9de8fa7dad5f --- /dev/null +++ b/processor/transformprocessor/internal/common/processor.go @@ -0,0 +1,330 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package common // import "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" +import ( + "fmt" + + "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/pdata/pcommon" + "go.opentelemetry.io/collector/pdata/plog" + "go.opentelemetry.io/collector/pdata/pmetric" + "go.opentelemetry.io/collector/pdata/ptrace" + "go.uber.org/multierr" + + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottldatapoints" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottllogs" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottlresource" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottlscope" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottltraces" +) + +type Context interface { + ProcessTraces(td ptrace.Traces) + ProcessMetrics(td pmetric.Metrics) + ProcessLogs(td plog.Logs) +} + +type ResourceStatements struct { + Statements []*ottl.Statement[ottlresource.TransformContext] +} + +func (r *ResourceStatements) ProcessTraces(td ptrace.Traces) { + for i := 0; i < td.ResourceSpans().Len(); i++ { + rspans := td.ResourceSpans().At(i) + ctx := ottlresource.NewTransformContext(rspans.Resource()) + for _, statement := range r.Statements { + statement.Execute(ctx) + } + } +} + +func (r *ResourceStatements) ProcessMetrics(td pmetric.Metrics) { + for i := 0; i < td.ResourceMetrics().Len(); i++ { + rmetrics := td.ResourceMetrics().At(i) + ctx := ottlresource.NewTransformContext(rmetrics.Resource()) + for _, statement := range r.Statements { + statement.Execute(ctx) + } + } +} + +func (r *ResourceStatements) ProcessLogs(td plog.Logs) { + for i := 0; i < td.ResourceLogs().Len(); i++ { + rlogs := td.ResourceLogs().At(i) + ctx := ottlresource.NewTransformContext(rlogs.Resource()) + for _, statement := range r.Statements { + statement.Execute(ctx) + } + } +} + +type ScopeStatements struct { + Statements []*ottl.Statement[ottlscope.TransformContext] +} + +func (s *ScopeStatements) ProcessTraces(td ptrace.Traces) { + for i := 0; i < td.ResourceSpans().Len(); i++ { + rspans := td.ResourceSpans().At(i) + for j := 0; j < rspans.ScopeSpans().Len(); j++ { + sspans := rspans.ScopeSpans().At(j) + ctx := ottlscope.NewTransformContext(sspans.Scope(), rspans.Resource()) + for _, statement := range s.Statements { + statement.Execute(ctx) + } + } + } +} + +func (s *ScopeStatements) ProcessMetrics(td pmetric.Metrics) { + for i := 0; i < td.ResourceMetrics().Len(); i++ { + rmetrics := td.ResourceMetrics().At(i) + for j := 0; j < rmetrics.ScopeMetrics().Len(); j++ { + smetrics := rmetrics.ScopeMetrics().At(j) + ctx := ottlscope.NewTransformContext(smetrics.Scope(), rmetrics.Resource()) + for _, statement := range s.Statements { + statement.Execute(ctx) + } + } + } +} + +func (s *ScopeStatements) ProcessLogs(td plog.Logs) { + for i := 0; i < td.ResourceLogs().Len(); i++ { + rlogs := td.ResourceLogs().At(i) + for j := 0; j < rlogs.ScopeLogs().Len(); j++ { + slogs := rlogs.ScopeLogs().At(j) + ctx := ottlscope.NewTransformContext(slogs.Scope(), rlogs.Resource()) + for _, statement := range s.Statements { + statement.Execute(ctx) + } + } + } +} + +type TraceStatements struct { + statements []*ottl.Statement[ottltraces.TransformContext] +} + +func (t *TraceStatements) ProcessTraces(td 
ptrace.Traces) { + for i := 0; i < td.ResourceSpans().Len(); i++ { + rspans := td.ResourceSpans().At(i) + for j := 0; j < rspans.ScopeSpans().Len(); j++ { + sspans := rspans.ScopeSpans().At(j) + spans := sspans.Spans() + for k := 0; k < spans.Len(); k++ { + ctx := ottltraces.NewTransformContext(spans.At(k), sspans.Scope(), rspans.Resource()) + for _, statement := range t.statements { + statement.Execute(ctx) + } + } + } + } +} + +func (t *TraceStatements) ProcessMetrics(td pmetric.Metrics) {} + +func (t *TraceStatements) ProcessLogs(td plog.Logs) {} + +type LogStatements struct { + statements []*ottl.Statement[ottllogs.TransformContext] +} + +func (l *LogStatements) ProcessTraces(td ptrace.Traces) {} + +func (l *LogStatements) ProcessMetrics(td pmetric.Metrics) {} + +func (l *LogStatements) ProcessLogs(td plog.Logs) { + for i := 0; i < td.ResourceLogs().Len(); i++ { + rlogs := td.ResourceLogs().At(i) + for j := 0; j < rlogs.ScopeLogs().Len(); j++ { + slogs := rlogs.ScopeLogs().At(j) + logs := slogs.LogRecords() + for k := 0; k < logs.Len(); k++ { + ctx := ottllogs.NewTransformContext(logs.At(k), slogs.Scope(), rlogs.Resource()) + for _, statement := range l.statements { + statement.Execute(ctx) + } + } + } + } +} + +type DataPointStatements struct { + statements []*ottl.Statement[ottldatapoints.TransformContext] +} + +func (d *DataPointStatements) ProcessTraces(td ptrace.Traces) {} + +func (d *DataPointStatements) ProcessLogs(td plog.Logs) {} + +func (d *DataPointStatements) ProcessMetrics(td pmetric.Metrics) { + for i := 0; i < td.ResourceMetrics().Len(); i++ { + rmetrics := td.ResourceMetrics().At(i) + for j := 0; j < rmetrics.ScopeMetrics().Len(); j++ { + smetrics := rmetrics.ScopeMetrics().At(j) + metrics := smetrics.Metrics() + for k := 0; k < metrics.Len(); k++ { + metric := metrics.At(k) + switch metric.Type() { + case pmetric.MetricTypeSum: + d.handleNumberDataPoints(metric.Sum().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + case pmetric.MetricTypeGauge: + d.handleNumberDataPoints(metric.Gauge().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + case pmetric.MetricTypeHistogram: + d.handleHistogramDataPoints(metric.Histogram().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + case pmetric.MetricTypeExponentialHistogram: + d.handleExponetialHistogramDataPoints(metric.ExponentialHistogram().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + case pmetric.MetricTypeSummary: + d.handleSummaryDataPoints(metric.Summary().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + } + } + } + } +} + +func (d *DataPointStatements) handleNumberDataPoints(dps pmetric.NumberDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) { + for i := 0; i < dps.Len(); i++ { + ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) + d.callFunctions(ctx) + } +} + +func (d *DataPointStatements) handleHistogramDataPoints(dps pmetric.HistogramDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) { + for i := 0; i < dps.Len(); i++ { + ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) + d.callFunctions(ctx) + } +} + +func (d *DataPointStatements) handleExponetialHistogramDataPoints(dps pmetric.ExponentialHistogramDataPointSlice, metric pmetric.Metric, metrics 
pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) { + for i := 0; i < dps.Len(); i++ { + ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) + d.callFunctions(ctx) + } +} + +func (d *DataPointStatements) handleSummaryDataPoints(dps pmetric.SummaryDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) { + for i := 0; i < dps.Len(); i++ { + ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) + d.callFunctions(ctx) + } +} + +func (d *DataPointStatements) callFunctions(ctx ottldatapoints.TransformContext) { + for _, statement := range d.statements { + statement.Execute(ctx) + } +} + +type ParserCollection struct { + resourceParser ottl.Parser[ottlresource.TransformContext] + scopeParser ottl.Parser[ottlscope.TransformContext] + traceParser ottl.Parser[ottltraces.TransformContext] + dataPointsParser ottl.Parser[ottldatapoints.TransformContext] + logParser ottl.Parser[ottllogs.TransformContext] +} + +func NewTracesParserCollection(functions map[string]interface{}, settings component.TelemetrySettings) ParserCollection { + return ParserCollection{ + resourceParser: ottlresource.NewParser(ResourceFunctions(), settings), + scopeParser: ottlscope.NewParser(ScopeFunctions(), settings), + traceParser: ottltraces.NewParser(functions, settings), + dataPointsParser: ottl.Parser[ottldatapoints.TransformContext]{}, + logParser: ottl.Parser[ottllogs.TransformContext]{}, + } +} + +func NewMetricsParserCollection(functions map[string]interface{}, settings component.TelemetrySettings) ParserCollection { + return ParserCollection{ + resourceParser: ottlresource.NewParser(ResourceFunctions(), settings), + scopeParser: ottlscope.NewParser(ScopeFunctions(), settings), + traceParser: ottl.Parser[ottltraces.TransformContext]{}, + dataPointsParser: ottldatapoints.NewParser(functions, settings), + logParser: ottl.Parser[ottllogs.TransformContext]{}, + } +} + +func NewLogsParserCollection(functions map[string]interface{}, settings component.TelemetrySettings) ParserCollection { + return ParserCollection{ + resourceParser: ottlresource.NewParser(ResourceFunctions(), settings), + scopeParser: ottlscope.NewParser(ScopeFunctions(), settings), + traceParser: ottl.Parser[ottltraces.TransformContext]{}, + dataPointsParser: ottl.Parser[ottldatapoints.TransformContext]{}, + logParser: ottllogs.NewParser(functions, settings), + } +} + +func (pc ParserCollection) ParseContextStatements(contextStatements []ContextStatements) ([]Context, error) { + contexts := make([]Context, len(contextStatements)) + var errors error + + for i, s := range contextStatements { + switch s.Context { + case Resource: + statements, err := pc.resourceParser.ParseStatements(s.Statements) + if err != nil { + errors = multierr.Append(errors, err) + continue + } + contexts[i] = &ResourceStatements{ + Statements: statements, + } + case Scope: + statements, err := pc.scopeParser.ParseStatements(s.Statements) + if err != nil { + errors = multierr.Append(errors, err) + continue + } + contexts[i] = &ScopeStatements{ + Statements: statements, + } + case Trace: + statements, err := pc.traceParser.ParseStatements(s.Statements) + if err != nil { + errors = multierr.Append(errors, err) + continue + } + contexts[i] = &TraceStatements{ + statements: statements, + } + case DataPoint: + statements, err := pc.dataPointsParser.ParseStatements(s.Statements) + if err != nil { + errors = multierr.Append(errors, 
err) + continue + } + contexts[i] = &DataPointStatements{ + statements: statements, + } + case Log: + statements, err := pc.logParser.ParseStatements(s.Statements) + if err != nil { + errors = multierr.Append(errors, err) + continue + } + contexts[i] = &LogStatements{ + statements: statements, + } + default: + errors = multierr.Append(errors, fmt.Errorf("context, %v, is not a valid context", s.Context)) + } + } + + if errors != nil { + return nil, errors + } + return contexts, nil +} diff --git a/processor/transformprocessor/internal/logs/processor.go b/processor/transformprocessor/internal/logs/processor.go index fa3fd22ec06d..608fe513d60a 100644 --- a/processor/transformprocessor/internal/logs/processor.go +++ b/processor/transformprocessor/internal/logs/processor.go @@ -22,36 +22,54 @@ import ( "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottllogs" + "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" ) type Processor struct { + contexts []common.Context statements []*ottl.Statement[ottllogs.TransformContext] } -func NewProcessor(statements []string, functions map[string]interface{}, settings component.TelemetrySettings) (*Processor, error) { - ottlp := ottllogs.NewParser(functions, settings) - parsedStatements, err := ottlp.ParseStatements(statements) +func NewProcessor(statements []string, contextStatements []common.ContextStatements, settings component.TelemetrySettings) (*Processor, error) { + if len(statements) > 0 { + ottlp := ottllogs.NewParser(Functions(), settings) + parsedStatements, err := ottlp.ParseStatements(statements) + if err != nil { + return nil, err + } + return &Processor{ + statements: parsedStatements, + }, nil + } + pc := common.NewLogsParserCollection(Functions(), settings) + contexts, err := pc.ParseContextStatements(contextStatements) if err != nil { return nil, err } return &Processor{ - statements: parsedStatements, + contexts: contexts, }, nil } func (p *Processor) ProcessLogs(_ context.Context, td plog.Logs) (plog.Logs, error) { - for i := 0; i < td.ResourceLogs().Len(); i++ { - rlogs := td.ResourceLogs().At(i) - for j := 0; j < rlogs.ScopeLogs().Len(); j++ { - slogs := rlogs.ScopeLogs().At(j) - logs := slogs.LogRecords() - for k := 0; k < logs.Len(); k++ { - ctx := ottllogs.NewTransformContext(logs.At(k), slogs.Scope(), rlogs.Resource()) - for _, statement := range p.statements { - statement.Execute(ctx) + if len(p.statements) > 0 { + for i := 0; i < td.ResourceLogs().Len(); i++ { + rlogs := td.ResourceLogs().At(i) + for j := 0; j < rlogs.ScopeLogs().Len(); j++ { + slogs := rlogs.ScopeLogs().At(j) + logs := slogs.LogRecords() + for k := 0; k < logs.Len(); k++ { + ctx := ottllogs.NewTransformContext(logs.At(k), slogs.Scope(), rlogs.Resource()) + for _, statement := range p.statements { + statement.Execute(ctx) + } } } } + } else { + for _, contexts := range p.contexts { + contexts.ProcessLogs(td) + } } return td, nil } diff --git a/processor/transformprocessor/internal/logs/processor_test.go b/processor/transformprocessor/internal/logs/processor_test.go index bc14743c5f64..688220af7108 100644 --- a/processor/transformprocessor/internal/logs/processor_test.go +++ b/processor/transformprocessor/internal/logs/processor_test.go @@ -19,6 +19,8 @@ import ( "testing" "time" + "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" + 
"github.com/stretchr/testify/assert" "go.opentelemetry.io/collector/component/componenttest" "go.opentelemetry.io/collector/pdata/pcommon" @@ -36,7 +38,77 @@ var ( spanID = [8]byte{1, 2, 3, 4, 5, 6, 7, 8} ) -func TestProcess(t *testing.T) { +func Test_ProcessLogs_ResourceContext(t *testing.T) { + tests := []struct { + statement string + want func(td plog.Logs) + }{ + { + statement: `set(attributes["test"], "pass")`, + want: func(td plog.Logs) { + td.ResourceLogs().At(0).Resource().Attributes().PutStr("test", "pass") + }, + }, + { + statement: `set(attributes["test"], "pass") where attributes["host.name"] == "wrong"`, + want: func(td plog.Logs) { + }, + }, + } + + for _, tt := range tests { + t.Run(tt.statement, func(t *testing.T) { + td := constructLogs() + processor, err := NewProcessor(nil, []common.ContextStatements{{Context: "resource", Statements: []string{tt.statement}}}, componenttest.NewNopTelemetrySettings()) + assert.NoError(t, err) + + _, err = processor.ProcessLogs(context.Background(), td) + assert.NoError(t, err) + + exTd := constructLogs() + tt.want(exTd) + + assert.Equal(t, exTd, td) + }) + } +} + +func Test_ProcessLogs_ScopeContext(t *testing.T) { + tests := []struct { + statement string + want func(td plog.Logs) + }{ + { + statement: `set(attributes["test"], "pass") where name == "scope"`, + want: func(td plog.Logs) { + td.ResourceLogs().At(0).ScopeLogs().At(0).Scope().Attributes().PutStr("test", "pass") + }, + }, + { + statement: `set(attributes["test"], "pass") where version == 2`, + want: func(td plog.Logs) { + }, + }, + } + + for _, tt := range tests { + t.Run(tt.statement, func(t *testing.T) { + td := constructLogs() + processor, err := NewProcessor(nil, []common.ContextStatements{{Context: "scope", Statements: []string{tt.statement}}}, componenttest.NewNopTelemetrySettings()) + assert.NoError(t, err) + + _, err = processor.ProcessLogs(context.Background(), td) + assert.NoError(t, err) + + exTd := constructLogs() + tt.want(exTd) + + assert.Equal(t, exTd, td) + }) + } +} + +func Test_ProcessLogs_LogContext(t *testing.T) { tests := []struct { statement string want func(td plog.Logs) @@ -198,7 +270,96 @@ func TestProcess(t *testing.T) { for _, tt := range tests { t.Run(tt.statement, func(t *testing.T) { td := constructLogs() - processor, err := NewProcessor([]string{tt.statement}, Functions(), componenttest.NewNopTelemetrySettings()) + processor, err := NewProcessor(nil, []common.ContextStatements{{Context: "log", Statements: []string{tt.statement}}}, componenttest.NewNopTelemetrySettings()) + assert.NoError(t, err) + + _, err = processor.ProcessLogs(context.Background(), td) + assert.NoError(t, err) + + exTd := constructLogs() + tt.want(exTd) + + assert.Equal(t, exTd, td) + }) + } +} + +func Test_ProcessLogs_MixContext(t *testing.T) { + tests := []struct { + name string + contextStatments []common.ContextStatements + want func(td plog.Logs) + }{ + { + name: "set resource and then use", + contextStatments: []common.ContextStatements{ + { + Context: "resource", + Statements: []string{ + `set(attributes["test"], "pass")`, + }, + }, + { + Context: "log", + Statements: []string{ + `set(attributes["test"], "pass") where resource.attributes["test"] == "pass"`, + }, + }, + }, + want: func(td plog.Logs) { + td.ResourceLogs().At(0).Resource().Attributes().PutStr("test", "pass") + td.ResourceLogs().At(0).ScopeLogs().At(0).LogRecords().At(0).Attributes().PutStr("test", "pass") + td.ResourceLogs().At(0).ScopeLogs().At(0).LogRecords().At(1).Attributes().PutStr("test", 
"pass") + }, + }, + { + name: "set scope and then use", + contextStatments: []common.ContextStatements{ + { + Context: "scope", + Statements: []string{ + `set(attributes["test"], "pass")`, + }, + }, + { + Context: "log", + Statements: []string{ + `set(attributes["test"], "pass") where instrumentation_scope.attributes["test"] == "pass"`, + }, + }, + }, + want: func(td plog.Logs) { + td.ResourceLogs().At(0).ScopeLogs().At(0).Scope().Attributes().PutStr("test", "pass") + td.ResourceLogs().At(0).ScopeLogs().At(0).LogRecords().At(0).Attributes().PutStr("test", "pass") + td.ResourceLogs().At(0).ScopeLogs().At(0).LogRecords().At(1).Attributes().PutStr("test", "pass") + }, + }, + { + name: "order matters", + contextStatments: []common.ContextStatements{ + { + Context: "log", + Statements: []string{ + `set(attributes["test"], "pass") where instrumentation_scope.attributes["test"] == "pass"`, + }, + }, + { + Context: "scope", + Statements: []string{ + `set(attributes["test"], "pass")`, + }, + }, + }, + want: func(td plog.Logs) { + td.ResourceLogs().At(0).ScopeLogs().At(0).Scope().Attributes().PutStr("test", "pass") + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + td := constructLogs() + processor, err := NewProcessor(nil, tt.contextStatments, componenttest.NewNopTelemetrySettings()) assert.NoError(t, err) _, err = processor.ProcessLogs(context.Background(), td) @@ -217,6 +378,7 @@ func constructLogs() plog.Logs { rs0 := td.ResourceLogs().AppendEmpty() rs0.Resource().Attributes().PutStr("host.name", "localhost") rs0ils0 := rs0.ScopeLogs().AppendEmpty() + rs0ils0.Scope().SetName("scope") fillLogOne(rs0ils0.LogRecords().AppendEmpty()) fillLogTwo(rs0ils0.LogRecords().AppendEmpty()) return td diff --git a/processor/transformprocessor/internal/metrics/processor.go b/processor/transformprocessor/internal/metrics/processor.go index cf815032d2be..66f99f997c2d 100644 --- a/processor/transformprocessor/internal/metrics/processor.go +++ b/processor/transformprocessor/internal/metrics/processor.go @@ -23,45 +23,63 @@ import ( "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottldatapoints" + "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" ) type Processor struct { + contexts []common.Context statements []*ottl.Statement[ottldatapoints.TransformContext] } -func NewProcessor(statements []string, functions map[string]interface{}, settings component.TelemetrySettings) (*Processor, error) { - ottlp := ottldatapoints.NewParser(functions, settings) - parsedStatements, err := ottlp.ParseStatements(statements) +func NewProcessor(statements []string, contextStatements []common.ContextStatements, settings component.TelemetrySettings) (*Processor, error) { + if len(statements) > 0 { + ottlp := ottldatapoints.NewParser(Functions(), settings) + parsedStatements, err := ottlp.ParseStatements(statements) + if err != nil { + return nil, err + } + return &Processor{ + statements: parsedStatements, + }, nil + } + pc := common.NewMetricsParserCollection(Functions(), settings) + contexts, err := pc.ParseContextStatements(contextStatements) if err != nil { return nil, err } return &Processor{ - statements: parsedStatements, + contexts: contexts, }, nil } func (p *Processor) ProcessMetrics(_ context.Context, td pmetric.Metrics) (pmetric.Metrics, error) { - for i := 0; i < td.ResourceMetrics().Len(); i++ { - rmetrics := 
td.ResourceMetrics().At(i) - for j := 0; j < rmetrics.ScopeMetrics().Len(); j++ { - smetrics := rmetrics.ScopeMetrics().At(j) - metrics := smetrics.Metrics() - for k := 0; k < metrics.Len(); k++ { - metric := metrics.At(k) - switch metric.Type() { - case pmetric.MetricTypeSum: - p.handleNumberDataPoints(metric.Sum().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) - case pmetric.MetricTypeGauge: - p.handleNumberDataPoints(metric.Gauge().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) - case pmetric.MetricTypeHistogram: - p.handleHistogramDataPoints(metric.Histogram().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) - case pmetric.MetricTypeExponentialHistogram: - p.handleExponetialHistogramDataPoints(metric.ExponentialHistogram().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) - case pmetric.MetricTypeSummary: - p.handleSummaryDataPoints(metric.Summary().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + if len(p.statements) > 0 { + for i := 0; i < td.ResourceMetrics().Len(); i++ { + rmetrics := td.ResourceMetrics().At(i) + for j := 0; j < rmetrics.ScopeMetrics().Len(); j++ { + smetrics := rmetrics.ScopeMetrics().At(j) + metrics := smetrics.Metrics() + for k := 0; k < metrics.Len(); k++ { + metric := metrics.At(k) + switch metric.Type() { + case pmetric.MetricTypeSum: + p.handleNumberDataPoints(metric.Sum().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + case pmetric.MetricTypeGauge: + p.handleNumberDataPoints(metric.Gauge().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + case pmetric.MetricTypeHistogram: + p.handleHistogramDataPoints(metric.Histogram().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + case pmetric.MetricTypeExponentialHistogram: + p.handleExponetialHistogramDataPoints(metric.ExponentialHistogram().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + case pmetric.MetricTypeSummary: + p.handleSummaryDataPoints(metric.Summary().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + } } } } + } else { + for _, contexts := range p.contexts { + contexts.ProcessMetrics(td) + } } return td, nil } diff --git a/processor/transformprocessor/internal/metrics/processor_test.go b/processor/transformprocessor/internal/metrics/processor_test.go index e860ee3f0e6b..570d57c86d78 100644 --- a/processor/transformprocessor/internal/metrics/processor_test.go +++ b/processor/transformprocessor/internal/metrics/processor_test.go @@ -19,6 +19,8 @@ import ( "testing" "time" + "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" + "github.com/stretchr/testify/assert" "go.opentelemetry.io/collector/component/componenttest" "go.opentelemetry.io/collector/pdata/pcommon" @@ -32,7 +34,77 @@ var ( TestTimeStamp = pcommon.NewTimestampFromTime(StartTime) ) -func TestProcess(t *testing.T) { +func Test_ProcessMetrics_ResourceContext(t *testing.T) { + tests := []struct { + statement string + want func(td pmetric.Metrics) + }{ + { + statement: `set(attributes["test"], "pass")`, + want: func(td pmetric.Metrics) { + td.ResourceMetrics().At(0).Resource().Attributes().PutStr("test", "pass") + }, + }, + { + statement: `set(attributes["test"], "pass") where attributes["host.name"] == "wrong"`, + want: func(td pmetric.Metrics) { + }, + }, + } + + for _, tt := range tests { 
+ t.Run(tt.statement, func(t *testing.T) { + td := constructMetrics() + processor, err := NewProcessor(nil, []common.ContextStatements{{Context: "resource", Statements: []string{tt.statement}}}, componenttest.NewNopTelemetrySettings()) + assert.NoError(t, err) + + _, err = processor.ProcessMetrics(context.Background(), td) + assert.NoError(t, err) + + exTd := constructMetrics() + tt.want(exTd) + + assert.Equal(t, exTd, td) + }) + } +} + +func Test_ProcessMetrics_ScopeContext(t *testing.T) { + tests := []struct { + statement string + want func(td pmetric.Metrics) + }{ + { + statement: `set(attributes["test"], "pass") where name == "scope"`, + want: func(td pmetric.Metrics) { + td.ResourceMetrics().At(0).ScopeMetrics().At(0).Scope().Attributes().PutStr("test", "pass") + }, + }, + { + statement: `set(attributes["test"], "pass") where version == 2`, + want: func(td pmetric.Metrics) { + }, + }, + } + + for _, tt := range tests { + t.Run(tt.statement, func(t *testing.T) { + td := constructMetrics() + processor, err := NewProcessor(nil, []common.ContextStatements{{Context: "scope", Statements: []string{tt.statement}}}, componenttest.NewNopTelemetrySettings()) + assert.NoError(t, err) + + _, err = processor.ProcessMetrics(context.Background(), td) + assert.NoError(t, err) + + exTd := constructMetrics() + tt.want(exTd) + + assert.Equal(t, exTd, td) + }) + } +} + +func Test_ProcessMetrics_DataPointContext(t *testing.T) { tests := []struct { statements []string want func(pmetric.Metrics) @@ -371,7 +443,106 @@ func TestProcess(t *testing.T) { for _, tt := range tests { t.Run(tt.statements[0], func(t *testing.T) { td := constructMetrics() - processor, err := NewProcessor(tt.statements, Functions(), componenttest.NewNopTelemetrySettings()) + processor, err := NewProcessor(nil, []common.ContextStatements{{Context: "datapoint", Statements: tt.statements}}, componenttest.NewNopTelemetrySettings()) + assert.NoError(t, err) + + _, err = processor.ProcessMetrics(context.Background(), td) + assert.NoError(t, err) + + exTd := constructMetrics() + tt.want(exTd) + + assert.Equal(t, exTd, td) + }) + } +} + +func Test_ProcessMetrics_MixContext(t *testing.T) { + tests := []struct { + name string + contextStatments []common.ContextStatements + want func(td pmetric.Metrics) + }{ + { + name: "set resource and then use", + contextStatments: []common.ContextStatements{ + { + Context: "resource", + Statements: []string{ + `set(attributes["test"], "pass")`, + }, + }, + { + Context: "datapoint", + Statements: []string{ + `set(attributes["test"], "pass") where resource.attributes["test"] == "pass"`, + }, + }, + }, + want: func(td pmetric.Metrics) { + td.ResourceMetrics().At(0).Resource().Attributes().PutStr("test", "pass") + td.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(0).Sum().DataPoints().At(0).Attributes().PutStr("test", "pass") + td.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(0).Sum().DataPoints().At(1).Attributes().PutStr("test", "pass") + td.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(1).Histogram().DataPoints().At(0).Attributes().PutStr("test", "pass") + td.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(1).Histogram().DataPoints().At(1).Attributes().PutStr("test", "pass") + td.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(2).ExponentialHistogram().DataPoints().At(0).Attributes().PutStr("test", "pass") + td.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(2).ExponentialHistogram().DataPoints().At(1).Attributes().PutStr("test", "pass") + 
td.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(3).Summary().DataPoints().At(0).Attributes().PutStr("test", "pass") + }, + }, + { + name: "set scope and then use", + contextStatments: []common.ContextStatements{ + { + Context: "scope", + Statements: []string{ + `set(attributes["test"], "pass")`, + }, + }, + { + Context: "datapoint", + Statements: []string{ + `set(attributes["test"], "pass") where instrumentation_scope.attributes["test"] == "pass"`, + }, + }, + }, + want: func(td pmetric.Metrics) { + td.ResourceMetrics().At(0).ScopeMetrics().At(0).Scope().Attributes().PutStr("test", "pass") + td.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(0).Sum().DataPoints().At(0).Attributes().PutStr("test", "pass") + td.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(0).Sum().DataPoints().At(1).Attributes().PutStr("test", "pass") + td.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(1).Histogram().DataPoints().At(0).Attributes().PutStr("test", "pass") + td.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(1).Histogram().DataPoints().At(1).Attributes().PutStr("test", "pass") + td.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(2).ExponentialHistogram().DataPoints().At(0).Attributes().PutStr("test", "pass") + td.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(2).ExponentialHistogram().DataPoints().At(1).Attributes().PutStr("test", "pass") + td.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(3).Summary().DataPoints().At(0).Attributes().PutStr("test", "pass") + }, + }, + { + name: "order matters", + contextStatments: []common.ContextStatements{ + { + Context: "datapoint", + Statements: []string{ + `set(attributes["test"], "pass") where instrumentation_scope.attributes["test"] == "pass"`, + }, + }, + { + Context: "scope", + Statements: []string{ + `set(attributes["test"], "pass")`, + }, + }, + }, + want: func(td pmetric.Metrics) { + td.ResourceMetrics().At(0).ScopeMetrics().At(0).Scope().Attributes().PutStr("test", "pass") + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + td := constructMetrics() + processor, err := NewProcessor(nil, tt.contextStatments, componenttest.NewNopTelemetrySettings()) assert.NoError(t, err) _, err = processor.ProcessMetrics(context.Background(), td) @@ -390,6 +561,7 @@ func constructMetrics() pmetric.Metrics { rm0 := td.ResourceMetrics().AppendEmpty() rm0.Resource().Attributes().PutStr("host.name", "myhost") rm0ils0 := rm0.ScopeMetrics().AppendEmpty() + rm0ils0.Scope().SetName("scope") fillMetricOne(rm0ils0.Metrics().AppendEmpty()) fillMetricTwo(rm0ils0.Metrics().AppendEmpty()) fillMetricThree(rm0ils0.Metrics().AppendEmpty()) diff --git a/processor/transformprocessor/internal/traces/processor.go b/processor/transformprocessor/internal/traces/processor.go index 112f7c9d8731..3317c0692f5c 100644 --- a/processor/transformprocessor/internal/traces/processor.go +++ b/processor/transformprocessor/internal/traces/processor.go @@ -22,36 +22,55 @@ import ( "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottltraces" + "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" ) type Processor struct { + contexts []common.Context statements []*ottl.Statement[ottltraces.TransformContext] } -func NewProcessor(statements []string, functions map[string]interface{}, settings component.TelemetrySettings) (*Processor, 
error) { - ottlp := ottltraces.NewParser(functions, settings) - parsedStatements, err := ottlp.ParseStatements(statements) +func NewProcessor(statements []string, contextStatements []common.ContextStatements, settings component.TelemetrySettings) (*Processor, error) { + if len(statements) > 0 { + ottlp := ottltraces.NewParser(Functions(), settings) + parsedStatements, err := ottlp.ParseStatements(statements) + if err != nil { + return nil, err + } + return &Processor{ + statements: parsedStatements, + }, nil + } + + pc := common.NewTracesParserCollection(Functions(), settings) + contexts, err := pc.ParseContextStatements(contextStatements) if err != nil { return nil, err } return &Processor{ - statements: parsedStatements, + contexts: contexts, }, nil } func (p *Processor) ProcessTraces(_ context.Context, td ptrace.Traces) (ptrace.Traces, error) { - for i := 0; i < td.ResourceSpans().Len(); i++ { - rspans := td.ResourceSpans().At(i) - for j := 0; j < rspans.ScopeSpans().Len(); j++ { - sspan := rspans.ScopeSpans().At(j) - spans := sspan.Spans() - for k := 0; k < spans.Len(); k++ { - ctx := ottltraces.NewTransformContext(spans.At(k), sspan.Scope(), rspans.Resource()) - for _, statement := range p.statements { - statement.Execute(ctx) + if len(p.statements) > 0 { + for i := 0; i < td.ResourceSpans().Len(); i++ { + rspans := td.ResourceSpans().At(i) + for j := 0; j < rspans.ScopeSpans().Len(); j++ { + sspan := rspans.ScopeSpans().At(j) + spans := sspan.Spans() + for k := 0; k < spans.Len(); k++ { + ctx := ottltraces.NewTransformContext(spans.At(k), sspan.Scope(), rspans.Resource()) + for _, statement := range p.statements { + statement.Execute(ctx) + } } } } + } else { + for _, contexts := range p.contexts { + contexts.ProcessTraces(td) + } } return td, nil } diff --git a/processor/transformprocessor/internal/traces/processor_test.go b/processor/transformprocessor/internal/traces/processor_test.go index be14087970b5..b058349871ef 100644 --- a/processor/transformprocessor/internal/traces/processor_test.go +++ b/processor/transformprocessor/internal/traces/processor_test.go @@ -19,6 +19,8 @@ import ( "testing" "time" + "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" + "github.com/stretchr/testify/assert" "go.opentelemetry.io/collector/component/componenttest" "go.opentelemetry.io/collector/pdata/pcommon" @@ -37,7 +39,77 @@ var ( spanID2 = [8]byte{8, 7, 6, 5, 4, 3, 2, 1} ) -func TestProcess(t *testing.T) { +func Test_ProcessTraces_ResourceContext(t *testing.T) { + tests := []struct { + statement string + want func(td ptrace.Traces) + }{ + { + statement: `set(attributes["test"], "pass")`, + want: func(td ptrace.Traces) { + td.ResourceSpans().At(0).Resource().Attributes().PutStr("test", "pass") + }, + }, + { + statement: `set(attributes["test"], "pass") where attributes["host.name"] == "wrong"`, + want: func(td ptrace.Traces) { + }, + }, + } + + for _, tt := range tests { + t.Run(tt.statement, func(t *testing.T) { + td := constructTraces() + processor, err := NewProcessor(nil, []common.ContextStatements{{Context: "resource", Statements: []string{tt.statement}}}, componenttest.NewNopTelemetrySettings()) + assert.NoError(t, err) + + _, err = processor.ProcessTraces(context.Background(), td) + assert.NoError(t, err) + + exTd := constructTraces() + tt.want(exTd) + + assert.Equal(t, exTd, td) + }) + } +} + +func Test_ProcessTraces_ScopeContext(t *testing.T) { + tests := []struct { + statement string + want func(td ptrace.Traces) + 
}{ + { + statement: `set(attributes["test"], "pass") where name == "scope"`, + want: func(td ptrace.Traces) { + td.ResourceSpans().At(0).ScopeSpans().At(0).Scope().Attributes().PutStr("test", "pass") + }, + }, + { + statement: `set(attributes["test"], "pass") where version == 2`, + want: func(td ptrace.Traces) { + }, + }, + } + + for _, tt := range tests { + t.Run(tt.statement, func(t *testing.T) { + td := constructTraces() + processor, err := NewProcessor(nil, []common.ContextStatements{{Context: "scope", Statements: []string{tt.statement}}}, componenttest.NewNopTelemetrySettings()) + assert.NoError(t, err) + + _, err = processor.ProcessTraces(context.Background(), td) + assert.NoError(t, err) + + exTd := constructTraces() + tt.want(exTd) + + assert.Equal(t, exTd, td) + }) + } +} + +func Test_ProcessTraces_TraceContext(t *testing.T) { tests := []struct { statement string want func(td ptrace.Traces) @@ -239,7 +311,96 @@ func TestProcess(t *testing.T) { for _, tt := range tests { t.Run(tt.statement, func(t *testing.T) { td := constructTraces() - processor, err := NewProcessor([]string{tt.statement}, Functions(), componenttest.NewNopTelemetrySettings()) + processor, err := NewProcessor(nil, []common.ContextStatements{{Context: "trace", Statements: []string{tt.statement}}}, componenttest.NewNopTelemetrySettings()) + assert.NoError(t, err) + + _, err = processor.ProcessTraces(context.Background(), td) + assert.NoError(t, err) + + exTd := constructTraces() + tt.want(exTd) + + assert.Equal(t, exTd, td) + }) + } +} + +func Test_ProcessTraces_MixContext(t *testing.T) { + tests := []struct { + name string + contextStatments []common.ContextStatements + want func(td ptrace.Traces) + }{ + { + name: "set resource and then use", + contextStatments: []common.ContextStatements{ + { + Context: "resource", + Statements: []string{ + `set(attributes["test"], "pass")`, + }, + }, + { + Context: "trace", + Statements: []string{ + `set(attributes["test"], "pass") where resource.attributes["test"] == "pass"`, + }, + }, + }, + want: func(td ptrace.Traces) { + td.ResourceSpans().At(0).Resource().Attributes().PutStr("test", "pass") + td.ResourceSpans().At(0).ScopeSpans().At(0).Spans().At(0).Attributes().PutStr("test", "pass") + td.ResourceSpans().At(0).ScopeSpans().At(0).Spans().At(1).Attributes().PutStr("test", "pass") + }, + }, + { + name: "set scope and then use", + contextStatments: []common.ContextStatements{ + { + Context: "scope", + Statements: []string{ + `set(attributes["test"], "pass")`, + }, + }, + { + Context: "trace", + Statements: []string{ + `set(attributes["test"], "pass") where instrumentation_scope.attributes["test"] == "pass"`, + }, + }, + }, + want: func(td ptrace.Traces) { + td.ResourceSpans().At(0).ScopeSpans().At(0).Scope().Attributes().PutStr("test", "pass") + td.ResourceSpans().At(0).ScopeSpans().At(0).Spans().At(0).Attributes().PutStr("test", "pass") + td.ResourceSpans().At(0).ScopeSpans().At(0).Spans().At(1).Attributes().PutStr("test", "pass") + }, + }, + { + name: "order matters", + contextStatments: []common.ContextStatements{ + { + Context: "trace", + Statements: []string{ + `set(attributes["test"], "pass") where instrumentation_scope.attributes["test"] == "pass"`, + }, + }, + { + Context: "scope", + Statements: []string{ + `set(attributes["test"], "pass")`, + }, + }, + }, + want: func(td ptrace.Traces) { + td.ResourceSpans().At(0).ScopeSpans().At(0).Scope().Attributes().PutStr("test", "pass") + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + td := 
constructTraces() + processor, err := NewProcessor(nil, tt.contextStatments, componenttest.NewNopTelemetrySettings()) assert.NoError(t, err) _, err = processor.ProcessTraces(context.Background(), td) @@ -289,7 +450,7 @@ func BenchmarkTwoSpans(b *testing.B) { for _, tt := range tests { b.Run(tt.name, func(b *testing.B) { - processor, err := NewProcessor(tt.statements, Functions(), componenttest.NewNopTelemetrySettings()) + processor, err := NewProcessor(tt.statements, nil, componenttest.NewNopTelemetrySettings()) assert.NoError(b, err) b.ResetTimer() for n := 0; n < b.N; n++ { @@ -331,7 +492,7 @@ func BenchmarkHundredSpans(b *testing.B) { } for _, tt := range tests { b.Run(tt.name, func(b *testing.B) { - processor, err := NewProcessor(tt.statements, Functions(), componenttest.NewNopTelemetrySettings()) + processor, err := NewProcessor(tt.statements, nil, componenttest.NewNopTelemetrySettings()) assert.NoError(b, err) b.ResetTimer() for n := 0; n < b.N; n++ { @@ -348,6 +509,7 @@ func constructTraces() ptrace.Traces { rs0 := td.ResourceSpans().AppendEmpty() rs0.Resource().Attributes().PutStr("host.name", "localhost") rs0ils0 := rs0.ScopeSpans().AppendEmpty() + rs0ils0.Scope().SetName("scope") fillSpanOne(rs0ils0.Spans().AppendEmpty()) fillSpanTwo(rs0ils0.Spans().AppendEmpty()) return td diff --git a/processor/transformprocessor/testdata/config.yaml b/processor/transformprocessor/testdata/config.yaml index f7d68f80a070..53db5f752e7f 100644 --- a/processor/transformprocessor/testdata/config.yaml +++ b/processor/transformprocessor/testdata/config.yaml @@ -1,4 +1,30 @@ transform: + trace_statements: + - context: trace + statements: + - set(name, "bear") where attributes["http.path"] == "/animal" + - keep_keys(attributes, ["http.method", "http.path"]) + - context: resource + statements: + - set(attributes["name"], "bear") + metric_statements: + - context: datapoint + statements: + - set(metric.name, "bear") where attributes["http.path"] == "/animal" + - keep_keys(attributes, ["http.method", "http.path"]) + - context: resource + statements: + - set(attributes["name"], "bear") + log_statements: + - context: log + statements: + - set(body, "bear") where attributes["http.path"] == "/animal" + - keep_keys(attributes, ["http.method", "http.path"]) + - context: resource + statements: + - set(attributes["name"], "bear") + +transform/deprecated_format: traces: statements: - set(name, "bear") where attributes["http.path"] == "/animal" @@ -12,38 +38,61 @@ transform: - set(body, "bear") where attributes["http.path"] == "/animal" - keep_keys(attributes, ["http.method", "http.path"]) -transform/bad_syntax_log: - logs: +transform/using_both_formats: + trace_statements: + - context: trace + statements: + - set(name, "bear") where attributes["http.path"] == "/animal" + - keep_keys(attributes, ["http.method", "http.path"]) + traces: statements: - - set(body, "bear" where attributes["http.path"] == "/animal" + - set(name, "bear") where attributes["http.path"] == "/animal" - keep_keys(attributes, ["http.method", "http.path"]) +transform/bad_syntax_log: + log_statements: + - context: log + statements: + - set(body, "bear" where attributes["http.path"] == "/animal" + - keep_keys(attributes, ["http.method", "http.path"]) + transform/bad_syntax_metric: - metrics: - statements: - - set(name, "bear" where attributes["http.path"] == "/animal" - - keep_keys(attributes, ["http.method", "http.path"]) + metric_statements: + - context: datapoint + statements: + - set(name, "bear" where attributes["http.path"] == "/animal" + - 
keep_keys(attributes, ["http.method", "http.path"]) transform/bad_syntax_trace: - traces: - statements: - - set(name, "bear" where attributes["http.path"] == "/animal" - - keep_keys(attributes, ["http.method", "http.path"]) + trace_statements: + - context: trace + statements: + - set(name, "bear" where attributes["http.path"] == "/animal" + - keep_keys(attributes, ["http.method", "http.path"]) transform/unknown_function_log: - logs: - statements: - - set(body, "bear") where attributes["http.path"] == "/animal" - - not_a_function(attributes, ["http.method", "http.path"]) + log_statements: + - context: log + statements: + - set(body, "bear") where attributes["http.path"] == "/animal" + - not_a_function(attributes, ["http.method", "http.path"]) transform/unknown_function_metric: - metrics: - statements: - - set(metric.name, "bear") where attributes["http.path"] == "/animal" - - not_a_function(attributes, ["http.method", "http.path"]) + metric_statements: + - context: datapoint + statements: + - set(metric.name, "bear") where attributes["http.path"] == "/animal" + - not_a_function(attributes, ["http.method", "http.path"]) transform/unknown_function_trace: - traces: - statements: - - set(name, "bear") where attributes["http.path"] == "/animal" - - not_a_function(attributes, ["http.method", "http.path"]) + trace_statements: + - context: trace + statements: + - set(name, "bear") where attributes["http.path"] == "/animal" + - not_a_function(attributes, ["http.method", "http.path"]) + +transform/unknown_context: + trace_statements: + - context: test + statements: + - set(name, "bear") where attributes["http.path"] == "/animal" \ No newline at end of file From 07457a88ca89e3f92870403d9d45385343a542f2 Mon Sep 17 00:00:00 2001 From: Tyler Helmuth <12352919+TylerHelmuth@users.noreply.github.com> Date: Fri, 21 Oct 2022 15:48:11 -0600 Subject: [PATCH 02/22] Add ContextStatements config --- .../internal/common/config.go | 28 ++ .../internal/common/functions.go | 10 + .../internal/common/processor.go | 330 ++++++++++++++++++ 3 files changed, 368 insertions(+) create mode 100644 processor/transformprocessor/internal/common/config.go create mode 100644 processor/transformprocessor/internal/common/processor.go diff --git a/processor/transformprocessor/internal/common/config.go b/processor/transformprocessor/internal/common/config.go new file mode 100644 index 000000000000..8106f6512ff1 --- /dev/null +++ b/processor/transformprocessor/internal/common/config.go @@ -0,0 +1,28 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package common // import "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" + +const ( + Resource string = "resource" + Scope string = "scope" + Trace string = "trace" + DataPoint string = "datapoint" + Log string = "log" +) + +type ContextStatements struct { + Context string `mapstructure:"context"` + Statements []string `mapstructure:"statements"` +} diff --git a/processor/transformprocessor/internal/common/functions.go b/processor/transformprocessor/internal/common/functions.go index 7b1e370d71d6..710d52faeed0 100644 --- a/processor/transformprocessor/internal/common/functions.go +++ b/processor/transformprocessor/internal/common/functions.go @@ -15,6 +15,8 @@ package common // import "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" import ( + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottlresource" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottlscope" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/ottlfuncs" ) @@ -38,3 +40,11 @@ func Functions[K any]() map[string]interface{} { "delete_matching_keys": ottlfuncs.DeleteMatchingKeys[K], } } + +func ResourceFunctions() map[string]interface{} { + return Functions[ottlresource.TransformContext]() +} + +func ScopeFunctions() map[string]interface{} { + return Functions[ottlscope.TransformContext]() +} diff --git a/processor/transformprocessor/internal/common/processor.go b/processor/transformprocessor/internal/common/processor.go new file mode 100644 index 000000000000..9de8fa7dad5f --- /dev/null +++ b/processor/transformprocessor/internal/common/processor.go @@ -0,0 +1,330 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
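
For orientation, the constants and ContextStatements struct above are the entire new configuration surface: each entry pairs a context name with a list of OTTL statements. The standalone sketch below is not part of the patch; the package layout and variable names are illustrative, and it assumes the code lives inside the transformprocessor module, since the common package is internal. It shows how the trace_statements testdata earlier in this patch corresponds to values of that struct.

package main

import (
	"fmt"

	"github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common"
)

func main() {
	// Go equivalent of the YAML testdata (sketch, not from the patch):
	//   trace_statements:
	//     - context: trace
	//       statements:
	//         - set(name, "bear") where attributes["http.path"] == "/animal"
	//     - context: resource
	//       statements:
	//         - set(attributes["name"], "bear")
	traceStatements := []common.ContextStatements{
		{Context: common.Trace, Statements: []string{`set(name, "bear") where attributes["http.path"] == "/animal"`}},
		{Context: common.Resource, Statements: []string{`set(attributes["name"], "bear")`}},
	}
	fmt.Printf("parsed %d context statement groups\n", len(traceStatements))
}
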
+ +package common // import "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" +import ( + "fmt" + + "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/pdata/pcommon" + "go.opentelemetry.io/collector/pdata/plog" + "go.opentelemetry.io/collector/pdata/pmetric" + "go.opentelemetry.io/collector/pdata/ptrace" + "go.uber.org/multierr" + + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottldatapoints" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottllogs" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottlresource" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottlscope" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottltraces" +) + +type Context interface { + ProcessTraces(td ptrace.Traces) + ProcessMetrics(td pmetric.Metrics) + ProcessLogs(td plog.Logs) +} + +type ResourceStatements struct { + Statements []*ottl.Statement[ottlresource.TransformContext] +} + +func (r *ResourceStatements) ProcessTraces(td ptrace.Traces) { + for i := 0; i < td.ResourceSpans().Len(); i++ { + rspans := td.ResourceSpans().At(i) + ctx := ottlresource.NewTransformContext(rspans.Resource()) + for _, statement := range r.Statements { + statement.Execute(ctx) + } + } +} + +func (r *ResourceStatements) ProcessMetrics(td pmetric.Metrics) { + for i := 0; i < td.ResourceMetrics().Len(); i++ { + rmetrics := td.ResourceMetrics().At(i) + ctx := ottlresource.NewTransformContext(rmetrics.Resource()) + for _, statement := range r.Statements { + statement.Execute(ctx) + } + } +} + +func (r *ResourceStatements) ProcessLogs(td plog.Logs) { + for i := 0; i < td.ResourceLogs().Len(); i++ { + rlogs := td.ResourceLogs().At(i) + ctx := ottlresource.NewTransformContext(rlogs.Resource()) + for _, statement := range r.Statements { + statement.Execute(ctx) + } + } +} + +type ScopeStatements struct { + Statements []*ottl.Statement[ottlscope.TransformContext] +} + +func (s *ScopeStatements) ProcessTraces(td ptrace.Traces) { + for i := 0; i < td.ResourceSpans().Len(); i++ { + rspans := td.ResourceSpans().At(i) + for j := 0; j < rspans.ScopeSpans().Len(); j++ { + sspans := rspans.ScopeSpans().At(j) + ctx := ottlscope.NewTransformContext(sspans.Scope(), rspans.Resource()) + for _, statement := range s.Statements { + statement.Execute(ctx) + } + } + } +} + +func (s *ScopeStatements) ProcessMetrics(td pmetric.Metrics) { + for i := 0; i < td.ResourceMetrics().Len(); i++ { + rmetrics := td.ResourceMetrics().At(i) + for j := 0; j < rmetrics.ScopeMetrics().Len(); j++ { + smetrics := rmetrics.ScopeMetrics().At(j) + ctx := ottlscope.NewTransformContext(smetrics.Scope(), rmetrics.Resource()) + for _, statement := range s.Statements { + statement.Execute(ctx) + } + } + } +} + +func (s *ScopeStatements) ProcessLogs(td plog.Logs) { + for i := 0; i < td.ResourceLogs().Len(); i++ { + rlogs := td.ResourceLogs().At(i) + for j := 0; j < rlogs.ScopeLogs().Len(); j++ { + slogs := rlogs.ScopeLogs().At(j) + ctx := ottlscope.NewTransformContext(slogs.Scope(), rlogs.Resource()) + for _, statement := range s.Statements { + statement.Execute(ctx) + } + } + } +} + +type TraceStatements struct { + statements []*ottl.Statement[ottltraces.TransformContext] +} + +func (t *TraceStatements) ProcessTraces(td 
ptrace.Traces) { + for i := 0; i < td.ResourceSpans().Len(); i++ { + rspans := td.ResourceSpans().At(i) + for j := 0; j < rspans.ScopeSpans().Len(); j++ { + sspans := rspans.ScopeSpans().At(j) + spans := sspans.Spans() + for k := 0; k < spans.Len(); k++ { + ctx := ottltraces.NewTransformContext(spans.At(k), sspans.Scope(), rspans.Resource()) + for _, statement := range t.statements { + statement.Execute(ctx) + } + } + } + } +} + +func (t *TraceStatements) ProcessMetrics(td pmetric.Metrics) {} + +func (t *TraceStatements) ProcessLogs(td plog.Logs) {} + +type LogStatements struct { + statements []*ottl.Statement[ottllogs.TransformContext] +} + +func (l *LogStatements) ProcessTraces(td ptrace.Traces) {} + +func (l *LogStatements) ProcessMetrics(td pmetric.Metrics) {} + +func (l *LogStatements) ProcessLogs(td plog.Logs) { + for i := 0; i < td.ResourceLogs().Len(); i++ { + rlogs := td.ResourceLogs().At(i) + for j := 0; j < rlogs.ScopeLogs().Len(); j++ { + slogs := rlogs.ScopeLogs().At(j) + logs := slogs.LogRecords() + for k := 0; k < logs.Len(); k++ { + ctx := ottllogs.NewTransformContext(logs.At(k), slogs.Scope(), rlogs.Resource()) + for _, statement := range l.statements { + statement.Execute(ctx) + } + } + } + } +} + +type DataPointStatements struct { + statements []*ottl.Statement[ottldatapoints.TransformContext] +} + +func (d *DataPointStatements) ProcessTraces(td ptrace.Traces) {} + +func (d *DataPointStatements) ProcessLogs(td plog.Logs) {} + +func (d *DataPointStatements) ProcessMetrics(td pmetric.Metrics) { + for i := 0; i < td.ResourceMetrics().Len(); i++ { + rmetrics := td.ResourceMetrics().At(i) + for j := 0; j < rmetrics.ScopeMetrics().Len(); j++ { + smetrics := rmetrics.ScopeMetrics().At(j) + metrics := smetrics.Metrics() + for k := 0; k < metrics.Len(); k++ { + metric := metrics.At(k) + switch metric.Type() { + case pmetric.MetricTypeSum: + d.handleNumberDataPoints(metric.Sum().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + case pmetric.MetricTypeGauge: + d.handleNumberDataPoints(metric.Gauge().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + case pmetric.MetricTypeHistogram: + d.handleHistogramDataPoints(metric.Histogram().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + case pmetric.MetricTypeExponentialHistogram: + d.handleExponetialHistogramDataPoints(metric.ExponentialHistogram().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + case pmetric.MetricTypeSummary: + d.handleSummaryDataPoints(metric.Summary().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + } + } + } + } +} + +func (d *DataPointStatements) handleNumberDataPoints(dps pmetric.NumberDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) { + for i := 0; i < dps.Len(); i++ { + ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) + d.callFunctions(ctx) + } +} + +func (d *DataPointStatements) handleHistogramDataPoints(dps pmetric.HistogramDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) { + for i := 0; i < dps.Len(); i++ { + ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) + d.callFunctions(ctx) + } +} + +func (d *DataPointStatements) handleExponetialHistogramDataPoints(dps pmetric.ExponentialHistogramDataPointSlice, metric pmetric.Metric, metrics 
pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) { + for i := 0; i < dps.Len(); i++ { + ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) + d.callFunctions(ctx) + } +} + +func (d *DataPointStatements) handleSummaryDataPoints(dps pmetric.SummaryDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) { + for i := 0; i < dps.Len(); i++ { + ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) + d.callFunctions(ctx) + } +} + +func (d *DataPointStatements) callFunctions(ctx ottldatapoints.TransformContext) { + for _, statement := range d.statements { + statement.Execute(ctx) + } +} + +type ParserCollection struct { + resourceParser ottl.Parser[ottlresource.TransformContext] + scopeParser ottl.Parser[ottlscope.TransformContext] + traceParser ottl.Parser[ottltraces.TransformContext] + dataPointsParser ottl.Parser[ottldatapoints.TransformContext] + logParser ottl.Parser[ottllogs.TransformContext] +} + +func NewTracesParserCollection(functions map[string]interface{}, settings component.TelemetrySettings) ParserCollection { + return ParserCollection{ + resourceParser: ottlresource.NewParser(ResourceFunctions(), settings), + scopeParser: ottlscope.NewParser(ScopeFunctions(), settings), + traceParser: ottltraces.NewParser(functions, settings), + dataPointsParser: ottl.Parser[ottldatapoints.TransformContext]{}, + logParser: ottl.Parser[ottllogs.TransformContext]{}, + } +} + +func NewMetricsParserCollection(functions map[string]interface{}, settings component.TelemetrySettings) ParserCollection { + return ParserCollection{ + resourceParser: ottlresource.NewParser(ResourceFunctions(), settings), + scopeParser: ottlscope.NewParser(ScopeFunctions(), settings), + traceParser: ottl.Parser[ottltraces.TransformContext]{}, + dataPointsParser: ottldatapoints.NewParser(functions, settings), + logParser: ottl.Parser[ottllogs.TransformContext]{}, + } +} + +func NewLogsParserCollection(functions map[string]interface{}, settings component.TelemetrySettings) ParserCollection { + return ParserCollection{ + resourceParser: ottlresource.NewParser(ResourceFunctions(), settings), + scopeParser: ottlscope.NewParser(ScopeFunctions(), settings), + traceParser: ottl.Parser[ottltraces.TransformContext]{}, + dataPointsParser: ottl.Parser[ottldatapoints.TransformContext]{}, + logParser: ottllogs.NewParser(functions, settings), + } +} + +func (pc ParserCollection) ParseContextStatements(contextStatements []ContextStatements) ([]Context, error) { + contexts := make([]Context, len(contextStatements)) + var errors error + + for i, s := range contextStatements { + switch s.Context { + case Resource: + statements, err := pc.resourceParser.ParseStatements(s.Statements) + if err != nil { + errors = multierr.Append(errors, err) + continue + } + contexts[i] = &ResourceStatements{ + Statements: statements, + } + case Scope: + statements, err := pc.scopeParser.ParseStatements(s.Statements) + if err != nil { + errors = multierr.Append(errors, err) + continue + } + contexts[i] = &ScopeStatements{ + Statements: statements, + } + case Trace: + statements, err := pc.traceParser.ParseStatements(s.Statements) + if err != nil { + errors = multierr.Append(errors, err) + continue + } + contexts[i] = &TraceStatements{ + statements: statements, + } + case DataPoint: + statements, err := pc.dataPointsParser.ParseStatements(s.Statements) + if err != nil { + errors = multierr.Append(errors, 
err) + continue + } + contexts[i] = &DataPointStatements{ + statements: statements, + } + case Log: + statements, err := pc.logParser.ParseStatements(s.Statements) + if err != nil { + errors = multierr.Append(errors, err) + continue + } + contexts[i] = &LogStatements{ + statements: statements, + } + default: + errors = multierr.Append(errors, fmt.Errorf("context, %v, is not a valid context", s.Context)) + } + } + + if errors != nil { + return nil, errors + } + return contexts, nil +} From cd058e7090db158710d9d8a2593b0914b9fb24da Mon Sep 17 00:00:00 2001 From: Tyler Helmuth <12352919+TylerHelmuth@users.noreply.github.com> Date: Tue, 25 Oct 2022 17:13:11 -0600 Subject: [PATCH 03/22] Add high-level context --- .../internal/common/processor.go | 46 +++++++++++++++---- .../internal/logs/processor.go | 2 +- .../internal/metrics/processor.go | 2 +- .../internal/traces/processor.go | 2 +- 4 files changed, 40 insertions(+), 12 deletions(-) diff --git a/processor/transformprocessor/internal/common/processor.go b/processor/transformprocessor/internal/common/processor.go index 9de8fa7dad5f..bdfb22fe8bd8 100644 --- a/processor/transformprocessor/internal/common/processor.go +++ b/processor/transformprocessor/internal/common/processor.go @@ -32,15 +32,31 @@ import ( ) type Context interface { + IsContext() bool +} + +type TracesContext interface { ProcessTraces(td ptrace.Traces) +} + +type MetricsContext interface { ProcessMetrics(td pmetric.Metrics) +} + +type LogsContext interface { ProcessLogs(td plog.Logs) } +var _ Context = &ResourceStatements{} + type ResourceStatements struct { Statements []*ottl.Statement[ottlresource.TransformContext] } +func (r *ResourceStatements) IsContext() bool { + return true +} + func (r *ResourceStatements) ProcessTraces(td ptrace.Traces) { for i := 0; i < td.ResourceSpans().Len(); i++ { rspans := td.ResourceSpans().At(i) @@ -71,10 +87,16 @@ func (r *ResourceStatements) ProcessLogs(td plog.Logs) { } } +var _ Context = &ScopeStatements{} + type ScopeStatements struct { Statements []*ottl.Statement[ottlscope.TransformContext] } +func (s *ScopeStatements) IsContext() bool { + return true +} + func (s *ScopeStatements) ProcessTraces(td ptrace.Traces) { for i := 0; i < td.ResourceSpans().Len(); i++ { rspans := td.ResourceSpans().At(i) @@ -114,10 +136,16 @@ func (s *ScopeStatements) ProcessLogs(td plog.Logs) { } } +var _ Context = &TraceStatements{} + type TraceStatements struct { statements []*ottl.Statement[ottltraces.TransformContext] } +func (t *TraceStatements) IsContext() bool { + return true +} + func (t *TraceStatements) ProcessTraces(td ptrace.Traces) { for i := 0; i < td.ResourceSpans().Len(); i++ { rspans := td.ResourceSpans().At(i) @@ -134,17 +162,15 @@ func (t *TraceStatements) ProcessTraces(td ptrace.Traces) { } } -func (t *TraceStatements) ProcessMetrics(td pmetric.Metrics) {} - -func (t *TraceStatements) ProcessLogs(td plog.Logs) {} +var _ Context = &LogStatements{} type LogStatements struct { statements []*ottl.Statement[ottllogs.TransformContext] } -func (l *LogStatements) ProcessTraces(td ptrace.Traces) {} - -func (l *LogStatements) ProcessMetrics(td pmetric.Metrics) {} +func (l *LogStatements) IsContext() bool { + return true +} func (l *LogStatements) ProcessLogs(td plog.Logs) { for i := 0; i < td.ResourceLogs().Len(); i++ { @@ -162,13 +188,15 @@ func (l *LogStatements) ProcessLogs(td plog.Logs) { } } +var _ Context = &DataPointStatements{} + type DataPointStatements struct { statements []*ottl.Statement[ottldatapoints.TransformContext] } -func (d 
*DataPointStatements) ProcessTraces(td ptrace.Traces) {} - -func (d *DataPointStatements) ProcessLogs(td plog.Logs) {} +func (d *DataPointStatements) IsContext() bool { + return true +} func (d *DataPointStatements) ProcessMetrics(td pmetric.Metrics) { for i := 0; i < td.ResourceMetrics().Len(); i++ { diff --git a/processor/transformprocessor/internal/logs/processor.go b/processor/transformprocessor/internal/logs/processor.go index 608fe513d60a..4bbca741eb7d 100644 --- a/processor/transformprocessor/internal/logs/processor.go +++ b/processor/transformprocessor/internal/logs/processor.go @@ -68,7 +68,7 @@ func (p *Processor) ProcessLogs(_ context.Context, td plog.Logs) (plog.Logs, err } } else { for _, contexts := range p.contexts { - contexts.ProcessLogs(td) + contexts.(common.LogsContext).ProcessLogs(td) } } return td, nil diff --git a/processor/transformprocessor/internal/metrics/processor.go b/processor/transformprocessor/internal/metrics/processor.go index 66f99f997c2d..4446435367c0 100644 --- a/processor/transformprocessor/internal/metrics/processor.go +++ b/processor/transformprocessor/internal/metrics/processor.go @@ -78,7 +78,7 @@ func (p *Processor) ProcessMetrics(_ context.Context, td pmetric.Metrics) (pmetr } } else { for _, contexts := range p.contexts { - contexts.ProcessMetrics(td) + contexts.(common.MetricsContext).ProcessMetrics(td) } } return td, nil diff --git a/processor/transformprocessor/internal/traces/processor.go b/processor/transformprocessor/internal/traces/processor.go index 3317c0692f5c..7507126368df 100644 --- a/processor/transformprocessor/internal/traces/processor.go +++ b/processor/transformprocessor/internal/traces/processor.go @@ -69,7 +69,7 @@ func (p *Processor) ProcessTraces(_ context.Context, td ptrace.Traces) (ptrace.T } } else { for _, contexts := range p.contexts { - contexts.ProcessTraces(td) + contexts.(common.TracesContext).ProcessTraces(td) } } return td, nil From 5f8471c71e2bdca578c573cd83854dd3410bce5b Mon Sep 17 00:00:00 2001 From: Tyler Helmuth <12352919+TylerHelmuth@users.noreply.github.com> Date: Tue, 25 Oct 2022 17:14:04 -0600 Subject: [PATCH 04/22] Add high-level context --- .../internal/common/processor.go | 46 +++++++++++++++---- 1 file changed, 37 insertions(+), 9 deletions(-) diff --git a/processor/transformprocessor/internal/common/processor.go b/processor/transformprocessor/internal/common/processor.go index 9de8fa7dad5f..bdfb22fe8bd8 100644 --- a/processor/transformprocessor/internal/common/processor.go +++ b/processor/transformprocessor/internal/common/processor.go @@ -32,15 +32,31 @@ import ( ) type Context interface { + IsContext() bool +} + +type TracesContext interface { ProcessTraces(td ptrace.Traces) +} + +type MetricsContext interface { ProcessMetrics(td pmetric.Metrics) +} + +type LogsContext interface { ProcessLogs(td plog.Logs) } +var _ Context = &ResourceStatements{} + type ResourceStatements struct { Statements []*ottl.Statement[ottlresource.TransformContext] } +func (r *ResourceStatements) IsContext() bool { + return true +} + func (r *ResourceStatements) ProcessTraces(td ptrace.Traces) { for i := 0; i < td.ResourceSpans().Len(); i++ { rspans := td.ResourceSpans().At(i) @@ -71,10 +87,16 @@ func (r *ResourceStatements) ProcessLogs(td plog.Logs) { } } +var _ Context = &ScopeStatements{} + type ScopeStatements struct { Statements []*ottl.Statement[ottlscope.TransformContext] } +func (s *ScopeStatements) IsContext() bool { + return true +} + func (s *ScopeStatements) ProcessTraces(td ptrace.Traces) { for i := 0; i < 
td.ResourceSpans().Len(); i++ { rspans := td.ResourceSpans().At(i) @@ -114,10 +136,16 @@ func (s *ScopeStatements) ProcessLogs(td plog.Logs) { } } +var _ Context = &TraceStatements{} + type TraceStatements struct { statements []*ottl.Statement[ottltraces.TransformContext] } +func (t *TraceStatements) IsContext() bool { + return true +} + func (t *TraceStatements) ProcessTraces(td ptrace.Traces) { for i := 0; i < td.ResourceSpans().Len(); i++ { rspans := td.ResourceSpans().At(i) @@ -134,17 +162,15 @@ func (t *TraceStatements) ProcessTraces(td ptrace.Traces) { } } -func (t *TraceStatements) ProcessMetrics(td pmetric.Metrics) {} - -func (t *TraceStatements) ProcessLogs(td plog.Logs) {} +var _ Context = &LogStatements{} type LogStatements struct { statements []*ottl.Statement[ottllogs.TransformContext] } -func (l *LogStatements) ProcessTraces(td ptrace.Traces) {} - -func (l *LogStatements) ProcessMetrics(td pmetric.Metrics) {} +func (l *LogStatements) IsContext() bool { + return true +} func (l *LogStatements) ProcessLogs(td plog.Logs) { for i := 0; i < td.ResourceLogs().Len(); i++ { @@ -162,13 +188,15 @@ func (l *LogStatements) ProcessLogs(td plog.Logs) { } } +var _ Context = &DataPointStatements{} + type DataPointStatements struct { statements []*ottl.Statement[ottldatapoints.TransformContext] } -func (d *DataPointStatements) ProcessTraces(td ptrace.Traces) {} - -func (d *DataPointStatements) ProcessLogs(td plog.Logs) {} +func (d *DataPointStatements) IsContext() bool { + return true +} func (d *DataPointStatements) ProcessMetrics(td pmetric.Metrics) { for i := 0; i < td.ResourceMetrics().Len(); i++ { From ffc1fee0f1d39e414db4a2f4eba5873a7dc3f6ed Mon Sep 17 00:00:00 2001 From: Tyler Helmuth <12352919+TylerHelmuth@users.noreply.github.com> Date: Wed, 26 Oct 2022 20:07:05 -0600 Subject: [PATCH 05/22] respond to feedback --- .../internal/common/processor.go | 32 +++++++++---------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/processor/transformprocessor/internal/common/processor.go b/processor/transformprocessor/internal/common/processor.go index bdfb22fe8bd8..5d797b970f9e 100644 --- a/processor/transformprocessor/internal/common/processor.go +++ b/processor/transformprocessor/internal/common/processor.go @@ -32,7 +32,8 @@ import ( ) type Context interface { - IsContext() bool + // IsContext dummy method for type safety + IsContext() } type TracesContext interface { @@ -48,14 +49,15 @@ type LogsContext interface { } var _ Context = &ResourceStatements{} +var _ TracesContext = &ResourceStatements{} +var _ MetricsContext = &ResourceStatements{} +var _ LogsContext = &ResourceStatements{} type ResourceStatements struct { Statements []*ottl.Statement[ottlresource.TransformContext] } -func (r *ResourceStatements) IsContext() bool { - return true -} +func (r *ResourceStatements) IsContext() {} func (r *ResourceStatements) ProcessTraces(td ptrace.Traces) { for i := 0; i < td.ResourceSpans().Len(); i++ { @@ -88,14 +90,15 @@ func (r *ResourceStatements) ProcessLogs(td plog.Logs) { } var _ Context = &ScopeStatements{} +var _ TracesContext = &ScopeStatements{} +var _ MetricsContext = &ScopeStatements{} +var _ LogsContext = &ScopeStatements{} type ScopeStatements struct { Statements []*ottl.Statement[ottlscope.TransformContext] } -func (s *ScopeStatements) IsContext() bool { - return true -} +func (s *ScopeStatements) IsContext() {} func (s *ScopeStatements) ProcessTraces(td ptrace.Traces) { for i := 0; i < td.ResourceSpans().Len(); i++ { @@ -137,14 +140,13 @@ func (s *ScopeStatements) 
ProcessLogs(td plog.Logs) { } var _ Context = &TraceStatements{} +var _ TracesContext = &TraceStatements{} type TraceStatements struct { statements []*ottl.Statement[ottltraces.TransformContext] } -func (t *TraceStatements) IsContext() bool { - return true -} +func (t *TraceStatements) IsContext() {} func (t *TraceStatements) ProcessTraces(td ptrace.Traces) { for i := 0; i < td.ResourceSpans().Len(); i++ { @@ -163,14 +165,13 @@ func (t *TraceStatements) ProcessTraces(td ptrace.Traces) { } var _ Context = &LogStatements{} +var _ LogsContext = &LogStatements{} type LogStatements struct { statements []*ottl.Statement[ottllogs.TransformContext] } -func (l *LogStatements) IsContext() bool { - return true -} +func (l *LogStatements) IsContext() {} func (l *LogStatements) ProcessLogs(td plog.Logs) { for i := 0; i < td.ResourceLogs().Len(); i++ { @@ -189,14 +190,13 @@ func (l *LogStatements) ProcessLogs(td plog.Logs) { } var _ Context = &DataPointStatements{} +var _ MetricsContext = &DataPointStatements{} type DataPointStatements struct { statements []*ottl.Statement[ottldatapoints.TransformContext] } -func (d *DataPointStatements) IsContext() bool { - return true -} +func (d *DataPointStatements) IsContext() {} func (d *DataPointStatements) ProcessMetrics(td pmetric.Metrics) { for i := 0; i < td.ResourceMetrics().Len(); i++ { From 38b7d9cdbc1c04a153acb1cd83f6d2ac1a7d1cd3 Mon Sep 17 00:00:00 2001 From: Tyler Helmuth <12352919+TylerHelmuth@users.noreply.github.com> Date: Thu, 27 Oct 2022 08:42:37 -0600 Subject: [PATCH 06/22] Fix merge --- .../internal/common/processor.go | 127 +++++++++++++----- 1 file changed, 92 insertions(+), 35 deletions(-) diff --git a/processor/transformprocessor/internal/common/processor.go b/processor/transformprocessor/internal/common/processor.go index 5d797b970f9e..257ba6539c02 100644 --- a/processor/transformprocessor/internal/common/processor.go +++ b/processor/transformprocessor/internal/common/processor.go @@ -37,15 +37,15 @@ type Context interface { } type TracesContext interface { - ProcessTraces(td ptrace.Traces) + ProcessTraces(td ptrace.Traces) error } type MetricsContext interface { - ProcessMetrics(td pmetric.Metrics) + ProcessMetrics(td pmetric.Metrics) error } type LogsContext interface { - ProcessLogs(td plog.Logs) + ProcessLogs(td plog.Logs) error } var _ Context = &ResourceStatements{} @@ -59,34 +59,46 @@ type ResourceStatements struct { func (r *ResourceStatements) IsContext() {} -func (r *ResourceStatements) ProcessTraces(td ptrace.Traces) { +func (r *ResourceStatements) ProcessTraces(td ptrace.Traces) error { for i := 0; i < td.ResourceSpans().Len(); i++ { rspans := td.ResourceSpans().At(i) ctx := ottlresource.NewTransformContext(rspans.Resource()) for _, statement := range r.Statements { - statement.Execute(ctx) + _, _, err := statement.Execute(ctx) + if err != nil { + return err + } } } + return nil } -func (r *ResourceStatements) ProcessMetrics(td pmetric.Metrics) { +func (r *ResourceStatements) ProcessMetrics(td pmetric.Metrics) error { for i := 0; i < td.ResourceMetrics().Len(); i++ { rmetrics := td.ResourceMetrics().At(i) ctx := ottlresource.NewTransformContext(rmetrics.Resource()) for _, statement := range r.Statements { - statement.Execute(ctx) + _, _, err := statement.Execute(ctx) + if err != nil { + return err + } } } + return nil } -func (r *ResourceStatements) ProcessLogs(td plog.Logs) { +func (r *ResourceStatements) ProcessLogs(td plog.Logs) error { for i := 0; i < td.ResourceLogs().Len(); i++ { rlogs := td.ResourceLogs().At(i) ctx := 
ottlresource.NewTransformContext(rlogs.Resource()) for _, statement := range r.Statements { - statement.Execute(ctx) + _, _, err := statement.Execute(ctx) + if err != nil { + return err + } } } + return nil } var _ Context = &ScopeStatements{} @@ -100,43 +112,55 @@ type ScopeStatements struct { func (s *ScopeStatements) IsContext() {} -func (s *ScopeStatements) ProcessTraces(td ptrace.Traces) { +func (s *ScopeStatements) ProcessTraces(td ptrace.Traces) error { for i := 0; i < td.ResourceSpans().Len(); i++ { rspans := td.ResourceSpans().At(i) for j := 0; j < rspans.ScopeSpans().Len(); j++ { sspans := rspans.ScopeSpans().At(j) ctx := ottlscope.NewTransformContext(sspans.Scope(), rspans.Resource()) for _, statement := range s.Statements { - statement.Execute(ctx) + _, _, err := statement.Execute(ctx) + if err != nil { + return err + } } } } + return nil } -func (s *ScopeStatements) ProcessMetrics(td pmetric.Metrics) { +func (s *ScopeStatements) ProcessMetrics(td pmetric.Metrics) error { for i := 0; i < td.ResourceMetrics().Len(); i++ { rmetrics := td.ResourceMetrics().At(i) for j := 0; j < rmetrics.ScopeMetrics().Len(); j++ { smetrics := rmetrics.ScopeMetrics().At(j) ctx := ottlscope.NewTransformContext(smetrics.Scope(), rmetrics.Resource()) for _, statement := range s.Statements { - statement.Execute(ctx) + _, _, err := statement.Execute(ctx) + if err != nil { + return err + } } } } + return nil } -func (s *ScopeStatements) ProcessLogs(td plog.Logs) { +func (s *ScopeStatements) ProcessLogs(td plog.Logs) error { for i := 0; i < td.ResourceLogs().Len(); i++ { rlogs := td.ResourceLogs().At(i) for j := 0; j < rlogs.ScopeLogs().Len(); j++ { slogs := rlogs.ScopeLogs().At(j) ctx := ottlscope.NewTransformContext(slogs.Scope(), rlogs.Resource()) for _, statement := range s.Statements { - statement.Execute(ctx) + _, _, err := statement.Execute(ctx) + if err != nil { + return err + } } } } + return nil } var _ Context = &TraceStatements{} @@ -148,7 +172,7 @@ type TraceStatements struct { func (t *TraceStatements) IsContext() {} -func (t *TraceStatements) ProcessTraces(td ptrace.Traces) { +func (t *TraceStatements) ProcessTraces(td ptrace.Traces) error { for i := 0; i < td.ResourceSpans().Len(); i++ { rspans := td.ResourceSpans().At(i) for j := 0; j < rspans.ScopeSpans().Len(); j++ { @@ -157,11 +181,15 @@ func (t *TraceStatements) ProcessTraces(td ptrace.Traces) { for k := 0; k < spans.Len(); k++ { ctx := ottltraces.NewTransformContext(spans.At(k), sspans.Scope(), rspans.Resource()) for _, statement := range t.statements { - statement.Execute(ctx) + _, _, err := statement.Execute(ctx) + if err != nil { + return err + } } } } } + return nil } var _ Context = &LogStatements{} @@ -173,7 +201,7 @@ type LogStatements struct { func (l *LogStatements) IsContext() {} -func (l *LogStatements) ProcessLogs(td plog.Logs) { +func (l *LogStatements) ProcessLogs(td plog.Logs) error { for i := 0; i < td.ResourceLogs().Len(); i++ { rlogs := td.ResourceLogs().At(i) for j := 0; j < rlogs.ScopeLogs().Len(); j++ { @@ -182,11 +210,15 @@ func (l *LogStatements) ProcessLogs(td plog.Logs) { for k := 0; k < logs.Len(); k++ { ctx := ottllogs.NewTransformContext(logs.At(k), slogs.Scope(), rlogs.Resource()) for _, statement := range l.statements { - statement.Execute(ctx) + _, _, err := statement.Execute(ctx) + if err != nil { + return err + } } } } } + return nil } var _ Context = &DataPointStatements{} @@ -198,7 +230,7 @@ type DataPointStatements struct { func (d *DataPointStatements) IsContext() {} -func (d *DataPointStatements) 
ProcessMetrics(td pmetric.Metrics) { +func (d *DataPointStatements) ProcessMetrics(td pmetric.Metrics) error { for i := 0; i < td.ResourceMetrics().Len(); i++ { rmetrics := td.ResourceMetrics().At(i) for j := 0; j < rmetrics.ScopeMetrics().Len(); j++ { @@ -206,55 +238,80 @@ func (d *DataPointStatements) ProcessMetrics(td pmetric.Metrics) { metrics := smetrics.Metrics() for k := 0; k < metrics.Len(); k++ { metric := metrics.At(k) + var err error switch metric.Type() { case pmetric.MetricTypeSum: - d.handleNumberDataPoints(metric.Sum().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + err = d.handleNumberDataPoints(metric.Sum().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) case pmetric.MetricTypeGauge: - d.handleNumberDataPoints(metric.Gauge().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + err = d.handleNumberDataPoints(metric.Gauge().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) case pmetric.MetricTypeHistogram: - d.handleHistogramDataPoints(metric.Histogram().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + err = d.handleHistogramDataPoints(metric.Histogram().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) case pmetric.MetricTypeExponentialHistogram: - d.handleExponetialHistogramDataPoints(metric.ExponentialHistogram().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + err = d.handleExponetialHistogramDataPoints(metric.ExponentialHistogram().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) case pmetric.MetricTypeSummary: - d.handleSummaryDataPoints(metric.Summary().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + err = d.handleSummaryDataPoints(metric.Summary().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + } + if err != nil { + return err } } } } + return nil } -func (d *DataPointStatements) handleNumberDataPoints(dps pmetric.NumberDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) { +func (d *DataPointStatements) handleNumberDataPoints(dps pmetric.NumberDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { for i := 0; i < dps.Len(); i++ { ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) - d.callFunctions(ctx) + err := d.callFunctions(ctx) + if err != nil { + return err + } } + return nil } -func (d *DataPointStatements) handleHistogramDataPoints(dps pmetric.HistogramDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) { +func (d *DataPointStatements) handleHistogramDataPoints(dps pmetric.HistogramDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { for i := 0; i < dps.Len(); i++ { ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) - d.callFunctions(ctx) + err := d.callFunctions(ctx) + if err != nil { + return err + } } + return nil } -func (d *DataPointStatements) handleExponetialHistogramDataPoints(dps pmetric.ExponentialHistogramDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) { +func (d *DataPointStatements) 
handleExponetialHistogramDataPoints(dps pmetric.ExponentialHistogramDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { for i := 0; i < dps.Len(); i++ { ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) - d.callFunctions(ctx) + err := d.callFunctions(ctx) + if err != nil { + return err + } } + return nil } -func (d *DataPointStatements) handleSummaryDataPoints(dps pmetric.SummaryDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) { +func (d *DataPointStatements) handleSummaryDataPoints(dps pmetric.SummaryDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { for i := 0; i < dps.Len(); i++ { ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) - d.callFunctions(ctx) + err := d.callFunctions(ctx) + if err != nil { + return err + } } + return nil } -func (d *DataPointStatements) callFunctions(ctx ottldatapoints.TransformContext) { +func (d *DataPointStatements) callFunctions(ctx ottldatapoints.TransformContext) error { for _, statement := range d.statements { - statement.Execute(ctx) + _, _, err := statement.Execute(ctx) + if err != nil { + return err + } } + return nil } type ParserCollection struct { From edfe1a45f223820f9fb535957268bba0d58fddbc Mon Sep 17 00:00:00 2001 From: Tyler Helmuth <12352919+TylerHelmuth@users.noreply.github.com> Date: Mon, 31 Oct 2022 14:45:56 -0600 Subject: [PATCH 07/22] Adjust ParserCollection --- .../internal/common/processor.go | 140 +++++++++--------- 1 file changed, 72 insertions(+), 68 deletions(-) diff --git a/processor/transformprocessor/internal/common/processor.go b/processor/transformprocessor/internal/common/processor.go index 257ba6539c02..7d6e056071ce 100644 --- a/processor/transformprocessor/internal/common/processor.go +++ b/processor/transformprocessor/internal/common/processor.go @@ -15,7 +15,6 @@ package common // import "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" import ( "fmt" - "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/plog" @@ -32,8 +31,8 @@ import ( ) type Context interface { - // IsContext dummy method for type safety - IsContext() + // isContext dummy method for type safety + isContext() } type TracesContext interface { @@ -48,18 +47,18 @@ type LogsContext interface { ProcessLogs(td plog.Logs) error } -var _ Context = &ResourceStatements{} -var _ TracesContext = &ResourceStatements{} -var _ MetricsContext = &ResourceStatements{} -var _ LogsContext = &ResourceStatements{} +var _ Context = &resourceStatements{} +var _ TracesContext = &resourceStatements{} +var _ MetricsContext = &resourceStatements{} +var _ LogsContext = &resourceStatements{} -type ResourceStatements struct { +type resourceStatements struct { Statements []*ottl.Statement[ottlresource.TransformContext] } -func (r *ResourceStatements) IsContext() {} +func (r *resourceStatements) isContext() {} -func (r *ResourceStatements) ProcessTraces(td ptrace.Traces) error { +func (r *resourceStatements) ProcessTraces(td ptrace.Traces) error { for i := 0; i < td.ResourceSpans().Len(); i++ { rspans := td.ResourceSpans().At(i) ctx := ottlresource.NewTransformContext(rspans.Resource()) @@ -73,7 +72,7 @@ func (r *ResourceStatements) 
ProcessTraces(td ptrace.Traces) error { return nil } -func (r *ResourceStatements) ProcessMetrics(td pmetric.Metrics) error { +func (r *resourceStatements) ProcessMetrics(td pmetric.Metrics) error { for i := 0; i < td.ResourceMetrics().Len(); i++ { rmetrics := td.ResourceMetrics().At(i) ctx := ottlresource.NewTransformContext(rmetrics.Resource()) @@ -87,7 +86,7 @@ func (r *ResourceStatements) ProcessMetrics(td pmetric.Metrics) error { return nil } -func (r *ResourceStatements) ProcessLogs(td plog.Logs) error { +func (r *resourceStatements) ProcessLogs(td plog.Logs) error { for i := 0; i < td.ResourceLogs().Len(); i++ { rlogs := td.ResourceLogs().At(i) ctx := ottlresource.NewTransformContext(rlogs.Resource()) @@ -101,18 +100,18 @@ func (r *ResourceStatements) ProcessLogs(td plog.Logs) error { return nil } -var _ Context = &ScopeStatements{} -var _ TracesContext = &ScopeStatements{} -var _ MetricsContext = &ScopeStatements{} -var _ LogsContext = &ScopeStatements{} +var _ Context = &scopeStatements{} +var _ TracesContext = &scopeStatements{} +var _ MetricsContext = &scopeStatements{} +var _ LogsContext = &scopeStatements{} -type ScopeStatements struct { +type scopeStatements struct { Statements []*ottl.Statement[ottlscope.TransformContext] } -func (s *ScopeStatements) IsContext() {} +func (s *scopeStatements) isContext() {} -func (s *ScopeStatements) ProcessTraces(td ptrace.Traces) error { +func (s *scopeStatements) ProcessTraces(td ptrace.Traces) error { for i := 0; i < td.ResourceSpans().Len(); i++ { rspans := td.ResourceSpans().At(i) for j := 0; j < rspans.ScopeSpans().Len(); j++ { @@ -129,7 +128,7 @@ func (s *ScopeStatements) ProcessTraces(td ptrace.Traces) error { return nil } -func (s *ScopeStatements) ProcessMetrics(td pmetric.Metrics) error { +func (s *scopeStatements) ProcessMetrics(td pmetric.Metrics) error { for i := 0; i < td.ResourceMetrics().Len(); i++ { rmetrics := td.ResourceMetrics().At(i) for j := 0; j < rmetrics.ScopeMetrics().Len(); j++ { @@ -146,7 +145,7 @@ func (s *ScopeStatements) ProcessMetrics(td pmetric.Metrics) error { return nil } -func (s *ScopeStatements) ProcessLogs(td plog.Logs) error { +func (s *scopeStatements) ProcessLogs(td plog.Logs) error { for i := 0; i < td.ResourceLogs().Len(); i++ { rlogs := td.ResourceLogs().At(i) for j := 0; j < rlogs.ScopeLogs().Len(); j++ { @@ -163,16 +162,16 @@ func (s *ScopeStatements) ProcessLogs(td plog.Logs) error { return nil } -var _ Context = &TraceStatements{} -var _ TracesContext = &TraceStatements{} +var _ Context = &traceStatements{} +var _ TracesContext = &traceStatements{} -type TraceStatements struct { +type traceStatements struct { statements []*ottl.Statement[ottltraces.TransformContext] } -func (t *TraceStatements) IsContext() {} +func (t *traceStatements) isContext() {} -func (t *TraceStatements) ProcessTraces(td ptrace.Traces) error { +func (t *traceStatements) ProcessTraces(td ptrace.Traces) error { for i := 0; i < td.ResourceSpans().Len(); i++ { rspans := td.ResourceSpans().At(i) for j := 0; j < rspans.ScopeSpans().Len(); j++ { @@ -192,16 +191,16 @@ func (t *TraceStatements) ProcessTraces(td ptrace.Traces) error { return nil } -var _ Context = &LogStatements{} -var _ LogsContext = &LogStatements{} +var _ Context = &logStatements{} +var _ LogsContext = &logStatements{} -type LogStatements struct { +type logStatements struct { statements []*ottl.Statement[ottllogs.TransformContext] } -func (l *LogStatements) IsContext() {} +func (l *logStatements) isContext() {} -func (l *LogStatements) ProcessLogs(td 
plog.Logs) error { +func (l *logStatements) ProcessLogs(td plog.Logs) error { for i := 0; i < td.ResourceLogs().Len(); i++ { rlogs := td.ResourceLogs().At(i) for j := 0; j < rlogs.ScopeLogs().Len(); j++ { @@ -221,16 +220,16 @@ func (l *LogStatements) ProcessLogs(td plog.Logs) error { return nil } -var _ Context = &DataPointStatements{} -var _ MetricsContext = &DataPointStatements{} +var _ Context = &dataPointStatements{} +var _ MetricsContext = &dataPointStatements{} -type DataPointStatements struct { +type dataPointStatements struct { statements []*ottl.Statement[ottldatapoints.TransformContext] } -func (d *DataPointStatements) IsContext() {} +func (d *dataPointStatements) isContext() {} -func (d *DataPointStatements) ProcessMetrics(td pmetric.Metrics) error { +func (d *dataPointStatements) ProcessMetrics(td pmetric.Metrics) error { for i := 0; i < td.ResourceMetrics().Len(); i++ { rmetrics := td.ResourceMetrics().At(i) for j := 0; j < rmetrics.ScopeMetrics().Len(); j++ { @@ -260,7 +259,7 @@ func (d *DataPointStatements) ProcessMetrics(td pmetric.Metrics) error { return nil } -func (d *DataPointStatements) handleNumberDataPoints(dps pmetric.NumberDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { +func (d *dataPointStatements) handleNumberDataPoints(dps pmetric.NumberDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { for i := 0; i < dps.Len(); i++ { ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) err := d.callFunctions(ctx) @@ -271,7 +270,7 @@ func (d *DataPointStatements) handleNumberDataPoints(dps pmetric.NumberDataPoint return nil } -func (d *DataPointStatements) handleHistogramDataPoints(dps pmetric.HistogramDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { +func (d *dataPointStatements) handleHistogramDataPoints(dps pmetric.HistogramDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { for i := 0; i < dps.Len(); i++ { ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) err := d.callFunctions(ctx) @@ -282,7 +281,7 @@ func (d *DataPointStatements) handleHistogramDataPoints(dps pmetric.HistogramDat return nil } -func (d *DataPointStatements) handleExponetialHistogramDataPoints(dps pmetric.ExponentialHistogramDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { +func (d *dataPointStatements) handleExponetialHistogramDataPoints(dps pmetric.ExponentialHistogramDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { for i := 0; i < dps.Len(); i++ { ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) err := d.callFunctions(ctx) @@ -293,7 +292,7 @@ func (d *DataPointStatements) handleExponetialHistogramDataPoints(dps pmetric.Ex return nil } -func (d *DataPointStatements) handleSummaryDataPoints(dps pmetric.SummaryDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { +func (d *dataPointStatements) handleSummaryDataPoints(dps pmetric.SummaryDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is 
pcommon.InstrumentationScope, resource pcommon.Resource) error { for i := 0; i < dps.Len(); i++ { ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) err := d.callFunctions(ctx) @@ -304,7 +303,7 @@ func (d *DataPointStatements) handleSummaryDataPoints(dps pmetric.SummaryDataPoi return nil } -func (d *DataPointStatements) callFunctions(ctx ottldatapoints.TransformContext) error { +func (d *dataPointStatements) callFunctions(ctx ottldatapoints.TransformContext) error { for _, statement := range d.statements { _, _, err := statement.Execute(ctx) if err != nil { @@ -315,6 +314,7 @@ func (d *DataPointStatements) callFunctions(ctx ottldatapoints.TransformContext) } type ParserCollection struct { + settings component.TelemetrySettings resourceParser ottl.Parser[ottlresource.TransformContext] scopeParser ottl.Parser[ottlscope.TransformContext] traceParser ottl.Parser[ottltraces.TransformContext] @@ -322,37 +322,41 @@ type ParserCollection struct { logParser ottl.Parser[ottllogs.TransformContext] } -func NewTracesParserCollection(functions map[string]interface{}, settings component.TelemetrySettings) ParserCollection { - return ParserCollection{ - resourceParser: ottlresource.NewParser(ResourceFunctions(), settings), - scopeParser: ottlscope.NewParser(ScopeFunctions(), settings), - traceParser: ottltraces.NewParser(functions, settings), - dataPointsParser: ottl.Parser[ottldatapoints.TransformContext]{}, - logParser: ottl.Parser[ottllogs.TransformContext]{}, +// Option to construct new consumers. +type Option func(*ParserCollection) + +func WithTraceParser(functions map[string]interface{}) Option { + return func(o *ParserCollection) { + o.traceParser = ottltraces.NewParser(functions, o.settings) + } +} + +func WithLogParser(functions map[string]interface{}) Option { + return func(o *ParserCollection) { + o.logParser = ottllogs.NewParser(functions, o.settings) } } -func NewMetricsParserCollection(functions map[string]interface{}, settings component.TelemetrySettings) ParserCollection { - return ParserCollection{ - resourceParser: ottlresource.NewParser(ResourceFunctions(), settings), - scopeParser: ottlscope.NewParser(ScopeFunctions(), settings), - traceParser: ottl.Parser[ottltraces.TransformContext]{}, - dataPointsParser: ottldatapoints.NewParser(functions, settings), - logParser: ottl.Parser[ottllogs.TransformContext]{}, +func WithDataPointParser(functions map[string]interface{}) Option { + return func(o *ParserCollection) { + o.dataPointsParser = ottldatapoints.NewParser(functions, o.settings) } } -func NewLogsParserCollection(functions map[string]interface{}, settings component.TelemetrySettings) ParserCollection { - return ParserCollection{ - resourceParser: ottlresource.NewParser(ResourceFunctions(), settings), - scopeParser: ottlscope.NewParser(ScopeFunctions(), settings), - traceParser: ottl.Parser[ottltraces.TransformContext]{}, - dataPointsParser: ottl.Parser[ottldatapoints.TransformContext]{}, - logParser: ottllogs.NewParser(functions, settings), +func NewParserCollection(settings component.TelemetrySettings, options ...Option) *ParserCollection { + pc := &ParserCollection{ + resourceParser: ottlresource.NewParser(ResourceFunctions(), settings), + scopeParser: ottlscope.NewParser(ScopeFunctions(), settings), + } + + for _, op := range options { + op(pc) } + + return pc } -func (pc ParserCollection) ParseContextStatements(contextStatements []ContextStatements) ([]Context, error) { +func (pc *ParserCollection) ParseContextStatements(contextStatements 
[]ContextStatements) ([]Context, error) { contexts := make([]Context, len(contextStatements)) var errors error @@ -364,7 +368,7 @@ func (pc ParserCollection) ParseContextStatements(contextStatements []ContextSta errors = multierr.Append(errors, err) continue } - contexts[i] = &ResourceStatements{ + contexts[i] = &resourceStatements{ Statements: statements, } case Scope: @@ -373,7 +377,7 @@ func (pc ParserCollection) ParseContextStatements(contextStatements []ContextSta errors = multierr.Append(errors, err) continue } - contexts[i] = &ScopeStatements{ + contexts[i] = &scopeStatements{ Statements: statements, } case Trace: @@ -382,7 +386,7 @@ func (pc ParserCollection) ParseContextStatements(contextStatements []ContextSta errors = multierr.Append(errors, err) continue } - contexts[i] = &TraceStatements{ + contexts[i] = &traceStatements{ statements: statements, } case DataPoint: @@ -391,7 +395,7 @@ func (pc ParserCollection) ParseContextStatements(contextStatements []ContextSta errors = multierr.Append(errors, err) continue } - contexts[i] = &DataPointStatements{ + contexts[i] = &dataPointStatements{ statements: statements, } case Log: @@ -400,7 +404,7 @@ func (pc ParserCollection) ParseContextStatements(contextStatements []ContextSta errors = multierr.Append(errors, err) continue } - contexts[i] = &LogStatements{ + contexts[i] = &logStatements{ statements: statements, } default: From cdf7c7e58555e7525dd5c4e7b3dbe1eadba7c032 Mon Sep 17 00:00:00 2001 From: Tyler Helmuth <12352919+TylerHelmuth@users.noreply.github.com> Date: Mon, 31 Oct 2022 14:46:13 -0600 Subject: [PATCH 08/22] Add example usage temporarily --- .../internal/logs/processor.go | 33 +++++++------------ 1 file changed, 11 insertions(+), 22 deletions(-) diff --git a/processor/transformprocessor/internal/logs/processor.go b/processor/transformprocessor/internal/logs/processor.go index eb335669f875..f8e7783c0d3b 100644 --- a/processor/transformprocessor/internal/logs/processor.go +++ b/processor/transformprocessor/internal/logs/processor.go @@ -16,45 +16,34 @@ package logs // import "github.com/open-telemetry/opentelemetry-collector-contri import ( "context" + "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/pdata/plog" - - "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" - "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottllogs" ) type Processor struct { - statements []*ottl.Statement[ottllogs.TransformContext] + contexts []common.Context } -func NewProcessor(statements []string, functions map[string]interface{}, settings component.TelemetrySettings) (*Processor, error) { - ottlp := ottllogs.NewParser(functions, settings) - parsedStatements, err := ottlp.ParseStatements(statements) +func NewProcessor(statements []common.ContextStatements, functions map[string]interface{}, settings component.TelemetrySettings) (*Processor, error) { + pc := common.NewParserCollection(settings, common.WithLogParser(Functions())) + contexts, err := pc.ParseContextStatements(statements) if err != nil { return nil, err } return &Processor{ - statements: parsedStatements, + contexts: contexts, }, nil } func (p *Processor) ProcessLogs(_ context.Context, td plog.Logs) (plog.Logs, error) { - for i := 0; i < td.ResourceLogs().Len(); i++ { - rlogs := td.ResourceLogs().At(i) - for j := 0; j < rlogs.ScopeLogs().Len(); j++ { - slogs := 
rlogs.ScopeLogs().At(j) - logs := slogs.LogRecords() - for k := 0; k < logs.Len(); k++ { - ctx := ottllogs.NewTransformContext(logs.At(k), slogs.Scope(), rlogs.Resource()) - for _, statement := range p.statements { - _, _, err := statement.Execute(ctx) - if err != nil { - return td, err - } - } - } + for _, contexts := range p.contexts { + lc, ok := contexts.(common.LogsContext) + if !ok { + // handle error } + lc.ProcessLogs(td) } return td, nil } From a19bd177ec477e6d9b93c15e363d2613391c687d Mon Sep 17 00:00:00 2001 From: Tyler Helmuth <12352919+TylerHelmuth@users.noreply.github.com> Date: Mon, 31 Oct 2022 14:58:13 -0600 Subject: [PATCH 09/22] apply feedback --- .../internal/common/processor.go | 100 ++++++++---------- 1 file changed, 42 insertions(+), 58 deletions(-) diff --git a/processor/transformprocessor/internal/common/processor.go b/processor/transformprocessor/internal/common/processor.go index 7d6e056071ce..00f7f8d35f83 100644 --- a/processor/transformprocessor/internal/common/processor.go +++ b/processor/transformprocessor/internal/common/processor.go @@ -52,17 +52,15 @@ var _ TracesContext = &resourceStatements{} var _ MetricsContext = &resourceStatements{} var _ LogsContext = &resourceStatements{} -type resourceStatements struct { - Statements []*ottl.Statement[ottlresource.TransformContext] -} +type resourceStatements []*ottl.Statement[ottlresource.TransformContext] -func (r *resourceStatements) isContext() {} +func (r resourceStatements) isContext() {} -func (r *resourceStatements) ProcessTraces(td ptrace.Traces) error { +func (r resourceStatements) ProcessTraces(td ptrace.Traces) error { for i := 0; i < td.ResourceSpans().Len(); i++ { rspans := td.ResourceSpans().At(i) ctx := ottlresource.NewTransformContext(rspans.Resource()) - for _, statement := range r.Statements { + for _, statement := range r { _, _, err := statement.Execute(ctx) if err != nil { return err @@ -72,11 +70,11 @@ func (r *resourceStatements) ProcessTraces(td ptrace.Traces) error { return nil } -func (r *resourceStatements) ProcessMetrics(td pmetric.Metrics) error { +func (r resourceStatements) ProcessMetrics(td pmetric.Metrics) error { for i := 0; i < td.ResourceMetrics().Len(); i++ { rmetrics := td.ResourceMetrics().At(i) ctx := ottlresource.NewTransformContext(rmetrics.Resource()) - for _, statement := range r.Statements { + for _, statement := range r { _, _, err := statement.Execute(ctx) if err != nil { return err @@ -86,11 +84,11 @@ func (r *resourceStatements) ProcessMetrics(td pmetric.Metrics) error { return nil } -func (r *resourceStatements) ProcessLogs(td plog.Logs) error { +func (r resourceStatements) ProcessLogs(td plog.Logs) error { for i := 0; i < td.ResourceLogs().Len(); i++ { rlogs := td.ResourceLogs().At(i) ctx := ottlresource.NewTransformContext(rlogs.Resource()) - for _, statement := range r.Statements { + for _, statement := range r { _, _, err := statement.Execute(ctx) if err != nil { return err @@ -105,19 +103,17 @@ var _ TracesContext = &scopeStatements{} var _ MetricsContext = &scopeStatements{} var _ LogsContext = &scopeStatements{} -type scopeStatements struct { - Statements []*ottl.Statement[ottlscope.TransformContext] -} +type scopeStatements []*ottl.Statement[ottlscope.TransformContext] -func (s *scopeStatements) isContext() {} +func (s scopeStatements) isContext() {} -func (s *scopeStatements) ProcessTraces(td ptrace.Traces) error { +func (s scopeStatements) ProcessTraces(td ptrace.Traces) error { for i := 0; i < td.ResourceSpans().Len(); i++ { rspans := 
td.ResourceSpans().At(i) for j := 0; j < rspans.ScopeSpans().Len(); j++ { sspans := rspans.ScopeSpans().At(j) ctx := ottlscope.NewTransformContext(sspans.Scope(), rspans.Resource()) - for _, statement := range s.Statements { + for _, statement := range s { _, _, err := statement.Execute(ctx) if err != nil { return err @@ -128,13 +124,13 @@ func (s *scopeStatements) ProcessTraces(td ptrace.Traces) error { return nil } -func (s *scopeStatements) ProcessMetrics(td pmetric.Metrics) error { +func (s scopeStatements) ProcessMetrics(td pmetric.Metrics) error { for i := 0; i < td.ResourceMetrics().Len(); i++ { rmetrics := td.ResourceMetrics().At(i) for j := 0; j < rmetrics.ScopeMetrics().Len(); j++ { smetrics := rmetrics.ScopeMetrics().At(j) ctx := ottlscope.NewTransformContext(smetrics.Scope(), rmetrics.Resource()) - for _, statement := range s.Statements { + for _, statement := range s { _, _, err := statement.Execute(ctx) if err != nil { return err @@ -145,13 +141,13 @@ func (s *scopeStatements) ProcessMetrics(td pmetric.Metrics) error { return nil } -func (s *scopeStatements) ProcessLogs(td plog.Logs) error { +func (s scopeStatements) ProcessLogs(td plog.Logs) error { for i := 0; i < td.ResourceLogs().Len(); i++ { rlogs := td.ResourceLogs().At(i) for j := 0; j < rlogs.ScopeLogs().Len(); j++ { slogs := rlogs.ScopeLogs().At(j) ctx := ottlscope.NewTransformContext(slogs.Scope(), rlogs.Resource()) - for _, statement := range s.Statements { + for _, statement := range s { _, _, err := statement.Execute(ctx) if err != nil { return err @@ -165,13 +161,11 @@ func (s *scopeStatements) ProcessLogs(td plog.Logs) error { var _ Context = &traceStatements{} var _ TracesContext = &traceStatements{} -type traceStatements struct { - statements []*ottl.Statement[ottltraces.TransformContext] -} +type traceStatements []*ottl.Statement[ottltraces.TransformContext] -func (t *traceStatements) isContext() {} +func (t traceStatements) isContext() {} -func (t *traceStatements) ProcessTraces(td ptrace.Traces) error { +func (t traceStatements) ProcessTraces(td ptrace.Traces) error { for i := 0; i < td.ResourceSpans().Len(); i++ { rspans := td.ResourceSpans().At(i) for j := 0; j < rspans.ScopeSpans().Len(); j++ { @@ -179,7 +173,7 @@ func (t *traceStatements) ProcessTraces(td ptrace.Traces) error { spans := sspans.Spans() for k := 0; k < spans.Len(); k++ { ctx := ottltraces.NewTransformContext(spans.At(k), sspans.Scope(), rspans.Resource()) - for _, statement := range t.statements { + for _, statement := range t { _, _, err := statement.Execute(ctx) if err != nil { return err @@ -194,13 +188,11 @@ func (t *traceStatements) ProcessTraces(td ptrace.Traces) error { var _ Context = &logStatements{} var _ LogsContext = &logStatements{} -type logStatements struct { - statements []*ottl.Statement[ottllogs.TransformContext] -} +type logStatements []*ottl.Statement[ottllogs.TransformContext] -func (l *logStatements) isContext() {} +func (l logStatements) isContext() {} -func (l *logStatements) ProcessLogs(td plog.Logs) error { +func (l logStatements) ProcessLogs(td plog.Logs) error { for i := 0; i < td.ResourceLogs().Len(); i++ { rlogs := td.ResourceLogs().At(i) for j := 0; j < rlogs.ScopeLogs().Len(); j++ { @@ -208,7 +200,7 @@ func (l *logStatements) ProcessLogs(td plog.Logs) error { logs := slogs.LogRecords() for k := 0; k < logs.Len(); k++ { ctx := ottllogs.NewTransformContext(logs.At(k), slogs.Scope(), rlogs.Resource()) - for _, statement := range l.statements { + for _, statement := range l { _, _, err := 
statement.Execute(ctx) if err != nil { return err @@ -223,13 +215,11 @@ func (l *logStatements) ProcessLogs(td plog.Logs) error { var _ Context = &dataPointStatements{} var _ MetricsContext = &dataPointStatements{} -type dataPointStatements struct { - statements []*ottl.Statement[ottldatapoints.TransformContext] -} +type dataPointStatements []*ottl.Statement[ottldatapoints.TransformContext] -func (d *dataPointStatements) isContext() {} +func (d dataPointStatements) isContext() {} -func (d *dataPointStatements) ProcessMetrics(td pmetric.Metrics) error { +func (d dataPointStatements) ProcessMetrics(td pmetric.Metrics) error { for i := 0; i < td.ResourceMetrics().Len(); i++ { rmetrics := td.ResourceMetrics().At(i) for j := 0; j < rmetrics.ScopeMetrics().Len(); j++ { @@ -259,7 +249,7 @@ func (d *dataPointStatements) ProcessMetrics(td pmetric.Metrics) error { return nil } -func (d *dataPointStatements) handleNumberDataPoints(dps pmetric.NumberDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { +func (d dataPointStatements) handleNumberDataPoints(dps pmetric.NumberDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { for i := 0; i < dps.Len(); i++ { ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) err := d.callFunctions(ctx) @@ -270,7 +260,7 @@ func (d *dataPointStatements) handleNumberDataPoints(dps pmetric.NumberDataPoint return nil } -func (d *dataPointStatements) handleHistogramDataPoints(dps pmetric.HistogramDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { +func (d dataPointStatements) handleHistogramDataPoints(dps pmetric.HistogramDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { for i := 0; i < dps.Len(); i++ { ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) err := d.callFunctions(ctx) @@ -281,7 +271,7 @@ func (d *dataPointStatements) handleHistogramDataPoints(dps pmetric.HistogramDat return nil } -func (d *dataPointStatements) handleExponetialHistogramDataPoints(dps pmetric.ExponentialHistogramDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { +func (d dataPointStatements) handleExponetialHistogramDataPoints(dps pmetric.ExponentialHistogramDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { for i := 0; i < dps.Len(); i++ { ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) err := d.callFunctions(ctx) @@ -292,7 +282,7 @@ func (d *dataPointStatements) handleExponetialHistogramDataPoints(dps pmetric.Ex return nil } -func (d *dataPointStatements) handleSummaryDataPoints(dps pmetric.SummaryDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { +func (d dataPointStatements) handleSummaryDataPoints(dps pmetric.SummaryDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { for i := 0; i < dps.Len(); i++ { ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) err := d.callFunctions(ctx) @@ -303,8 +293,8 @@ 
func (d *dataPointStatements) handleSummaryDataPoints(dps pmetric.SummaryDataPoi return nil } -func (d *dataPointStatements) callFunctions(ctx ottldatapoints.TransformContext) error { - for _, statement := range d.statements { +func (d dataPointStatements) callFunctions(ctx ottldatapoints.TransformContext) error { + for _, statement := range d { _, _, err := statement.Execute(ctx) if err != nil { return err @@ -368,45 +358,39 @@ func (pc *ParserCollection) ParseContextStatements(contextStatements []ContextSt errors = multierr.Append(errors, err) continue } - contexts[i] = &resourceStatements{ - Statements: statements, - } + contexts[i] = resourceStatements(statements) case Scope: statements, err := pc.scopeParser.ParseStatements(s.Statements) if err != nil { errors = multierr.Append(errors, err) continue } - contexts[i] = &scopeStatements{ - Statements: statements, - } + contexts[i] = scopeStatements(statements) + case Trace: statements, err := pc.traceParser.ParseStatements(s.Statements) if err != nil { errors = multierr.Append(errors, err) continue } - contexts[i] = &traceStatements{ - statements: statements, - } + contexts[i] = traceStatements(statements) + case DataPoint: statements, err := pc.dataPointsParser.ParseStatements(s.Statements) if err != nil { errors = multierr.Append(errors, err) continue } - contexts[i] = &dataPointStatements{ - statements: statements, - } + contexts[i] = dataPointStatements(statements) + case Log: statements, err := pc.logParser.ParseStatements(s.Statements) if err != nil { errors = multierr.Append(errors, err) continue } - contexts[i] = &logStatements{ - statements: statements, - } + contexts[i] = logStatements(statements) + default: errors = multierr.Append(errors, fmt.Errorf("context, %v, is not a valid context", s.Context)) } From 14d60b1ff3b44d7b55b3ce3e3b790e3d7a9f5ac7 Mon Sep 17 00:00:00 2001 From: Tyler Helmuth <12352919+TylerHelmuth@users.noreply.github.com> Date: Tue, 1 Nov 2022 09:51:36 -0600 Subject: [PATCH 10/22] Split into individual files --- .../internal/common/logs.go | 52 ++++++ .../internal/common/metrics.go | 117 +++++++++++++ .../internal/common/processor.go | 159 +----------------- .../internal/common/traces.go | 52 ++++++ 4 files changed, 222 insertions(+), 158 deletions(-) create mode 100644 processor/transformprocessor/internal/common/logs.go create mode 100644 processor/transformprocessor/internal/common/metrics.go create mode 100644 processor/transformprocessor/internal/common/traces.go diff --git a/processor/transformprocessor/internal/common/logs.go b/processor/transformprocessor/internal/common/logs.go new file mode 100644 index 000000000000..25bbeb700ab6 --- /dev/null +++ b/processor/transformprocessor/internal/common/logs.go @@ -0,0 +1,52 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package common // import "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" + +import ( + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottllogs" + "go.opentelemetry.io/collector/pdata/plog" +) + +type LogsContext interface { + ProcessLogs(td plog.Logs) error +} + +var _ Context = &logStatements{} +var _ LogsContext = &logStatements{} + +type logStatements []*ottl.Statement[ottllogs.TransformContext] + +func (l logStatements) isContext() {} + +func (l logStatements) ProcessLogs(td plog.Logs) error { + for i := 0; i < td.ResourceLogs().Len(); i++ { + rlogs := td.ResourceLogs().At(i) + for j := 0; j < rlogs.ScopeLogs().Len(); j++ { + slogs := rlogs.ScopeLogs().At(j) + logs := slogs.LogRecords() + for k := 0; k < logs.Len(); k++ { + ctx := ottllogs.NewTransformContext(logs.At(k), slogs.Scope(), rlogs.Resource()) + for _, statement := range l { + _, _, err := statement.Execute(ctx) + if err != nil { + return err + } + } + } + } + } + return nil +} diff --git a/processor/transformprocessor/internal/common/metrics.go b/processor/transformprocessor/internal/common/metrics.go new file mode 100644 index 000000000000..fd01cf02fc18 --- /dev/null +++ b/processor/transformprocessor/internal/common/metrics.go @@ -0,0 +1,117 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package common // import "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" + +import ( + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottldatapoints" + "go.opentelemetry.io/collector/pdata/pcommon" + "go.opentelemetry.io/collector/pdata/pmetric" +) + +type MetricsContext interface { + ProcessMetrics(td pmetric.Metrics) error +} + +var _ Context = &dataPointStatements{} +var _ MetricsContext = &dataPointStatements{} + +type dataPointStatements []*ottl.Statement[ottldatapoints.TransformContext] + +func (d dataPointStatements) isContext() {} + +func (d dataPointStatements) ProcessMetrics(td pmetric.Metrics) error { + for i := 0; i < td.ResourceMetrics().Len(); i++ { + rmetrics := td.ResourceMetrics().At(i) + for j := 0; j < rmetrics.ScopeMetrics().Len(); j++ { + smetrics := rmetrics.ScopeMetrics().At(j) + metrics := smetrics.Metrics() + for k := 0; k < metrics.Len(); k++ { + metric := metrics.At(k) + var err error + switch metric.Type() { + case pmetric.MetricTypeSum: + err = d.handleNumberDataPoints(metric.Sum().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + case pmetric.MetricTypeGauge: + err = d.handleNumberDataPoints(metric.Gauge().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + case pmetric.MetricTypeHistogram: + err = d.handleHistogramDataPoints(metric.Histogram().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + case pmetric.MetricTypeExponentialHistogram: + err = d.handleExponetialHistogramDataPoints(metric.ExponentialHistogram().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + case pmetric.MetricTypeSummary: + err = d.handleSummaryDataPoints(metric.Summary().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) + } + if err != nil { + return err + } + } + } + } + return nil +} + +func (d dataPointStatements) handleNumberDataPoints(dps pmetric.NumberDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { + for i := 0; i < dps.Len(); i++ { + ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) + err := d.callFunctions(ctx) + if err != nil { + return err + } + } + return nil +} + +func (d dataPointStatements) handleHistogramDataPoints(dps pmetric.HistogramDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { + for i := 0; i < dps.Len(); i++ { + ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) + err := d.callFunctions(ctx) + if err != nil { + return err + } + } + return nil +} + +func (d dataPointStatements) handleExponetialHistogramDataPoints(dps pmetric.ExponentialHistogramDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { + for i := 0; i < dps.Len(); i++ { + ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) + err := d.callFunctions(ctx) + if err != nil { + return err + } + } + return nil +} + +func (d dataPointStatements) handleSummaryDataPoints(dps pmetric.SummaryDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { + for i := 0; i < 
dps.Len(); i++ { + ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) + err := d.callFunctions(ctx) + if err != nil { + return err + } + } + return nil +} + +func (d dataPointStatements) callFunctions(ctx ottldatapoints.TransformContext) error { + for _, statement := range d { + _, _, err := statement.Execute(ctx) + if err != nil { + return err + } + } + return nil +} diff --git a/processor/transformprocessor/internal/common/processor.go b/processor/transformprocessor/internal/common/processor.go index 00f7f8d35f83..f1159fed7466 100644 --- a/processor/transformprocessor/internal/common/processor.go +++ b/processor/transformprocessor/internal/common/processor.go @@ -13,10 +13,10 @@ // limitations under the License. package common // import "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" + import ( "fmt" "go.opentelemetry.io/collector/component" - "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/plog" "go.opentelemetry.io/collector/pdata/pmetric" "go.opentelemetry.io/collector/pdata/ptrace" @@ -35,18 +35,6 @@ type Context interface { isContext() } -type TracesContext interface { - ProcessTraces(td ptrace.Traces) error -} - -type MetricsContext interface { - ProcessMetrics(td pmetric.Metrics) error -} - -type LogsContext interface { - ProcessLogs(td plog.Logs) error -} - var _ Context = &resourceStatements{} var _ TracesContext = &resourceStatements{} var _ MetricsContext = &resourceStatements{} @@ -158,151 +146,6 @@ func (s scopeStatements) ProcessLogs(td plog.Logs) error { return nil } -var _ Context = &traceStatements{} -var _ TracesContext = &traceStatements{} - -type traceStatements []*ottl.Statement[ottltraces.TransformContext] - -func (t traceStatements) isContext() {} - -func (t traceStatements) ProcessTraces(td ptrace.Traces) error { - for i := 0; i < td.ResourceSpans().Len(); i++ { - rspans := td.ResourceSpans().At(i) - for j := 0; j < rspans.ScopeSpans().Len(); j++ { - sspans := rspans.ScopeSpans().At(j) - spans := sspans.Spans() - for k := 0; k < spans.Len(); k++ { - ctx := ottltraces.NewTransformContext(spans.At(k), sspans.Scope(), rspans.Resource()) - for _, statement := range t { - _, _, err := statement.Execute(ctx) - if err != nil { - return err - } - } - } - } - } - return nil -} - -var _ Context = &logStatements{} -var _ LogsContext = &logStatements{} - -type logStatements []*ottl.Statement[ottllogs.TransformContext] - -func (l logStatements) isContext() {} - -func (l logStatements) ProcessLogs(td plog.Logs) error { - for i := 0; i < td.ResourceLogs().Len(); i++ { - rlogs := td.ResourceLogs().At(i) - for j := 0; j < rlogs.ScopeLogs().Len(); j++ { - slogs := rlogs.ScopeLogs().At(j) - logs := slogs.LogRecords() - for k := 0; k < logs.Len(); k++ { - ctx := ottllogs.NewTransformContext(logs.At(k), slogs.Scope(), rlogs.Resource()) - for _, statement := range l { - _, _, err := statement.Execute(ctx) - if err != nil { - return err - } - } - } - } - } - return nil -} - -var _ Context = &dataPointStatements{} -var _ MetricsContext = &dataPointStatements{} - -type dataPointStatements []*ottl.Statement[ottldatapoints.TransformContext] - -func (d dataPointStatements) isContext() {} - -func (d dataPointStatements) ProcessMetrics(td pmetric.Metrics) error { - for i := 0; i < td.ResourceMetrics().Len(); i++ { - rmetrics := td.ResourceMetrics().At(i) - for j := 0; j < rmetrics.ScopeMetrics().Len(); j++ { - smetrics := rmetrics.ScopeMetrics().At(j) - metrics 
:= smetrics.Metrics() - for k := 0; k < metrics.Len(); k++ { - metric := metrics.At(k) - var err error - switch metric.Type() { - case pmetric.MetricTypeSum: - err = d.handleNumberDataPoints(metric.Sum().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) - case pmetric.MetricTypeGauge: - err = d.handleNumberDataPoints(metric.Gauge().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) - case pmetric.MetricTypeHistogram: - err = d.handleHistogramDataPoints(metric.Histogram().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) - case pmetric.MetricTypeExponentialHistogram: - err = d.handleExponetialHistogramDataPoints(metric.ExponentialHistogram().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) - case pmetric.MetricTypeSummary: - err = d.handleSummaryDataPoints(metric.Summary().DataPoints(), metrics.At(k), metrics, smetrics.Scope(), rmetrics.Resource()) - } - if err != nil { - return err - } - } - } - } - return nil -} - -func (d dataPointStatements) handleNumberDataPoints(dps pmetric.NumberDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { - for i := 0; i < dps.Len(); i++ { - ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) - err := d.callFunctions(ctx) - if err != nil { - return err - } - } - return nil -} - -func (d dataPointStatements) handleHistogramDataPoints(dps pmetric.HistogramDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { - for i := 0; i < dps.Len(); i++ { - ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) - err := d.callFunctions(ctx) - if err != nil { - return err - } - } - return nil -} - -func (d dataPointStatements) handleExponetialHistogramDataPoints(dps pmetric.ExponentialHistogramDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { - for i := 0; i < dps.Len(); i++ { - ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) - err := d.callFunctions(ctx) - if err != nil { - return err - } - } - return nil -} - -func (d dataPointStatements) handleSummaryDataPoints(dps pmetric.SummaryDataPointSlice, metric pmetric.Metric, metrics pmetric.MetricSlice, is pcommon.InstrumentationScope, resource pcommon.Resource) error { - for i := 0; i < dps.Len(); i++ { - ctx := ottldatapoints.NewTransformContext(dps.At(i), metric, metrics, is, resource) - err := d.callFunctions(ctx) - if err != nil { - return err - } - } - return nil -} - -func (d dataPointStatements) callFunctions(ctx ottldatapoints.TransformContext) error { - for _, statement := range d { - _, _, err := statement.Execute(ctx) - if err != nil { - return err - } - } - return nil -} - type ParserCollection struct { settings component.TelemetrySettings resourceParser ottl.Parser[ottlresource.TransformContext] diff --git a/processor/transformprocessor/internal/common/traces.go b/processor/transformprocessor/internal/common/traces.go new file mode 100644 index 000000000000..d3b2a5471b7d --- /dev/null +++ b/processor/transformprocessor/internal/common/traces.go @@ -0,0 +1,52 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package common // import "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" + +import ( + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottltraces" + "go.opentelemetry.io/collector/pdata/ptrace" +) + +type TracesContext interface { + ProcessTraces(td ptrace.Traces) error +} + +var _ Context = &traceStatements{} +var _ TracesContext = &traceStatements{} + +type traceStatements []*ottl.Statement[ottltraces.TransformContext] + +func (t traceStatements) isContext() {} + +func (t traceStatements) ProcessTraces(td ptrace.Traces) error { + for i := 0; i < td.ResourceSpans().Len(); i++ { + rspans := td.ResourceSpans().At(i) + for j := 0; j < rspans.ScopeSpans().Len(); j++ { + sspans := rspans.ScopeSpans().At(j) + spans := sspans.Spans() + for k := 0; k < spans.Len(); k++ { + ctx := ottltraces.NewTransformContext(spans.At(k), sspans.Scope(), rspans.Resource()) + for _, statement := range t { + _, _, err := statement.Execute(ctx) + if err != nil { + return err + } + } + } + } + } + return nil +} From 8950da08a1b55b2b2c5c788ac3e762b3b6d607ac Mon Sep 17 00:00:00 2001 From: Tyler Helmuth <12352919+TylerHelmuth@users.noreply.github.com> Date: Wed, 2 Nov 2022 09:44:37 -0600 Subject: [PATCH 11/22] Add spanevent and metric contexts --- .../internal/common/config.go | 2 ++ .../internal/common/metrics.go | 28 ++++++++++++++++ .../internal/common/processor.go | 33 +++++++++++++++++-- .../internal/common/traces.go | 32 ++++++++++++++++++ 4 files changed, 92 insertions(+), 3 deletions(-) diff --git a/processor/transformprocessor/internal/common/config.go b/processor/transformprocessor/internal/common/config.go index 8106f6512ff1..7b19e12904a6 100644 --- a/processor/transformprocessor/internal/common/config.go +++ b/processor/transformprocessor/internal/common/config.go @@ -18,6 +18,8 @@ const ( Resource string = "resource" Scope string = "scope" Trace string = "trace" + SpanEvent string = "spanevent" + Metric string = "metric" DataPoint string = "datapoint" Log string = "log" ) diff --git a/processor/transformprocessor/internal/common/metrics.go b/processor/transformprocessor/internal/common/metrics.go index fd01cf02fc18..43b464dfbc54 100644 --- a/processor/transformprocessor/internal/common/metrics.go +++ b/processor/transformprocessor/internal/common/metrics.go @@ -17,6 +17,7 @@ package common // import "github.com/open-telemetry/opentelemetry-collector-cont import ( "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottldatapoints" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottlmetric" "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/pmetric" ) @@ -25,6 +26,33 @@ type MetricsContext interface { ProcessMetrics(td pmetric.Metrics) error } +var _ Context = &metricStatements{} +var _ MetricsContext = &metricStatements{} + 
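+// metricStatements is a slice of OTTL statements parsed with the metric context.
+// Its ProcessMetrics walks resource metrics, then scope metrics, then each metric,
+// and executes every statement against each metric it finds.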
+type metricStatements []*ottl.Statement[ottlmetric.TransformContext] + +func (m metricStatements) isContext() {} + +func (m metricStatements) ProcessMetrics(td pmetric.Metrics) error { + for i := 0; i < td.ResourceMetrics().Len(); i++ { + rmetrics := td.ResourceMetrics().At(i) + for j := 0; j < rmetrics.ScopeMetrics().Len(); j++ { + smetrics := rmetrics.ScopeMetrics().At(j) + metrics := smetrics.Metrics() + for k := 0; k < metrics.Len(); k++ { + ctx := ottlmetric.NewTransformContext(metrics.At(k), smetrics.Scope(), rmetrics.Resource()) + for _, statement := range m { + _, _, err := statement.Execute(ctx) + if err != nil { + return err + } + } + } + } + } + return nil +} + var _ Context = &dataPointStatements{} var _ MetricsContext = &dataPointStatements{} diff --git a/processor/transformprocessor/internal/common/processor.go b/processor/transformprocessor/internal/common/processor.go index f1159fed7466..b36f4ebb6ddd 100644 --- a/processor/transformprocessor/internal/common/processor.go +++ b/processor/transformprocessor/internal/common/processor.go @@ -16,6 +16,8 @@ package common // import "github.com/open-telemetry/opentelemetry-collector-cont import ( "fmt" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottlmetric" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottlspanevent" "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/pdata/plog" "go.opentelemetry.io/collector/pdata/pmetric" @@ -151,6 +153,8 @@ type ParserCollection struct { resourceParser ottl.Parser[ottlresource.TransformContext] scopeParser ottl.Parser[ottlscope.TransformContext] traceParser ottl.Parser[ottltraces.TransformContext] + spanEventParser ottl.Parser[ottlspanevent.TransformContext] + metricParser ottl.Parser[ottlmetric.TransformContext] dataPointsParser ottl.Parser[ottldatapoints.TransformContext] logParser ottl.Parser[ottllogs.TransformContext] } @@ -164,12 +168,24 @@ func WithTraceParser(functions map[string]interface{}) Option { } } +func WithSpanEventParser(functions map[string]interface{}) Option { + return func(o *ParserCollection) { + o.spanEventParser = ottlspanevent.NewParser(functions, o.settings) + } +} + func WithLogParser(functions map[string]interface{}) Option { return func(o *ParserCollection) { o.logParser = ottllogs.NewParser(functions, o.settings) } } +func WithMetricParser(functions map[string]interface{}) Option { + return func(o *ParserCollection) { + o.metricParser = ottlmetric.NewParser(functions, o.settings) + } +} + func WithDataPointParser(functions map[string]interface{}) Option { return func(o *ParserCollection) { o.dataPointsParser = ottldatapoints.NewParser(functions, o.settings) @@ -209,7 +225,6 @@ func (pc *ParserCollection) ParseContextStatements(contextStatements []ContextSt continue } contexts[i] = scopeStatements(statements) - case Trace: statements, err := pc.traceParser.ParseStatements(s.Statements) if err != nil { @@ -217,7 +232,20 @@ func (pc *ParserCollection) ParseContextStatements(contextStatements []ContextSt continue } contexts[i] = traceStatements(statements) - + case SpanEvent: + statements, err := pc.spanEventParser.ParseStatements(s.Statements) + if err != nil { + errors = multierr.Append(errors, err) + continue + } + contexts[i] = spanEventStatements(statements) + case Metric: + statements, err := pc.metricParser.ParseStatements(s.Statements) + if err != nil { + errors = multierr.Append(errors, err) + continue + } + contexts[i] = 
metricStatements(statements) case DataPoint: statements, err := pc.dataPointsParser.ParseStatements(s.Statements) if err != nil { @@ -225,7 +253,6 @@ func (pc *ParserCollection) ParseContextStatements(contextStatements []ContextSt continue } contexts[i] = dataPointStatements(statements) - case Log: statements, err := pc.logParser.ParseStatements(s.Statements) if err != nil { diff --git a/processor/transformprocessor/internal/common/traces.go b/processor/transformprocessor/internal/common/traces.go index d3b2a5471b7d..2f860ba03f48 100644 --- a/processor/transformprocessor/internal/common/traces.go +++ b/processor/transformprocessor/internal/common/traces.go @@ -16,6 +16,7 @@ package common // import "github.com/open-telemetry/opentelemetry-collector-cont import ( "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottlspanevent" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottltraces" "go.opentelemetry.io/collector/pdata/ptrace" ) @@ -50,3 +51,34 @@ func (t traceStatements) ProcessTraces(td ptrace.Traces) error { } return nil } + +var _ Context = &spanEventStatements{} +var _ TracesContext = &spanEventStatements{} + +type spanEventStatements []*ottl.Statement[ottlspanevent.TransformContext] + +func (s spanEventStatements) isContext() {} + +func (s spanEventStatements) ProcessTraces(td ptrace.Traces) error { + for i := 0; i < td.ResourceSpans().Len(); i++ { + rspans := td.ResourceSpans().At(i) + for j := 0; j < rspans.ScopeSpans().Len(); j++ { + sspans := rspans.ScopeSpans().At(j) + spans := sspans.Spans() + for k := 0; k < spans.Len(); k++ { + span := spans.At(k) + spanEvents := span.Events() + for n := 0; n < spanEvents.Len(); n++ { + ctx := ottlspanevent.NewTransformContext(spanEvents.At(k), span, sspans.Scope(), rspans.Resource()) + for _, statement := range s { + _, _, err := statement.Execute(ctx) + if err != nil { + return err + } + } + } + } + } + } + return nil +} From 438ce15a1a1fa4ca6f4979ed38eec4d15c20aaa8 Mon Sep 17 00:00:00 2001 From: Tyler Helmuth <12352919+TylerHelmuth@users.noreply.github.com> Date: Wed, 2 Nov 2022 12:12:01 -0600 Subject: [PATCH 12/22] revert logs processor example --- .../internal/logs/processor.go | 33 ++++++++++++------- 1 file changed, 22 insertions(+), 11 deletions(-) diff --git a/processor/transformprocessor/internal/logs/processor.go b/processor/transformprocessor/internal/logs/processor.go index ee0f62698682..c1ef7de43dda 100644 --- a/processor/transformprocessor/internal/logs/processor.go +++ b/processor/transformprocessor/internal/logs/processor.go @@ -16,34 +16,45 @@ package logs // import "github.com/open-telemetry/opentelemetry-collector-contri import ( "context" - "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/pdata/plog" + + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottllogs" ) type Processor struct { - contexts []common.Context + statements []*ottl.Statement[ottllogs.TransformContext] } -func NewProcessor(statements []common.ContextStatements, settings component.TelemetrySettings) (*Processor, error) { - pc := common.NewParserCollection(settings, common.WithLogParser(Functions())) - contexts, err := 
pc.ParseContextStatements(statements) +func NewProcessor(statements []string, settings component.TelemetrySettings) (*Processor, error) { + ottlp := ottllogs.NewParser(Functions(), settings) + parsedStatements, err := ottlp.ParseStatements(statements) if err != nil { return nil, err } return &Processor{ - contexts: contexts, + statements: parsedStatements, }, nil } func (p *Processor) ProcessLogs(_ context.Context, td plog.Logs) (plog.Logs, error) { - for _, contexts := range p.contexts { - lc, ok := contexts.(common.LogsContext) - if !ok { - // handle error + for i := 0; i < td.ResourceLogs().Len(); i++ { + rlogs := td.ResourceLogs().At(i) + for j := 0; j < rlogs.ScopeLogs().Len(); j++ { + slogs := rlogs.ScopeLogs().At(j) + logs := slogs.LogRecords() + for k := 0; k < logs.Len(); k++ { + ctx := ottllogs.NewTransformContext(logs.At(k), slogs.Scope(), rlogs.Resource()) + for _, statement := range p.statements { + _, _, err := statement.Execute(ctx) + if err != nil { + return td, err + } + } + } } - lc.ProcessLogs(td) } return td, nil } From 85045f522c0f51cbf6298ab7f3df172688e606ad Mon Sep 17 00:00:00 2001 From: Tyler Helmuth <12352919+TylerHelmuth@users.noreply.github.com> Date: Mon, 7 Nov 2022 20:19:48 -0700 Subject: [PATCH 13/22] Fix lint --- processor/transformprocessor/config.go | 4 ++-- processor/transformprocessor/internal/logs/processor.go | 2 +- processor/transformprocessor/internal/metrics/processor.go | 2 +- processor/transformprocessor/internal/traces/processor.go | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/processor/transformprocessor/config.go b/processor/transformprocessor/config.go index f4ecd4018c29..d1ec1fadc13a 100644 --- a/processor/transformprocessor/config.go +++ b/processor/transformprocessor/config.go @@ -16,15 +16,15 @@ package transformprocessor // import "github.com/open-telemetry/opentelemetry-co import ( "fmt" + "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/config" "go.uber.org/zap" - "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" - "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottldatapoints" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottllogs" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottltraces" + "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/logs" "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/metrics" "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/traces" diff --git a/processor/transformprocessor/internal/logs/processor.go b/processor/transformprocessor/internal/logs/processor.go index b8107f27a610..3d578b8e484a 100644 --- a/processor/transformprocessor/internal/logs/processor.go +++ b/processor/transformprocessor/internal/logs/processor.go @@ -16,9 +16,9 @@ package logs // import "github.com/open-telemetry/opentelemetry-collector-contri import ( "context" - "go.opentelemetry.io/collector/consumer" "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/consumer" "go.opentelemetry.io/collector/pdata/plog" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" diff --git 
a/processor/transformprocessor/internal/metrics/processor.go b/processor/transformprocessor/internal/metrics/processor.go index c9c05be05e98..b8e4ae69e554 100644 --- a/processor/transformprocessor/internal/metrics/processor.go +++ b/processor/transformprocessor/internal/metrics/processor.go @@ -16,9 +16,9 @@ package metrics // import "github.com/open-telemetry/opentelemetry-collector-con import ( "context" - "go.opentelemetry.io/collector/consumer" "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/consumer" "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/pmetric" diff --git a/processor/transformprocessor/internal/traces/processor.go b/processor/transformprocessor/internal/traces/processor.go index ce4063c7c37a..ac6af5cc1403 100644 --- a/processor/transformprocessor/internal/traces/processor.go +++ b/processor/transformprocessor/internal/traces/processor.go @@ -16,9 +16,9 @@ package traces // import "github.com/open-telemetry/opentelemetry-collector-cont import ( "context" - "go.opentelemetry.io/collector/consumer" "go.opentelemetry.io/collector/component" + "go.opentelemetry.io/collector/consumer" "go.opentelemetry.io/collector/pdata/ptrace" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" From 1a83c265c41a908fdc0ac56a5d069f12128463c5 Mon Sep 17 00:00:00 2001 From: Tyler Helmuth <12352919+TylerHelmuth@users.noreply.github.com> Date: Mon, 7 Nov 2022 20:37:35 -0700 Subject: [PATCH 14/22] run make gotidy --- processor/transformprocessor/go.mod | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/processor/transformprocessor/go.mod b/processor/transformprocessor/go.mod index 78958a4d43e3..55ed2fc2ec08 100644 --- a/processor/transformprocessor/go.mod +++ b/processor/transformprocessor/go.mod @@ -7,7 +7,6 @@ require ( github.com/stretchr/testify v1.8.1 go.opentelemetry.io/collector v0.63.2-0.20221104003159-6b27644724d8 go.opentelemetry.io/collector/pdata v0.63.2-0.20221104003159-6b27644724d8 - go.uber.org/multierr v1.8.0 go.uber.org/zap v1.23.0 ) @@ -34,6 +33,7 @@ require ( go.opentelemetry.io/otel/metric v0.33.0 // indirect go.opentelemetry.io/otel/trace v1.11.1 // indirect go.uber.org/atomic v1.10.0 // indirect + go.uber.org/multierr v1.8.0 // indirect golang.org/x/exp v0.0.0-20220722155223-a9213eeb770e // indirect golang.org/x/net v0.0.0-20220225172249-27dd8689420f // indirect golang.org/x/sys v0.0.0-20220919091848-fb04ddd9f9c8 // indirect From 335d124f81f910d6c2e6f4e791eefcfa1ed2ffbe Mon Sep 17 00:00:00 2001 From: Tyler Helmuth <12352919+TylerHelmuth@users.noreply.github.com> Date: Tue, 8 Nov 2022 09:29:54 -0700 Subject: [PATCH 15/22] fix impi --- processor/transformprocessor/config_test.go | 4 ++-- processor/transformprocessor/factory.go | 3 +-- processor/transformprocessor/factory_test.go | 4 ++-- processor/transformprocessor/internal/logs/processor_test.go | 4 ++-- .../transformprocessor/internal/metrics/processor_test.go | 4 ++-- .../transformprocessor/internal/traces/processor_test.go | 4 ++-- 6 files changed, 11 insertions(+), 12 deletions(-) diff --git a/processor/transformprocessor/config_test.go b/processor/transformprocessor/config_test.go index e30d73aa9305..9e019d421dc8 100644 --- a/processor/transformprocessor/config_test.go +++ b/processor/transformprocessor/config_test.go @@ -18,11 +18,11 @@ import ( "path/filepath" "testing" - "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" - 
"github.com/stretchr/testify/assert" "go.opentelemetry.io/collector/config" "go.opentelemetry.io/collector/confmap/confmaptest" + + "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" ) func TestLoadConfig(t *testing.T) { diff --git a/processor/transformprocessor/factory.go b/processor/transformprocessor/factory.go index fefe3fce194c..f3f75e74dde0 100644 --- a/processor/transformprocessor/factory.go +++ b/processor/transformprocessor/factory.go @@ -18,13 +18,12 @@ import ( "context" "fmt" - "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" - "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/config" "go.opentelemetry.io/collector/consumer" "go.opentelemetry.io/collector/processor/processorhelper" + "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/logs" "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/metrics" "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/traces" diff --git a/processor/transformprocessor/factory_test.go b/processor/transformprocessor/factory_test.go index adbb18e851d9..3a8122fad6a1 100644 --- a/processor/transformprocessor/factory_test.go +++ b/processor/transformprocessor/factory_test.go @@ -18,8 +18,6 @@ import ( "context" "testing" - "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" - "github.com/stretchr/testify/assert" "go.opentelemetry.io/collector/component/componenttest" "go.opentelemetry.io/collector/config" @@ -28,6 +26,8 @@ import ( "go.opentelemetry.io/collector/pdata/plog" "go.opentelemetry.io/collector/pdata/pmetric" "go.opentelemetry.io/collector/pdata/ptrace" + + "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" ) func TestFactory_Type(t *testing.T) { diff --git a/processor/transformprocessor/internal/logs/processor_test.go b/processor/transformprocessor/internal/logs/processor_test.go index 688220af7108..2b310e709f53 100644 --- a/processor/transformprocessor/internal/logs/processor_test.go +++ b/processor/transformprocessor/internal/logs/processor_test.go @@ -19,12 +19,12 @@ import ( "testing" "time" - "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" - "github.com/stretchr/testify/assert" "go.opentelemetry.io/collector/component/componenttest" "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/plog" + + "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" ) var ( diff --git a/processor/transformprocessor/internal/metrics/processor_test.go b/processor/transformprocessor/internal/metrics/processor_test.go index 570d57c86d78..ff1da001caa1 100644 --- a/processor/transformprocessor/internal/metrics/processor_test.go +++ b/processor/transformprocessor/internal/metrics/processor_test.go @@ -19,12 +19,12 @@ import ( "testing" "time" - "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" - "github.com/stretchr/testify/assert" 
"go.opentelemetry.io/collector/component/componenttest" "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/pmetric" + + "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" ) var ( diff --git a/processor/transformprocessor/internal/traces/processor_test.go b/processor/transformprocessor/internal/traces/processor_test.go index 5c106b8e413f..b47efe698d1d 100644 --- a/processor/transformprocessor/internal/traces/processor_test.go +++ b/processor/transformprocessor/internal/traces/processor_test.go @@ -19,12 +19,12 @@ import ( "testing" "time" - "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" - "github.com/stretchr/testify/assert" "go.opentelemetry.io/collector/component/componenttest" "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/ptrace" + + "github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common" ) var ( From 4c1dd6b6a7aad2f76653da3a112bdfa24d1250df Mon Sep 17 00:00:00 2001 From: Tyler Helmuth <12352919+TylerHelmuth@users.noreply.github.com> Date: Tue, 8 Nov 2022 11:56:13 -0700 Subject: [PATCH 16/22] Update readme --- processor/transformprocessor/README.md | 150 +++++++++++++++++++------ 1 file changed, 115 insertions(+), 35 deletions(-) diff --git a/processor/transformprocessor/README.md b/processor/transformprocessor/README.md index 32be946782aa..30e19fff51a9 100644 --- a/processor/transformprocessor/README.md +++ b/processor/transformprocessor/README.md @@ -8,66 +8,146 @@ | Warnings | [Unsound Transformations, Identity Conflict, Orphaned Telemetry, Other](#warnings) | The transform processor modifies telemetry based on configuration using the [OpenTelemetry Transformation Language](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/pkg/ottl). -The processor takes a list of statements for each signal type and executes the statements against the incoming telemetry in the order specified in the config. Each statement can access and transform telemetry using functions and allow the use of a condition to help decide whether the function should be executed. + +For each signal type, the processor takes a list of statements associated to a [Context type](#contexts) and executes the statements against the incoming telemetry in the order specified in the config. +Each statement can access and transform telemetry using functions and allow the use of a condition to help decide whether the function should be executed. ## Config -The transform processor allows configuring statements for traces, metrics, and logs. Each signal specifies a list of string statements that get passed to the OTTL for interpretation. +The transform processor allows configuring multiple context statements for traces, metrics, and logs. +The value of `context` specifies which [OTTL Context](#contexts) to use when interpreting the associated statements. +The statement strings, which must be OTTL compatible, will be passed to the OTTL and interpreted using the associated context. +Each context will be processed in the order specified and each statement for a context will be executed in the order specified. 
```yaml transform: - : - statements: - - string - - string - - string + _statements: + - context: string + statements: + - string + - string + - string + - context: string + statements: + - string + - string + - string ``` +Proper use of contexts will provide increased performance and capabilities. See [Contexts](#contexts) for more details. + +Valid values for `context` are: + +| Signal | Context Values | +|-------------------|------------------------------------------------| +| trace_statements | `resrouce`, `scope`, `trace`, and `spanevent` | +| metric_statements | `resrouce`, `scope`, `metric`, and `datapoint` | +| log_statements | `resrouce`, `scope`, and `log` | + ## Example +The example takes advantage of context efficiency by grouping transformations with the context which it indents to transform. +See [Contexts](#contexts) for more details. + Example configuration: ```yaml transform: - traces: - statements: - - set(status.code, 1) where attributes["http.path"] == "/health" - - keep_keys(resource.attributes, ["service.name", "service.namespace", "cloud.region", "process.command_line"]) - - set(name, attributes["http.route"]) - - replace_match(attributes["http.target"], "/user/*/list/*", "/user/{userId}/list/{listId}") - - replace_pattern(resource.attributes["process.command_line"], "password\\=[^\\s]*(\\s?)", "password=***") - - limit(attributes, 100, []) - - limit(resource.attributes, 100, []) + trace_statements: + - context: resource + statements: + - keep_keys(attributes, ["service.name", "service.namespace", "cloud.region", "process.command_line"]) + - replace_pattern(attributes["process.command_line"], "password\\=[^\\s]*(\\s?)", "password=***") + - limit(attributes, 100, []) + - truncate_all(attributes, 4096) + - context: trace + statements: + - set(status.code, 1) where attributes["http.path"] == "/health" + - set(name, attributes["http.route"]) + - replace_match(attributes["http.target"], "/user/*/list/*", "/user/{userId}/list/{listId}") + - limit(attributes, 100, []) + - truncate_all(attributes, 4096) + + metric_statements: + - context: resource + statements: + - keep_keys(attributes, ["host.name"]) - truncate_all(attributes, 4096) - - truncate_all(resource.attributes, 4096) - metrics: - statements: - - set(metric.description, "Sum") where metric.type == "Sum" - - keep_keys(resource.attributes, ["host.name"]) - - limit(attributes, 100, ["host.name"]) - - truncate_all(attributes, 4096) - - truncate_all(resource.attributes, 4096) - - convert_sum_to_gauge() where metric.name == "system.processes.count" - - convert_gauge_to_sum("cumulative", false) where metric.name == "prometheus_metric" - logs: - statements: - - set(severity_text, "FAIL") where body == "request failed" - - replace_all_matches(attributes, "/user/*/list/*", "/user/{userId}/list/{listId}") - - replace_all_patterns(attributes, "/account/\\d{4}", "/account/{accountId}") - - set(body, attributes["http.route"]) - - keep_keys(resource.attributes, ["service.name", "service.namespace", "cloud.region"]) + - context: metric + statements: + - set(description, "Sum") where type == "Sum" + - context: datapoint + statements: + - limit(attributes, 100, ["host.name"]) + - truncate_all(attributes, 4096) + - convert_sum_to_gauge() where metric.name == "system.processes.count" + - convert_gauge_to_sum("cumulative", false) where metric.name == "prometheus_metric" + + log_statements: + - context: resource + statements: + - keep_keys(resource.attributes, ["service.name", "service.namespace", "cloud.region"]) + - context: log + 
statements: + - set(severity_text, "FAIL") where body == "request failed" + - replace_all_matches(attributes, "/user/*/list/*", "/user/{userId}/list/{listId}") + - replace_all_patterns(attributes, "/account/\\d{4}", "/account/{accountId}") + - set(body, attributes["http.route"]) ``` + ## Grammar You can learn more in-depth details on the capabilities and limitations of the OpenTelemetry Transformation Language used by the transform processor by reading about its [grammar](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/pkg/ottl#grammar). ## Contexts -The transform processor utilizes the OTTL's standard contexts for Traces, Metrics and Logs. The contexts allow the OTTL to interact with the underlying telemetry data in its pdata form. +The transform processor utilizes the OTTL's contexts to transform Resource, Scope, Trace, SpanEvent, Metric, DataPoint, and Log telemetry. +The contexts allow the OTTL to interact with the underlying telemetry data in its pdata form. +- [Resource Context](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/pkg/ottl/contexts/ottlresource) +- [Scope Context](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/pkg/ottl/contexts/ottlscope) - [Traces Context](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/pkg/ottl/contexts/ottltraces) -- [Metrics Context](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/pkg/ottl/contexts/ottldatapoints) +- [SpanEvent Context](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/pkg/ottl/contexts/ottlspanevent) +- [Metric Context](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/pkg/ottl/contexts/ottlmetric) +- [DataPoint Context](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/pkg/ottl/contexts/ottldatapoints) - [Logs Context](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/pkg/ottl/contexts/ottllogs) +Each context allows transformation of its type of telemetry. +For example, statements associated to a `resource` context will be able to transform the resource's `attributes` and `dropped_attributes_count`. + +Contexts __NEVER__ supply access to individual items "lower" in the protobuf definition. +- This means statements associated to a `resource` __WILL NOT__ be able to access the underlying instrumentation scopes. +- This means statements associated to a `scope` __WILL NOT__ be able to access the underlying telemetry slices (spans, metrics, or logs). +- Similarly, statements associated to a `metric` __WILL NOT__ be able to access individual datapoints, but can access the entire datapoints slice. +- Similarly, statements associated to a `trace` __WILL NOT__ be able to access individual SpanEvents, but can access the entire SpanEvents slice. + +For practical purposes, this means that a context cannot make decisions on its telemetry based on telemetry "lower" in the structure. 
+For example, __the following context statement is not possible__ because it attempts to use individual datapoint attributes in the condition of a statements that is associated to a `metric` + +```yaml +metric_statements: +- context: metric + statements: + - set(description, "test passed") where datapoints.attributes["test"] == "pass" +``` + +Context __ALWAYS__ supply access to the items "higher" in the protobuf definition that are associated to the telemetry being transformed. +- This means that statements associated to a `datapoint` have access to a datapoint's metric, instrumentation scope, and resource. +- This means that statements associated to a `spanevent` have access to a spanevent's span, instrumentation scope, and resource. +- This means that statements associated to a `trace`/`metric`/`log` have access to the telemetry's instrumentation scope, and resource. +- This means that statements associated to a `scope` have access to the scope's resource. + +For example, __the following context statement is possible__ because `datapoint` statements can access the datapoint's metric. + +```yaml +metric_statements: +- context: datapoint + statements: + - set(metric.description, "test passed") where attributes["test"] == "pass" +``` + +Whenever possible, associate your statements to the context that the statement intend to transform. +Although you can modify resource attributes associated to a span using the `trace` context, it is more efficient to use the `resource` context. + ## Supported functions: Since the transform processor utilizes the OTTL's contexts for Traces, Metrics, and Logs, it is able to utilize functions that expect pdata in addition to any common functions. These common functions can be used for any signal. From 7cc39edd810c2176a78d5d6f7451e6c2e928274c Mon Sep 17 00:00:00 2001 From: Tyler Helmuth <12352919+TylerHelmuth@users.noreply.github.com> Date: Tue, 8 Nov 2022 15:10:40 -0700 Subject: [PATCH 17/22] add changelog entry --- .chloggen/tp-enhanced-contexts.yaml | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100755 .chloggen/tp-enhanced-contexts.yaml diff --git a/.chloggen/tp-enhanced-contexts.yaml b/.chloggen/tp-enhanced-contexts.yaml new file mode 100755 index 000000000000..9444d6a6a5f6 --- /dev/null +++ b/.chloggen/tp-enhanced-contexts.yaml @@ -0,0 +1,17 @@ +# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix' +change_type: enhancement + +# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver) +component: processor/transform + +# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`). +note: Adds new configuration options that allow specifying the OTTL context to use when executing statements. See [Transform Processor README](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/processor/transformprocessor#config) for more details. + +# One or more tracking issues related to the change +issues: [15381] + +# (Optional) One or more lines of additional information to render under the primary note. +# These lines will be padded with 2 spaces and then inserted directly into the document. +# Use pipe (|) for multiline entries. +subtext: | + The existing configuration options will be deprecated in a future release. 
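Taken together, the patches above wire the new `ContextStatements` configuration through a shared `common.ParserCollection`: each `context` value selects an OTTL parser, and each parsed group of statements becomes a `common.Context` that the signal-specific processors execute in config order via the `TracesContext`, `MetricsContext`, or `LogsContext` interfaces. The sketch below shows one way a logs component could consume those APIs; it is illustrative only — the package name, helper names, and surrounding wiring are assumptions, not code from this PR.

```go
package example

import (
	"go.opentelemetry.io/collector/component"
	"go.opentelemetry.io/collector/pdata/plog"

	"github.com/open-telemetry/opentelemetry-collector-contrib/processor/transformprocessor/internal/common"
)

// buildLogContexts parses context-scoped statements ("resource", "scope", "log")
// into executable Context values using a log-capable parser collection.
// Helper name and signature are illustrative assumptions.
func buildLogContexts(cfg []common.ContextStatements, functions map[string]interface{}, set component.TelemetrySettings) ([]common.Context, error) {
	pc := common.NewParserCollection(set, common.WithLogParser(functions))
	return pc.ParseContextStatements(cfg)
}

// applyLogContexts executes every parsed context against the incoming logs in
// the order the statements were configured.
func applyLogContexts(contexts []common.Context, ld plog.Logs) error {
	for _, c := range contexts {
		lc, ok := c.(common.LogsContext)
		if !ok {
			continue // skip anything that cannot process logs (not expected from a log parser collection)
		}
		if err := lc.ProcessLogs(ld); err != nil {
			return err
		}
	}
	return nil
}
```

Because `ParseContextStatements` fills the returned slice in the order the context statements appear in the configuration, executing the contexts sequentially preserves the ordering guarantee described in the README.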
From c3f163b6cab903642374ab4b5f35a2757f2a58c3 Mon Sep 17 00:00:00 2001
From: Tyler Helmuth <12352919+TylerHelmuth@users.noreply.github.com>
Date: Wed, 9 Nov 2022 11:04:39 -0700
Subject: [PATCH 18/22] Update processor/transformprocessor/README.md

Co-authored-by: Daniel Jaglowski
---
 processor/transformprocessor/README.md | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/processor/transformprocessor/README.md b/processor/transformprocessor/README.md
index 30e19fff51a9..6b791aea2a28 100644
--- a/processor/transformprocessor/README.md
+++ b/processor/transformprocessor/README.md
@@ -40,9 +40,9 @@ Valid values for `context` are:
 
 | Signal            | Context Values                                 |
 |-------------------|------------------------------------------------|
-| trace_statements  | `resrouce`, `scope`, `trace`, and `spanevent`  |
-| metric_statements | `resrouce`, `scope`, `metric`, and `datapoint` |
-| log_statements    | `resrouce`, `scope`, and `log`                 |
+| trace_statements  | `resource`, `scope`, `trace`, and `spanevent`  |
+| metric_statements | `resource`, `scope`, `metric`, and `datapoint` |
+| log_statements    | `resource`, `scope`, and `log`                 |
 
 ## Example
 
From ad395096683c1e85d16f587f02a02744e514626a Mon Sep 17 00:00:00 2001
From: Tyler Helmuth <12352919+TylerHelmuth@users.noreply.github.com>
Date: Wed, 9 Nov 2022 11:16:38 -0700
Subject: [PATCH 19/22] Add test cases that reuse context

---
 .../internal/logs/processor_test.go    | 28 ++++++++++++++++
 .../internal/metrics/processor_test.go | 33 +++++++++++++++++++
 .../internal/traces/processor_test.go  | 28 ++++++++++++++++
 3 files changed, 89 insertions(+)

diff --git a/processor/transformprocessor/internal/logs/processor_test.go b/processor/transformprocessor/internal/logs/processor_test.go
index 2b310e709f53..3a1e87754b21 100644
--- a/processor/transformprocessor/internal/logs/processor_test.go
+++ b/processor/transformprocessor/internal/logs/processor_test.go
@@ -354,6 +354,34 @@ func Test_ProcessLogs_MixContext(t *testing.T) {
 				td.ResourceLogs().At(0).ScopeLogs().At(0).Scope().Attributes().PutStr("test", "pass")
 			},
 		},
+		{
+			name: "reuse context",
+			contextStatments: []common.ContextStatements{
+				{
+					Context: "scope",
+					Statements: []string{
+						`set(attributes["test"], "pass")`,
+					},
+				},
+				{
+					Context: "log",
+					Statements: []string{
+						`set(attributes["test"], "pass") where instrumentation_scope.attributes["test"] == "pass"`,
+					},
+				},
+				{
+					Context: "scope",
+					Statements: []string{
+						`set(attributes["test"], "fail")`,
+					},
+				},
+			},
+			want: func(td plog.Logs) {
+				td.ResourceLogs().At(0).ScopeLogs().At(0).Scope().Attributes().PutStr("test", "fail")
+				td.ResourceLogs().At(0).ScopeLogs().At(0).LogRecords().At(0).Attributes().PutStr("test", "pass")
+				td.ResourceLogs().At(0).ScopeLogs().At(0).LogRecords().At(1).Attributes().PutStr("test", "pass")
+			},
+		},
 	}
 	for _, tt := range tests {
diff --git a/processor/transformprocessor/internal/metrics/processor_test.go b/processor/transformprocessor/internal/metrics/processor_test.go
index ff1da001caa1..5bd6119fdb66 100644
--- a/processor/transformprocessor/internal/metrics/processor_test.go
+++ b/processor/transformprocessor/internal/metrics/processor_test.go
@@ -537,6 +537,39 @@ func Test_ProcessMetrics_MixContext(t *testing.T) {
 				td.ResourceMetrics().At(0).ScopeMetrics().At(0).Scope().Attributes().PutStr("test", "pass")
 			},
 		},
+		{
+			name: "reuse context ",
+			contextStatments: []common.ContextStatements{
+				{
+					Context: "scope",
+					Statements: []string{
+						`set(attributes["test"], "pass")`,
+					},
+				},
+				{
+					Context: "datapoint",
+					Statements: []string{
+						`set(attributes["test"], "pass") where instrumentation_scope.attributes["test"] == "pass"`,
+					},
+				},
+				{
+					Context: "scope",
+					Statements: []string{
+						`set(attributes["test"], "fail")`,
+					},
+				},
+			},
+			want: func(td pmetric.Metrics) {
+				td.ResourceMetrics().At(0).ScopeMetrics().At(0).Scope().Attributes().PutStr("test", "fail")
+				td.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(0).Sum().DataPoints().At(0).Attributes().PutStr("test", "pass")
+				td.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(0).Sum().DataPoints().At(1).Attributes().PutStr("test", "pass")
+				td.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(1).Histogram().DataPoints().At(0).Attributes().PutStr("test", "pass")
+				td.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(1).Histogram().DataPoints().At(1).Attributes().PutStr("test", "pass")
+				td.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(2).ExponentialHistogram().DataPoints().At(0).Attributes().PutStr("test", "pass")
+				td.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(2).ExponentialHistogram().DataPoints().At(1).Attributes().PutStr("test", "pass")
+				td.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(3).Summary().DataPoints().At(0).Attributes().PutStr("test", "pass")
+			},
+		},
 	}
 	for _, tt := range tests {
diff --git a/processor/transformprocessor/internal/traces/processor_test.go b/processor/transformprocessor/internal/traces/processor_test.go
index b47efe698d1d..32e502787068 100644
--- a/processor/transformprocessor/internal/traces/processor_test.go
+++ b/processor/transformprocessor/internal/traces/processor_test.go
@@ -401,6 +401,34 @@ func Test_ProcessTraces_MixContext(t *testing.T) {
 				td.ResourceSpans().At(0).ScopeSpans().At(0).Scope().Attributes().PutStr("test", "pass")
 			},
 		},
+		{
+			name: "reuse context",
+			contextStatments: []common.ContextStatements{
+				{
+					Context: "scope",
+					Statements: []string{
+						`set(attributes["test"], "pass")`,
+					},
+				},
+				{
+					Context: "trace",
+					Statements: []string{
+						`set(attributes["test"], "pass") where instrumentation_scope.attributes["test"] == "pass"`,
+					},
+				},
+				{
+					Context: "scope",
+					Statements: []string{
+						`set(attributes["test"], "fail")`,
+					},
+				},
+			},
+			want: func(td ptrace.Traces) {
+				td.ResourceSpans().At(0).ScopeSpans().At(0).Scope().Attributes().PutStr("test", "fail")
+				td.ResourceSpans().At(0).ScopeSpans().At(0).Spans().At(0).Attributes().PutStr("test", "pass")
+				td.ResourceSpans().At(0).ScopeSpans().At(0).Spans().At(1).Attributes().PutStr("test", "pass")
+			},
+		},
 	}
 	for _, tt := range tests {

From 6e7426a238f68f780135ed1577c5d81f964d3c69 Mon Sep 17 00:00:00 2001
From: Tyler Helmuth <12352919+TylerHelmuth@users.noreply.github.com>
Date: Wed, 9 Nov 2022 14:32:58 -0700
Subject: [PATCH 20/22] Update processor/transformprocessor/README.md

Co-authored-by: Evan Bradley
---
 processor/transformprocessor/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/processor/transformprocessor/README.md b/processor/transformprocessor/README.md
index 6b791aea2a28..57c81515cafc 100644
--- a/processor/transformprocessor/README.md
+++ b/processor/transformprocessor/README.md
@@ -46,7 +46,7 @@ Valid values for `context` are:
 
 ## Example
 
-The example takes advantage of context efficiency by grouping transformations with the context which it indents to transform.
+The example takes advantage of context efficiency by grouping transformations with the context which it intends to transform.
 See [Contexts](#contexts) for more details.
 Example configuration:

From a939062f384660c1c5d77435fa183d413fb60541 Mon Sep 17 00:00:00 2001
From: Tyler Helmuth <12352919+TylerHelmuth@users.noreply.github.com>
Date: Wed, 9 Nov 2022 15:28:59 -0700
Subject: [PATCH 21/22] Apply feedback

---
 processor/transformprocessor/README.md            | 1 +
 processor/transformprocessor/testdata/config.yaml | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/processor/transformprocessor/README.md b/processor/transformprocessor/README.md
index 57c81515cafc..f8b5822b9922 100644
--- a/processor/transformprocessor/README.md
+++ b/processor/transformprocessor/README.md
@@ -147,6 +147,7 @@ metric_statements:
 
 Whenever possible, associate your statements to the context that the statement intends to transform.
 Although you can modify resource attributes associated to a span using the `trace` context, it is more efficient to use the `resource` context.
+This is because modifying resource attributes at the `trace` context means modifying the resource once per span associated to the resource.
 
 ## Supported functions:
 
diff --git a/processor/transformprocessor/testdata/config.yaml b/processor/transformprocessor/testdata/config.yaml
index 53db5f752e7f..ba38bcf0a157 100644
--- a/processor/transformprocessor/testdata/config.yaml
+++ b/processor/transformprocessor/testdata/config.yaml
@@ -95,4 +95,4 @@ transform/unknown_context:
   trace_statements:
     - context: test
       statements:
-        - set(name, "bear") where attributes["http.path"] == "/animal"
\ No newline at end of file
+        - set(name, "bear") where attributes["http.path"] == "/animal"

From 1ad3e0439bbea8a7572443d38801726263d8e555 Mon Sep 17 00:00:00 2001
From: Tyler Helmuth <12352919+TylerHelmuth@users.noreply.github.com>
Date: Thu, 10 Nov 2022 08:30:09 -0700
Subject: [PATCH 22/22] Update processor/transformprocessor/README.md

Co-authored-by: Kent Quirk
---
 processor/transformprocessor/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/processor/transformprocessor/README.md b/processor/transformprocessor/README.md
index f8b5822b9922..4b0dcbf97640 100644
--- a/processor/transformprocessor/README.md
+++ b/processor/transformprocessor/README.md
@@ -147,7 +147,7 @@ metric_statements:
 
 Whenever possible, associate your statements to the context that the statement intends to transform.
 Although you can modify resource attributes associated to a span using the `trace` context, it is more efficient to use the `resource` context.
-This is because modifying resource attributes at the `trace` context means modifying the resource once per span associated to the resource.
+This is because contexts are nested: the efficiency comes because higher-level contexts can avoid iterating through any of the contexts at a lower level.
 
 ## Supported functions:
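To make the efficiency note above concrete, a sketch of the two approaches is below (the processor names are illustrative, and it assumes the `resource.attributes` path that the trace context exposes for its resource): setting a resource attribute from the `trace` context evaluates the statement once per span, while the `resource` context evaluates it once per resource.

```yaml
processors:
  # Works, but the resource attribute is rewritten once for every span under the resource.
  transform/per_span:
    trace_statements:
      - context: trace
        statements:
          - set(resource.attributes["test"], "pass")
  # Preferred: the statement is evaluated once per resource.
  transform/per_resource:
    trace_statements:
      - context: resource
        statements:
          - set(attributes["test"], "pass")
```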