From beea276d93e7cb735367a06f6a1f3ea5477136a2 Mon Sep 17 00:00:00 2001
From: Bogdan
Date: Thu, 18 Aug 2022 14:28:38 -0700
Subject: [PATCH] Change pdata generated types to use type definition instead of aliases

Change how objects are generated in `pdata`. Previously, the pdata objects were generated in the `internal` package and aliases were created in the public packages. This PR changes that by creating only "wrapper" objects inside the internal package, while the public type is a type definition of the internal type:

```golang
package internal

type LogRecord struct {
	orig *otlplogs.LogRecord
}

func GetOrigLogRecord(ms LogRecord) *otlplogs.LogRecord {
	return ms.orig
}

func NewLogRecord(orig *otlplogs.LogRecord) LogRecord {
	return LogRecord{orig: orig}
}
```

```golang
package plog

type LogRecord internal.LogRecord
```

With this approach, we still do not allow users access to the internal origin, which gives us the flexibility to change to another representation (something like lazy proto, etc.), but it improves the documentation of the pdata packages; see [current documentation](https://pkg.go.dev/go.opentelemetry.io/collector/pdata@v0.58.0/plog).

Signed-off-by: Bogdan
---
 CHANGELOG.md | 2 + .../loggingexporter/internal/otlptext/logs.go | 2 +- .../cmd/pdatagen/internal/base_fields.go | 319 ++- .../cmd/pdatagen/internal/base_slices.go | 319 ++- .../cmd/pdatagen/internal/base_structs.go | 76 +- .../cmd/pdatagen/internal/common_structs.go | 67 +- pdata/internal/cmd/pdatagen/internal/files.go | 99 +- .../internal/immutable_slice_structs.go | 89 +- .../cmd/pdatagen/internal/log_structs.go | 26 +- .../cmd/pdatagen/internal/metrics_structs.go | 92 +- .../cmd/pdatagen/internal/resource_structs.go | 7 +- .../cmd/pdatagen/internal/trace_structs.go | 60 +- pdata/internal/cmd/pdatagen/main.go | 27 +- pdata/internal/generated_immutable_slice.go | 126 - pdata/internal/generated_plog_test.go | 615 ----- pdata/internal/generated_pmetric_test.go | 2242 ----------------- pdata/internal/generated_ptrace_test.go | 1090 -------- pdata/internal/generated_wrapper_common.go | 74 + .../generated_wrapper_immutable_slice.go | 54 + pdata/internal/generated_wrapper_logs.go | 186 ++ pdata/internal/generated_wrapper_metrics.go | 661 +++++ .../generated_wrapper_resource.go} | 38 +- pdata/internal/generated_wrapper_traces.go | 322 +++ pdata/internal/wrapper_common.go | 66 + pdata/internal/wrapper_logs.go | 69 + pdata/internal/wrapper_metrics.go | 69 + .../wrapper_span_id.go} | 23 +- .../wrapper_trace_id.go} | 23 +- pdata/internal/wrapper_traces.go | 46 + pdata/pcommon/alias.go | 92 - pdata/{internal => pcommon}/common.go | 223 +- pdata/{internal => pcommon}/common_test.go | 110 +- .../{internal => pcommon}/generated_common.go | 84 +- pdata/pcommon/generated_common_alias.go | 45 - .../generated_common_test.go | 130 +- pdata/pcommon/generated_immutable_slice.go | 137 + .../generated_immutable_slice_alias.go | 38 - .../generated_immutable_slice_test.go | 8 +- .../generated_resource.go | 24 +- .../generated_resource_test.go | 41 +- pdata/{internal => pcommon}/spanid.go | 25 +- pdata/{internal => pcommon}/spanid_test.go | 2 +- pdata/{internal => pcommon}/timestamp.go | 2 +- pdata/pcommon/timestamp_alias.go | 24 - pdata/{internal => pcommon}/timestamp_test.go | 2 +- pdata/{internal => pcommon}/traceid.go | 25 +- pdata/{internal => pcommon}/traceid_test.go | 2 +- pdata/plog/alias.go | 134 - pdata/plog/generated_alias.go | 95 - .../generated_logs.go} | 304 +-- pdata/plog/generated_logs_test.go | 526 ++++ pdata/plog/json.go | 4 +-
pdata/{internal => plog}/logs.go | 160 +- pdata/{internal => plog}/logs_test.go | 48 +- pdata/plog/pb.go | 6 +- pdata/plog/plogotlp/logs.go | 6 +- pdata/pmetric/alias.go | 82 - pdata/pmetric/generated_alias.go | 325 --- .../generated_metrics.go} | 1099 ++++---- pdata/pmetric/generated_metrics_test.go | 1855 ++++++++++++++ pdata/pmetric/json.go | 5 +- pdata/pmetric/json_test.go | 79 +- pdata/{internal => pmetric}/metrics.go | 93 +- pdata/{internal => pmetric}/metrics_test.go | 131 +- pdata/pmetric/pb.go | 6 +- pdata/pmetric/pmetricotlp/metrics.go | 6 +- pdata/ptrace/alias.go | 80 - pdata/ptrace/generated_alias.go | 162 -- .../generated_traces.go} | 544 ++-- pdata/ptrace/generated_traces_test.go | 920 +++++++ pdata/ptrace/json.go | 4 +- pdata/ptrace/json_test.go | 30 +- pdata/ptrace/pb.go | 6 +- pdata/ptrace/ptraceotlp/traces.go | 6 +- pdata/{internal => ptrace}/traces.go | 56 +- pdata/{internal => ptrace}/traces_test.go | 36 +- 76 files changed, 7413 insertions(+), 7298 deletions(-) delete mode 100644 pdata/internal/generated_immutable_slice.go delete mode 100644 pdata/internal/generated_plog_test.go delete mode 100644 pdata/internal/generated_pmetric_test.go delete mode 100644 pdata/internal/generated_ptrace_test.go create mode 100644 pdata/internal/generated_wrapper_common.go create mode 100644 pdata/internal/generated_wrapper_immutable_slice.go create mode 100644 pdata/internal/generated_wrapper_logs.go create mode 100644 pdata/internal/generated_wrapper_metrics.go rename pdata/{pcommon/generated_resource_alias.go => internal/generated_wrapper_resource.go} (55%) create mode 100644 pdata/internal/generated_wrapper_traces.go create mode 100644 pdata/internal/wrapper_common.go create mode 100644 pdata/internal/wrapper_logs.go create mode 100644 pdata/internal/wrapper_metrics.go rename pdata/{pcommon/spanid_alias.go => internal/wrapper_span_id.go} (55%) rename pdata/{pcommon/traceid_alias.go => internal/wrapper_trace_id.go} (55%) create mode 100644 pdata/internal/wrapper_traces.go delete mode 100644 pdata/pcommon/alias.go rename pdata/{internal => pcommon}/common.go (81%) rename pdata/{internal => pcommon}/common_test.go (94%) rename pdata/{internal => pcommon}/generated_common.go (76%) delete mode 100644 pdata/pcommon/generated_common_alias.go rename pdata/{internal => pcommon}/generated_common_test.go (52%) create mode 100644 pdata/pcommon/generated_immutable_slice.go delete mode 100644 pdata/pcommon/generated_immutable_slice_alias.go rename pdata/{internal => pcommon}/generated_immutable_slice_test.go (94%) rename pdata/{internal => pcommon}/generated_resource.go (83%) rename pdata/{internal => pcommon}/generated_resource_test.go (57%) rename pdata/{internal => pcommon}/spanid.go (67%) rename pdata/{internal => pcommon}/spanid_test.go (98%) rename pdata/{internal => pcommon}/timestamp.go (94%) delete mode 100644 pdata/pcommon/timestamp_alias.go rename pdata/{internal => pcommon}/timestamp_test.go (98%) rename pdata/{internal => pcommon}/traceid.go (67%) rename pdata/{internal => pcommon}/traceid_test.go (98%) delete mode 100644 pdata/plog/alias.go delete mode 100644 pdata/plog/generated_alias.go rename pdata/{internal/generated_plog.go => plog/generated_logs.go} (70%) create mode 100644 pdata/plog/generated_logs_test.go rename pdata/{internal => plog}/logs.go (60%) rename pdata/{internal => plog}/logs_test.go (79%) delete mode 100644 pdata/pmetric/alias.go delete mode 100644 pdata/pmetric/generated_alias.go rename pdata/{internal/generated_pmetric.go => pmetric/generated_metrics.go} (72%) 
create mode 100644 pdata/pmetric/generated_metrics_test.go rename pdata/{internal => pmetric}/metrics.go (76%) rename pdata/{internal => pmetric}/metrics_test.go (89%) delete mode 100644 pdata/ptrace/alias.go delete mode 100644 pdata/ptrace/generated_alias.go rename pdata/{internal/generated_ptrace.go => ptrace/generated_traces.go} (71%) create mode 100644 pdata/ptrace/generated_traces_test.go rename pdata/{internal => ptrace}/traces.go (73%) rename pdata/{internal => ptrace}/traces_test.go (77%) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7e4a4e921d3..867c1249b45 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -41,6 +41,8 @@ - Remove unnecessary duplicate code and allocations for reading enums in JSON. (#5928) - Add "dist.build_tags" configuration option to support passing go build flags to builder. (#5659) - Add an AsRaw func on the flags, lots of places to encode these flags. (#5934) +- Change pdata generated types to use type definition instead of aliases. (#5936) + - Improves documentation, and makes code easier to read/understand. ### 🧰 Bug fixes 🧰 diff --git a/exporter/loggingexporter/internal/otlptext/logs.go b/exporter/loggingexporter/internal/otlptext/logs.go index 9c12bb3101c..2f77a740427 100644 --- a/exporter/loggingexporter/internal/otlptext/logs.go +++ b/exporter/loggingexporter/internal/otlptext/logs.go @@ -52,7 +52,7 @@ func (textLogsMarshaler) MarshalLogs(ld plog.Logs) ([]byte, error) { buf.logAttributes("Attributes", lr.Attributes()) buf.logEntry("Trace ID: %s", lr.TraceID().HexString()) buf.logEntry("Span ID: %s", lr.SpanID().HexString()) - buf.logEntry("Flags: %d", lr.Flags()) + buf.logEntry("Flags: %d", lr.FlagsStruct().AsRaw()) } } } diff --git a/pdata/internal/cmd/pdatagen/internal/base_fields.go b/pdata/internal/cmd/pdatagen/internal/base_fields.go index f9933758a1a..c2e5f729c0a 100644 --- a/pdata/internal/cmd/pdatagen/internal/base_fields.go +++ b/pdata/internal/cmd/pdatagen/internal/base_fields.go @@ -20,66 +20,66 @@ import ( ) const accessorSliceTemplate = `// ${fieldName} returns the ${originFieldName} associated with this ${structName}. -func (ms ${structName}) ${fieldName}() ${returnType} { - return new${returnType}(&ms.orig.${originFieldName}) +func (ms ${structName}) ${fieldName}() ${packageName}${returnType} { + return ${packageName}${returnType}(internal.New${returnType}(&ms.getOrig().${originFieldName})) }` const accessorsSliceTestTemplate = `func Test${structName}_${fieldName}(t *testing.T) { ms := New${structName}() - assert.EqualValues(t, New${returnType}(), ms.${fieldName}()) - fillTest${returnType}(ms.${fieldName}()) - testVal${fieldName} := generateTest${returnType}() - assert.EqualValues(t, testVal${fieldName}, ms.${fieldName}()) + assert.Equal(t, ${packageName}New${returnType}(), ms.${fieldName}()) + internal.FillTest${returnType}(internal.${returnType}(ms.${fieldName}())) + assert.Equal(t, ${packageName}${returnType}(internal.GenerateTest${returnType}()), ms.${fieldName}()) }` const accessorsMessageValueTemplate = `// ${fieldName} returns the ${lowerFieldName} associated with this ${structName}. 
-func (ms ${structName}) ${fieldName}() ${returnType} { - return new${returnType}(&ms.orig.${originFieldName}) +func (ms ${structName}) ${fieldName}() ${packageName}${returnType} { + return ${packageName}${returnType}(internal.New${returnType}(&ms.getOrig().${originFieldName})) }` const accessorsMessageValueTestTemplate = `func Test${structName}_${fieldName}(t *testing.T) { ms := New${structName}() - fillTest${returnType}(ms.${fieldName}()) - assert.EqualValues(t, generateTest${returnType}(), ms.${fieldName}()) + internal.FillTest${returnType}(internal.${returnType}(ms.${fieldName}())) + assert.Equal(t, ${packageName}${returnType}(internal.GenerateTest${returnType}()), ms.${fieldName}()) }` const accessorsPrimitiveTemplate = `// ${fieldName} returns the ${lowerFieldName} associated with this ${structName}. -func (ms ${structName}) ${fieldName}() ${returnType} { - return ms.orig.${originFieldName} +func (ms ${structName}) ${fieldName}() ${packageName}${returnType} { + return ms.getOrig().${originFieldName} } // Set${fieldName} replaces the ${lowerFieldName} associated with this ${structName}. func (ms ${structName}) Set${fieldName}(v ${returnType}) { - ms.orig.${originFieldName} = v + ms.getOrig().${originFieldName} = v }` -const copyToPrimitiveSliceTestTemplate = ` if len(ms.orig.${originFieldName}) == 0 { - dest.orig.${originFieldName} = nil +const copyToPrimitiveSliceTestTemplate = ` if len(ms.getOrig().${originFieldName}) == 0 { + dest.getOrig().${originFieldName} = nil } else { - dest.orig.${originFieldName} = make(${rawType}, len(ms.orig.${originFieldName})) - copy(dest.orig.${originFieldName}, ms.orig.${originFieldName}) + dest.getOrig().${originFieldName} = make(${rawType}, len(ms.getOrig().${originFieldName})) + copy(dest.getOrig().${originFieldName}, ms.getOrig().${originFieldName}) } ` const accessorsPrimitiveSliceTemplate = `// ${fieldName} returns the ${lowerFieldName} associated with this ${structName}. -func (ms ${structName}) ${fieldName}() ${returnType} { - return ${returnType}{value: ms.orig.${originFieldName}} +func (ms ${structName}) ${fieldName}() ${packageName}${returnType} { + return ${packageName}${returnType}(internal.New${returnType}(ms.getOrig().${originFieldName})) } // Set${fieldName} replaces the ${lowerFieldName} associated with this ${structName}. -func (ms ${structName}) Set${fieldName}(v ${returnType}) { - ms.orig.${originFieldName} = v.value +func (ms ${structName}) Set${fieldName}(v ${packageName}${returnType}) { + ms.getOrig().${originFieldName} = internal.GetOrig${returnType}(internal.${returnType}(v)) }` const oneOfTypeAccessorHeaderTemplate = `// ${originFieldName}Type returns the type of the ${lowerOriginFieldName} for this ${structName}. // Calling this function on zero-initialized ${structName} will cause a panic. func (ms ${structName}) ${originFieldName}Type() ${typeName} { - switch ms.orig.${originFieldName}.(type) {` + switch ms.getOrig().${originFieldName}.(type) {` -const oneOfTypeAccessorHeaderTestTemplate = `func Test${structName}${originFieldName}Type(t *testing.T) { +const oneOfTypeAccessorHeaderTestTemplate = `func Test${structName}_${originFieldName}Type(t *testing.T) { tv := New${structName}() assert.Equal(t, ${typeName}None, tv.${originFieldName}Type()) - assert.Equal(t, "", ${typeName}(1000).String())` +} +` const accessorsOneOfMessageTemplate = `// ${fieldName} returns the ${lowerFieldName} associated with this ${structName}. 
// @@ -88,7 +88,7 @@ const accessorsOneOfMessageTemplate = `// ${fieldName} returns the ${lowerFieldN // // Calling this function on zero-initialized ${structName} will cause a panic. func (ms ${structName}) ${fieldName}() ${returnType} { - v, ok := ms.orig.Get${originOneOfFieldName}().(*${originStructType}) + v, ok := ms.getOrig().Get${originOneOfFieldName}().(*${originStructType}) if !ok { return ${returnType}{} } @@ -98,17 +98,18 @@ func (ms ${structName}) ${fieldName}() ${returnType} { const accessorsOneOfMessageTestTemplate = `func Test${structName}_${fieldName}(t *testing.T) { ms := New${structName}() ms.Set${originOneOfFieldName}Type(${typeName}) - fillTest${returnType}(ms.${fieldName}()) - assert.EqualValues(t, generateTest${returnType}(), ms.${fieldName}()) + assert.Equal(t, ${typeName}, ms.${originOneOfFieldName}Type()) + internal.FillTest${returnType}(internal.${returnType}(ms.${fieldName}())) + assert.Equal(t, ${returnType}(internal.GenerateTest${returnType}()), ms.${fieldName}()) } func Test${structName}_CopyTo_${fieldName}(t *testing.T) { ms := New${structName}() ms.Set${originOneOfFieldName}Type(${typeName}) - fillTest${returnType}(ms.${fieldName}()) + internal.FillTest${returnType}(internal.${returnType}(ms.${fieldName}())) dest := New${structName}() ms.CopyTo(dest) - assert.EqualValues(t, ms, dest) + assert.Equal(t, ms, dest) }` const copyToValueOneOfMessageTemplate = ` case ${typeName}: @@ -117,56 +118,79 @@ const copyToValueOneOfMessageTemplate = ` case ${typeName}: const accessorsOneOfPrimitiveTemplate = `// ${fieldName} returns the ${lowerFieldName} associated with this ${structName}. func (ms ${structName}) ${fieldName}() ${returnType} { - return ms.orig.Get${originFieldName}() + return ms.getOrig().Get${originFieldName}() } // Set${fieldName} replaces the ${lowerFieldName} associated with this ${structName}. func (ms ${structName}) Set${fieldName}(v ${returnType}) { - ms.orig.${originOneOfFieldName} = &${originStructType}{ + ms.getOrig().${originOneOfFieldName} = &${originStructType}{ ${originFieldName}: v, } }` +const accessorsOneOfPrimitiveTestTemplate = `func Test${structName}_${fieldName}(t *testing.T) { + ms := New${structName}() + assert.Equal(t, ${defaultVal}, ms.${fieldName}()) + ms.Set${fieldName}(${testValue}) + assert.Equal(t, ${testValue}, ms.${fieldName}()) + assert.Equal(t, ${typeName}, ms.${originOneOfFieldName}Type()) +}` + const accessorsPrimitiveTestTemplate = `func Test${structName}_${fieldName}(t *testing.T) { ms := New${structName}() - assert.EqualValues(t, ${defaultVal}, ms.${fieldName}()) - testVal${fieldName} := ${testValue} - ms.Set${fieldName}(testVal${fieldName}) - assert.EqualValues(t, testVal${fieldName}, ms.${fieldName}()) + assert.Equal(t, ${defaultVal}, ms.${fieldName}()) + ms.Set${fieldName}(${testValue}) + assert.Equal(t, ${testValue}, ms.${fieldName}()) }` const accessorsPrimitiveTypedTemplate = `// ${fieldName} returns the ${lowerFieldName} associated with this ${structName}. -func (ms ${structName}) ${fieldName}() ${returnType} { - return ${returnType}(ms.orig.${originFieldName}) +func (ms ${structName}) ${fieldName}() ${packageName}${returnType} { + return ${packageName}${returnType}(ms.getOrig().${originFieldName}) } // Set${fieldName} replaces the ${lowerFieldName} associated with this ${structName}. 
-func (ms ${structName}) Set${fieldName}(v ${returnType}) { - ms.orig.${originFieldName} = ${rawType}(v) +func (ms ${structName}) Set${fieldName}(v ${packageName}${returnType}) { + ms.getOrig().${originFieldName} = ${rawType}(v) +}` + +const accessorsPrimitiveTypedTestTemplate = `func Test${structName}_${fieldName}(t *testing.T) { + ms := New${structName}() + assert.Equal(t, ${packageName}${returnType}(${defaultVal}), ms.${fieldName}()) + testVal${fieldName} := ${packageName}${returnType}(${testValue}) + ms.Set${fieldName}(testVal${fieldName}) + assert.Equal(t, testVal${fieldName}, ms.${fieldName}()) }` const accessorsPrimitiveStructTemplate = `// ${fieldName} returns the ${lowerFieldName} associated with this ${structName}. -func (ms ${structName}) ${fieldName}() ${returnType} { - return ${returnType}{orig: (ms.orig.${originFieldName})} +func (ms ${structName}) ${fieldName}() ${packageName}${returnType} { + return ${packageName}${returnType}(internal.New${returnType}(ms.getOrig().${originFieldName})) } // Set${fieldName} replaces the ${lowerFieldName} associated with this ${structName}. -func (ms ${structName}) Set${fieldName}(v ${returnType}) { - ms.orig.${originFieldName} = v.orig +func (ms ${structName}) Set${fieldName}(v ${packageName}${returnType}) { + ms.getOrig().${originFieldName} = internal.GetOrig${returnType}(internal.${returnType}(v)) +}` + +const accessorsPrimitiveStructTestTemplate = `func Test${structName}_${fieldName}(t *testing.T) { + ms := New${structName}() + assert.Equal(t, ${packageName}${returnType}(internal.New${returnType}(${defaultVal})), ms.${fieldName}()) + testVal${fieldName} := ${packageName}${returnType}(internal.New${returnType}(${testValue})) + ms.Set${fieldName}(testVal${fieldName}) + assert.Equal(t, testVal${fieldName}, ms.${fieldName}()) }` const accessorsOptionalPrimitiveValueTemplate = `// ${fieldName} returns the ${lowerFieldName} associated with this ${structName}. func (ms ${structName}) ${fieldName}() ${returnType} { - return ms.orig.Get${fieldName}() + return ms.getOrig().Get${fieldName}() } // Has${fieldName} returns true if the ${structName} contains a // ${fieldName} value, false otherwise. func (ms ${structName}) Has${fieldName}() bool { - return ms.orig.${fieldName}_ != nil + return ms.getOrig().${fieldName}_ != nil } // Set${fieldName} replaces the ${lowerFieldName} associated with this ${structName}. func (ms ${structName}) Set${fieldName}(v ${returnType}) { - ms.orig.${fieldName}_ = &${originStructType}{${fieldName}: v} + ms.getOrig().${fieldName}_ = &${originStructType}{${fieldName}: v} }` type baseField interface { @@ -176,7 +200,7 @@ type baseField interface { generateSetWithTestValue(sb *strings.Builder) - generateCopyToValue(sb *strings.Builder) + generateCopyToValue(ms baseStruct, sb *strings.Builder) } type sliceField struct { @@ -192,6 +216,11 @@ func (sf *sliceField) generateAccessors(ms baseStruct, sb *strings.Builder) { return ms.getName() case "fieldName": return sf.fieldName + case "packageName": + if sf.returnSlice.getPackageName() != ms.getPackageName() { + return sf.returnSlice.getPackageName() + "." + } + return "" case "returnType": return sf.returnSlice.getName() case "originFieldName": @@ -209,6 +238,11 @@ func (sf *sliceField) generateAccessorsTest(ms baseStruct, sb *strings.Builder) return ms.getName() case "fieldName": return sf.fieldName + case "packageName": + if sf.returnSlice.getPackageName() != ms.getPackageName() { + return sf.returnSlice.getPackageName() + "." 
+ } + return "" case "returnType": return sf.returnSlice.getName() default: @@ -218,10 +252,10 @@ func (sf *sliceField) generateAccessorsTest(ms baseStruct, sb *strings.Builder) } func (sf *sliceField) generateSetWithTestValue(sb *strings.Builder) { - sb.WriteString("\tfillTest" + sf.returnSlice.getName() + "(tv." + sf.fieldName + "())") + sb.WriteString("\tFillTest" + sf.returnSlice.getName() + "(New" + sf.returnSlice.getName() + "(&tv.orig." + sf.originFieldName + "))") } -func (sf *sliceField) generateCopyToValue(sb *strings.Builder) { +func (sf *sliceField) generateCopyToValue(_ baseStruct, sb *strings.Builder) { sb.WriteString("\tms." + sf.fieldName + "().CopyTo(dest." + sf.fieldName + "())") } @@ -230,7 +264,7 @@ var _ baseField = (*sliceField)(nil) type messageValueField struct { fieldName string originFieldName string - returnMessage *messageValueStruct + returnMessage baseStruct } func (mf *messageValueField) generateAccessors(ms baseStruct, sb *strings.Builder) { @@ -242,8 +276,13 @@ func (mf *messageValueField) generateAccessors(ms baseStruct, sb *strings.Builde return mf.fieldName case "lowerFieldName": return strings.ToLower(mf.fieldName) + case "packageName": + if mf.returnMessage.getPackageName() != ms.getPackageName() { + return mf.returnMessage.getPackageName() + "." + } + return "" case "returnType": - return mf.returnMessage.structName + return mf.returnMessage.getName() case "originFieldName": return mf.originFieldName default: @@ -260,7 +299,12 @@ func (mf *messageValueField) generateAccessorsTest(ms baseStruct, sb *strings.Bu case "fieldName": return mf.fieldName case "returnType": - return mf.returnMessage.structName + return mf.returnMessage.getName() + case "packageName": + if mf.returnMessage.getPackageName() != ms.getPackageName() { + return mf.returnMessage.getPackageName() + "." + } + return "" default: panic(name) } @@ -268,10 +312,10 @@ func (mf *messageValueField) generateAccessorsTest(ms baseStruct, sb *strings.Bu } func (mf *messageValueField) generateSetWithTestValue(sb *strings.Builder) { - sb.WriteString("\tfillTest" + mf.returnMessage.structName + "(tv." + mf.fieldName + "())") + sb.WriteString("\tFillTest" + mf.returnMessage.getName() + "(New" + mf.returnMessage.getName() + "(&tv.orig." + mf.originFieldName + "))") } -func (mf *messageValueField) generateCopyToValue(sb *strings.Builder) { +func (mf *messageValueField) generateCopyToValue(_ baseStruct, sb *strings.Builder) { sb.WriteString("\tms." + mf.fieldName + "().CopyTo(dest." + mf.fieldName + "())") } @@ -290,6 +334,8 @@ func (pf *primitiveField) generateAccessors(ms baseStruct, sb *strings.Builder) switch name { case "structName": return ms.getName() + case "packageName": + return "" case "fieldName": return pf.fieldName case "lowerFieldName": @@ -309,6 +355,8 @@ func (pf *primitiveField) generateAccessorsTest(ms baseStruct, sb *strings.Build switch name { case "structName": return ms.getName() + case "packageName": + return "" case "defaultVal": return pf.defaultVal case "fieldName": @@ -322,23 +370,28 @@ func (pf *primitiveField) generateAccessorsTest(ms baseStruct, sb *strings.Build } func (pf *primitiveField) generateSetWithTestValue(sb *strings.Builder) { - sb.WriteString("\ttv.Set" + pf.fieldName + "(" + pf.testVal + ")") + sb.WriteString("\ttv.orig." 
+ pf.originFieldName + " = " + pf.testVal) } -func (pf *primitiveField) generateCopyToValue(sb *strings.Builder) { +func (pf *primitiveField) generateCopyToValue(_ baseStruct, sb *strings.Builder) { sb.WriteString("\tdest.Set" + pf.fieldName + "(ms." + pf.fieldName + "())") } var _ baseField = (*primitiveField)(nil) +type primitiveType struct { + structName string + packageName string + rawType string + defaultVal string + testVal string +} + // Types that has defined a custom type (e.g. "type Timestamp uint64") type primitiveTypedField struct { fieldName string originFieldName string - returnType string - defaultVal string - testVal string - rawType string + returnType *primitiveType } func (ptf *primitiveTypedField) generateAccessors(ms baseStruct, sb *strings.Builder) { @@ -351,9 +404,14 @@ func (ptf *primitiveTypedField) generateAccessors(ms baseStruct, sb *strings.Bui case "lowerFieldName": return strings.ToLower(ptf.fieldName) case "returnType": - return ptf.returnType + return ptf.returnType.structName + case "packageName": + if ptf.returnType.packageName != ms.getPackageName() { + return ptf.returnType.packageName + "." + } + return "" case "rawType": - return ptf.rawType + return ptf.returnType.rawType case "originFieldName": return ptf.originFieldName default: @@ -363,16 +421,23 @@ func (ptf *primitiveTypedField) generateAccessors(ms baseStruct, sb *strings.Bui } func (ptf *primitiveTypedField) generateAccessorsTest(ms baseStruct, sb *strings.Builder) { - sb.WriteString(os.Expand(accessorsPrimitiveTestTemplate, func(name string) string { + sb.WriteString(os.Expand(accessorsPrimitiveTypedTestTemplate, func(name string) string { switch name { case "structName": return ms.getName() case "defaultVal": - return ptf.defaultVal + return ptf.returnType.defaultVal + case "packageName": + if ptf.returnType.packageName != ms.getPackageName() { + return ptf.returnType.packageName + "." + } + return "" + case "returnType": + return ptf.returnType.structName case "fieldName": return ptf.fieldName case "testValue": - return ptf.testVal + return ptf.returnType.testVal default: panic(name) } @@ -380,10 +445,10 @@ func (ptf *primitiveTypedField) generateAccessorsTest(ms baseStruct, sb *strings } func (ptf *primitiveTypedField) generateSetWithTestValue(sb *strings.Builder) { - sb.WriteString("\ttv.Set" + ptf.fieldName + "(" + ptf.testVal + ")") + sb.WriteString("\ttv.orig." + ptf.originFieldName + " = " + ptf.returnType.testVal) } -func (ptf *primitiveTypedField) generateCopyToValue(sb *strings.Builder) { +func (ptf *primitiveTypedField) generateCopyToValue(_ baseStruct, sb *strings.Builder) { sb.WriteString("\tdest.Set" + ptf.fieldName + "(ms." + ptf.fieldName + "())") } @@ -391,11 +456,12 @@ var _ baseField = (*primitiveTypedField)(nil) // Types that has defined a custom type (e.g. 
"type TraceID struct {}") type primitiveStructField struct { - fieldName string - originFieldName string - returnType string - defaultVal string - testVal string + fieldName string + originFieldName string + returnStructName string + returnPackageName string + defaultVal string + testVal string } func (ptf *primitiveStructField) generateAccessors(ms baseStruct, sb *strings.Builder) { @@ -408,7 +474,12 @@ func (ptf *primitiveStructField) generateAccessors(ms baseStruct, sb *strings.Bu case "lowerFieldName": return strings.ToLower(ptf.fieldName) case "returnType": - return ptf.returnType + return ptf.returnStructName + case "packageName": + if ptf.returnPackageName != ms.getPackageName() { + return ptf.returnPackageName + "." + } + return "" case "originFieldName": return ptf.originFieldName default: @@ -418,12 +489,19 @@ func (ptf *primitiveStructField) generateAccessors(ms baseStruct, sb *strings.Bu } func (ptf *primitiveStructField) generateAccessorsTest(ms baseStruct, sb *strings.Builder) { - sb.WriteString(os.Expand(accessorsPrimitiveTestTemplate, func(name string) string { + sb.WriteString(os.Expand(accessorsPrimitiveStructTestTemplate, func(name string) string { switch name { case "structName": return ms.getName() case "defaultVal": return ptf.defaultVal + case "returnType": + return ptf.returnStructName + case "packageName": + if ptf.returnPackageName != ms.getPackageName() { + return ptf.returnPackageName + "." + } + return "" case "fieldName": return ptf.fieldName case "testValue": @@ -435,10 +513,10 @@ func (ptf *primitiveStructField) generateAccessorsTest(ms baseStruct, sb *string } func (ptf *primitiveStructField) generateSetWithTestValue(sb *strings.Builder) { - sb.WriteString("\ttv.Set" + ptf.fieldName + "(" + ptf.testVal + ")") + sb.WriteString("\ttv.orig." + ptf.originFieldName + " = " + ptf.testVal) } -func (ptf *primitiveStructField) generateCopyToValue(sb *strings.Builder) { +func (ptf *primitiveStructField) generateCopyToValue(_ baseStruct, sb *strings.Builder) { sb.WriteString("\tdest.Set" + ptf.fieldName + "(ms." + ptf.fieldName + "())") } @@ -446,12 +524,13 @@ var _ baseField = (*primitiveStructField)(nil) // primitiveSliceField is used to generate fields for slice of primitive types type primitiveSliceField struct { - fieldName string - originFieldName string - returnType string - defaultVal string - rawType string - testVal string + fieldName string + originFieldName string + returnPackageName string + returnType string + defaultVal string + rawType string + testVal string } func (psf *primitiveSliceField) generateAccessors(ms baseStruct, sb *strings.Builder) { @@ -465,6 +544,11 @@ func (psf *primitiveSliceField) generateAccessors(ms baseStruct, sb *strings.Bui return strings.ToLower(psf.fieldName) case "returnType": return psf.returnType + case "packageName": + if psf.returnPackageName != ms.getPackageName() { + return psf.returnPackageName + "." + } + return "" case "originFieldName": return psf.originFieldName default: @@ -474,10 +558,17 @@ func (psf *primitiveSliceField) generateAccessors(ms baseStruct, sb *strings.Bui } func (psf *primitiveSliceField) generateAccessorsTest(ms baseStruct, sb *strings.Builder) { - sb.WriteString(os.Expand(accessorsPrimitiveTestTemplate, func(name string) string { + sb.WriteString(os.Expand(accessorsPrimitiveStructTestTemplate, func(name string) string { switch name { case "structName": return ms.getName() + case "packageName": + if psf.returnPackageName != ms.getPackageName() { + return psf.returnPackageName + "." 
+ } + return "" + case "returnType": + return psf.returnType case "defaultVal": return psf.defaultVal case "fieldName": @@ -491,12 +582,14 @@ func (psf *primitiveSliceField) generateAccessorsTest(ms baseStruct, sb *strings } func (psf *primitiveSliceField) generateSetWithTestValue(sb *strings.Builder) { - sb.WriteString("\ttv.Set" + psf.fieldName + "(" + psf.testVal + ")") + sb.WriteString("\ttv.orig." + psf.originFieldName + " = " + psf.testVal) } -func (psf *primitiveSliceField) generateCopyToValue(sb *strings.Builder) { +func (psf *primitiveSliceField) generateCopyToValue(ms baseStruct, sb *strings.Builder) { sb.WriteString(os.Expand(copyToPrimitiveSliceTestTemplate, func(name string) string { switch name { + case "structName": + return ms.getName() case "originFieldName": return psf.originFieldName case "rawType": @@ -553,15 +646,6 @@ func (of *oneOfField) generateTypeAccessors(ms baseStruct, sb *strings.Builder) } func (of *oneOfField) generateAccessorsTest(ms baseStruct, sb *strings.Builder) { - of.generateTypeAccessorsTest(ms, sb) - sb.WriteString("\n") - for _, v := range of.values { - v.generateTests(ms.(*messageValueStruct), of, sb) - sb.WriteString("\n") - } -} - -func (of *oneOfField) generateTypeAccessorsTest(ms baseStruct, sb *strings.Builder) { sb.WriteString(os.Expand(oneOfTypeAccessorHeaderTestTemplate, func(name string) string { switch name { case "structName": @@ -576,23 +660,16 @@ func (of *oneOfField) generateTypeAccessorsTest(ms baseStruct, sb *strings.Build })) sb.WriteString("\n") for _, v := range of.values { - if mv, ok := v.(*oneOfMessageValue); ok { - sb.WriteString("\tassert.Equal(t, " + mv.fieldName + "{}, tv." + mv.fieldName + "())\n") - } - } - for _, v := range of.values { - v.generateSetWithTestValue(of, sb) - sb.WriteString("\n\tassert.Equal(t, " + of.typeName + v.getFieldType() + ", " + - "tv." + of.originFieldName + "Type())\n") + v.generateTests(ms, of, sb) + sb.WriteString("\n") } - sb.WriteString("}\n") } func (of *oneOfField) generateSetWithTestValue(sb *strings.Builder) { of.values[of.testValueIdx].generateSetWithTestValue(of, sb) } -func (of *oneOfField) generateCopyToValue(sb *strings.Builder) { +func (of *oneOfField) generateCopyToValue(_ baseStruct, sb *strings.Builder) { sb.WriteString("\tswitch ms." 
+ of.originFieldName + "Type() {\n") for _, v := range of.values { v.generateCopyToValue(of, sb) @@ -648,17 +725,23 @@ func (opv *oneOfPrimitiveValue) generateAccessors(ms baseStruct, of *oneOfField, sb.WriteString("\n") } -func (opv *oneOfPrimitiveValue) generateTests(ms baseStruct, _ *oneOfField, sb *strings.Builder) { - sb.WriteString(os.Expand(accessorsPrimitiveTestTemplate, func(name string) string { +func (opv *oneOfPrimitiveValue) generateTests(ms baseStruct, of *oneOfField, sb *strings.Builder) { + sb.WriteString(os.Expand(accessorsOneOfPrimitiveTestTemplate, func(name string) string { switch name { case "structName": return ms.getName() case "defaultVal": return opv.defaultVal + case "packageName": + return "" case "fieldName": return opv.fieldName case "testValue": return opv.testVal + case "originOneOfFieldName": + return of.originFieldName + case "typeName": + return of.typeName + opv.fieldType default: panic(name) } @@ -666,13 +749,13 @@ func (opv *oneOfPrimitiveValue) generateTests(ms baseStruct, _ *oneOfField, sb * sb.WriteString("\n") } -func (opv *oneOfPrimitiveValue) generateSetWithTestValue(_ *oneOfField, sb *strings.Builder) { - sb.WriteString("\t tv.Set" + opv.fieldName + "(" + opv.testVal + ")") +func (opv *oneOfPrimitiveValue) generateSetWithTestValue(of *oneOfField, sb *strings.Builder) { + sb.WriteString("\ttv.orig." + of.originFieldName + " = &" + of.originTypePrefix + opv.originFieldName + "{" + opv.originFieldName + ":" + opv.testVal + "}") } func (opv *oneOfPrimitiveValue) generateCopyToValue(of *oneOfField, sb *strings.Builder) { sb.WriteString("\tcase " + of.typeName + opv.fieldType + ":\n") - sb.WriteString("\t dest.Set" + opv.fieldName + "(ms." + opv.fieldName + "())\n") + sb.WriteString("\tdest.Set" + opv.fieldName + "(ms." + opv.fieldName + "())\n") } func (opv *oneOfPrimitiveValue) generateTypeSwitchCase(of *oneOfField, sb *strings.Builder) { @@ -683,9 +766,10 @@ func (opv *oneOfPrimitiveValue) generateTypeSwitchCase(of *oneOfField, sb *strin var _ oneOfValue = (*oneOfPrimitiveValue)(nil) type oneOfMessageValue struct { - fieldName string - originFieldName string - returnMessage *messageValueStruct + fieldName string + originFieldName string + originFieldPackageName string + returnMessage *messageValueStruct } func (omv *oneOfMessageValue) getFieldType() string { @@ -739,8 +823,8 @@ func (omv *oneOfMessageValue) generateTests(ms baseStruct, of *oneOfField, sb *s } func (omv *oneOfMessageValue) generateSetWithTestValue(of *oneOfField, sb *strings.Builder) { - sb.WriteString("tv.Set" + of.originFieldName + "Type(" + of.typeName + omv.returnMessage.structName + ")\n") - sb.WriteString("fillTest" + omv.returnMessage.structName + "(tv." + omv.fieldName + "())") + sb.WriteString("\ttv.orig." + of.originFieldName + " = &" + of.originTypePrefix + omv.originFieldName + "{" + omv.originFieldName + ": &" + omv.originFieldPackageName + "." 
+ omv.originFieldName + "{}}\n") + sb.WriteString("\tFillTest" + omv.returnMessage.structName + "(New" + omv.fieldName + "(tv.orig.Get" + omv.originFieldName + "()))") } func (omv *oneOfMessageValue) generateCopyToValue(of *oneOfField, sb *strings.Builder) { @@ -768,7 +852,6 @@ var _ oneOfValue = (*oneOfMessageValue)(nil) type optionalPrimitiveValue struct { fieldName string - fieldType string defaultVal string testVal string returnType string @@ -803,6 +886,8 @@ func (opv *optionalPrimitiveValue) generateAccessorsTest(ms baseStruct, sb *stri switch name { case "structName": return ms.getName() + case "packageName": + return "" case "defaultVal": return opv.defaultVal case "fieldName": @@ -817,10 +902,10 @@ func (opv *optionalPrimitiveValue) generateAccessorsTest(ms baseStruct, sb *stri } func (opv *optionalPrimitiveValue) generateSetWithTestValue(sb *strings.Builder) { - sb.WriteString("\t tv.Set" + opv.fieldName + "(" + opv.testVal + ")") + sb.WriteString("\ttv.orig." + opv.originFieldName + "_ = &" + opv.originTypePrefix + opv.originFieldName + "{" + opv.originFieldName + ":" + opv.testVal + "}") } -func (opv *optionalPrimitiveValue) generateCopyToValue(sb *strings.Builder) { +func (opv *optionalPrimitiveValue) generateCopyToValue(_ baseStruct, sb *strings.Builder) { sb.WriteString("if ms.Has" + opv.fieldName + "(){\n") sb.WriteString("\tdest.Set" + opv.fieldName + "(ms." + opv.fieldName + "())\n") sb.WriteString("}\n") diff --git a/pdata/internal/cmd/pdatagen/internal/base_slices.go b/pdata/internal/cmd/pdatagen/internal/base_slices.go index b1a9255ead4..2c2e64dd23b 100644 --- a/pdata/internal/cmd/pdatagen/internal/base_slices.go +++ b/pdata/internal/cmd/pdatagen/internal/base_slices.go @@ -23,20 +23,20 @@ const commonSliceTemplate = ` // MoveAndAppendTo moves all elements from the current slice and appends them to the dest. // The current slice will be cleared. func (es ${structName}) MoveAndAppendTo(dest ${structName}) { - if *dest.orig == nil { + if *dest.getOrig() == nil { // We can simply move the entire vector and avoid any allocations. - *dest.orig = *es.orig + *dest.getOrig() = *es.getOrig() } else { - *dest.orig = append(*dest.orig, *es.orig...) + *dest.getOrig() = append(*dest.getOrig(), *es.getOrig()...) } - *es.orig = nil + *es.getOrig() = nil } // RemoveIf calls f sequentially for each element present in the slice. // If f returns true, the element is removed from the slice. func (es ${structName}) RemoveIf(f func(${elementName}) bool) { newLen := 0 - for i := 0; i < len(*es.orig); i++ { + for i := 0; i < len(*es.getOrig()); i++ { if f(es.At(i)) { continue } @@ -45,37 +45,37 @@ func (es ${structName}) RemoveIf(f func(${elementName}) bool) { newLen++ continue } - (*es.orig)[newLen] = (*es.orig)[i] + (*es.getOrig())[newLen] = (*es.getOrig())[i] newLen++ } // TODO: Prevent memory leak by erasing truncated values. 
- *es.orig = (*es.orig)[:newLen] + *es.getOrig() = (*es.getOrig())[:newLen] }` const commonSliceTestTemplate = ` func Test${structName}_MoveAndAppendTo(t *testing.T) { // Test MoveAndAppendTo to empty - expectedSlice := generateTest${structName}() + expectedSlice := ${structName}(internal.GenerateTest${structName}()) dest := New${structName}() - src := generateTest${structName}() + src := ${structName}(internal.GenerateTest${structName}()) src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTest${structName}(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) + assert.Equal(t, ${structName}(internal.GenerateTest${structName}()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) // Test MoveAndAppendTo empty slice src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTest${structName}(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) + assert.Equal(t, ${structName}(internal.GenerateTest${structName}()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) // Test MoveAndAppendTo not empty slice - generateTest${structName}().MoveAndAppendTo(dest) - assert.EqualValues(t, 2*expectedSlice.Len(), dest.Len()) + ${structName}(internal.GenerateTest${structName}()).MoveAndAppendTo(dest) + assert.Equal(t, 2*expectedSlice.Len(), dest.Len()) for i := 0; i < expectedSlice.Len(); i++ { - assert.EqualValues(t, expectedSlice.At(i), dest.At(i)) - assert.EqualValues(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) + assert.Equal(t, expectedSlice.At(i), dest.At(i)) + assert.Equal(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) } } @@ -88,7 +88,7 @@ func Test${structName}_RemoveIf(t *testing.T) { }) // Test RemoveIf - filtered := generateTest${structName}() + filtered := ${structName}(internal.GenerateTest${structName}()) pos := 0 filtered.RemoveIf(func(el ${elementName}) bool { pos++ @@ -97,33 +97,6 @@ func Test${structName}_RemoveIf(t *testing.T) { assert.Equal(t, 5, filtered.Len()) }` -const commonSliceGenerateTest = `func generateTest${structName}() ${structName} { - tv := New${structName}() - fillTest${structName}(tv) - return tv -} - -func fillTest${structName}(tv ${structName}) { - l := 7 - tv.EnsureCapacity(l) - for i := 0; i < l; i++ { - fillTest${elementName}(tv.AppendEmpty()) - } -}` - -const commonSliceAliasTemplate = `// ${structName} logically represents a slice of ${elementName}. -// -// This is a reference type. If passed by value and callee modifies it, the -// caller will see the modification. -// -// Must use New${structName} function to create new instances. -// Important: zero-initialized instance is not valid for use. -type ${structName} = internal.${structName} - -// New${structName} creates a ${structName} with 0 elements. -// Can use "EnsureCapacity" to initialize with a given capacity. -var New${structName} = internal.New${structName}` - const slicePtrTemplate = `// ${structName} logically represents a slice of ${elementName}. // // This is a reference type. If passed by value and callee modifies it, the @@ -131,28 +104,28 @@ const slicePtrTemplate = `// ${structName} logically represents a slice of ${ele // // Must use New${structName} function to create new instances. // Important: zero-initialized instance is not valid for use. -type ${structName} struct { - // orig points to the slice ${originName} field contained somewhere else. 
- // We use pointer-to-slice to be able to modify it in functions like EnsureCapacity. - orig *[]*${originName} -} +type ${structName} internal.${structName} func new${structName}(orig *[]*${originName}) ${structName} { - return ${structName}{orig} + return ${structName}(internal.New${structName}(orig)) +} + +func (ms ${structName}) getOrig() *[]*${originName} { + return internal.GetOrig${structName}(internal.${structName}(ms)) } // New${structName} creates a ${structName} with 0 elements. // Can use "EnsureCapacity" to initialize with a given capacity. func New${structName}() ${structName} { orig := []*${originName}(nil) - return ${structName}{&orig} + return new${structName}(&orig) } // Len returns the number of elements in the slice. // // Returns "0" for a newly instance created with "New${structName}()". func (es ${structName}) Len() int { - return len(*es.orig) + return len(*es.getOrig()) } // At returns the element at the given index. @@ -163,27 +136,27 @@ func (es ${structName}) Len() int { // ... // Do something with the element // } func (es ${structName}) At(ix int) ${elementName} { - return new${elementName}((*es.orig)[ix]) + return new${elementName}((*es.getOrig())[ix]) } // CopyTo copies all elements from the current slice to the dest. func (es ${structName}) CopyTo(dest ${structName}) { srcLen := es.Len() - destCap := cap(*dest.orig) + destCap := cap(*dest.getOrig()) if srcLen <= destCap { - (*dest.orig) = (*dest.orig)[:srcLen:destCap] - for i := range *es.orig { - new${elementName}((*es.orig)[i]).CopyTo(new${elementName}((*dest.orig)[i])) + (*dest.getOrig()) = (*dest.getOrig())[:srcLen:destCap] + for i := range *es.getOrig() { + new${elementName}((*es.getOrig())[i]).CopyTo(new${elementName}((*dest.getOrig())[i])) } return } origs := make([]${originName}, srcLen) wrappers := make([]*${originName}, srcLen) - for i := range *es.orig { + for i := range *es.getOrig() { wrappers[i] = &origs[i] - new${elementName}((*es.orig)[i]).CopyTo(new${elementName}(wrappers[i])) + new${elementName}((*es.getOrig())[i]).CopyTo(new${elementName}(wrappers[i])) } - *dest.orig = wrappers + *dest.getOrig() = wrappers } // EnsureCapacity is an operation that ensures the slice has at least the specified capacity. @@ -198,20 +171,20 @@ func (es ${structName}) CopyTo(dest ${structName}) { // // Here should set all the values for e. // } func (es ${structName}) EnsureCapacity(newCap int) { - oldCap := cap(*es.orig) + oldCap := cap(*es.getOrig()) if newCap <= oldCap { return } - newOrig := make([]*${originName}, len(*es.orig), newCap) - copy(newOrig, *es.orig) - *es.orig = newOrig + newOrig := make([]*${originName}, len(*es.getOrig()), newCap) + copy(newOrig, *es.getOrig()) + *es.getOrig() = newOrig } // AppendEmpty will append to the end of the slice an empty ${elementName}. // It returns the newly added ${elementName}. 
func (es ${structName}) AppendEmpty() ${elementName} { - *es.orig = append(*es.orig, &${originName}{}) + *es.getOrig() = append(*es.getOrig(), &${originName}{}) return es.At(es.Len() - 1) } @@ -223,28 +196,28 @@ func (es ${structName}) AppendEmpty() ${elementName} { // lessFunc := func(a, b ${elementName}) bool { // return a.Name() < b.Name() // choose any comparison here // } -// assert.EqualValues(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) +// assert.Equal(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) func (es ${structName}) Sort(less func(a, b ${elementName}) bool) ${structName} { - sort.SliceStable(*es.orig, func(i, j int) bool { return less(es.At(i), es.At(j)) }) + sort.SliceStable(*es.getOrig(), func(i, j int) bool { return less(es.At(i), es.At(j)) }) return es } ` const slicePtrTestTemplate = `func Test${structName}(t *testing.T) { es := New${structName}() - assert.EqualValues(t, 0, es.Len()) + assert.Equal(t, 0, es.Len()) es = new${structName}(&[]*${originName}{}) - assert.EqualValues(t, 0, es.Len()) + assert.Equal(t, 0, es.Len()) es.EnsureCapacity(7) emptyVal := new${elementName}(&${originName}{}) - testVal := generateTest${elementName}() - assert.EqualValues(t, 7, cap(*es.orig)) + testVal := ${elementName}(internal.GenerateTest${elementName}()) + assert.Equal(t, 7, cap(*es.getOrig())) for i := 0; i < es.Len(); i++ { el := es.AppendEmpty() - assert.EqualValues(t, emptyVal, el) - fillTest${elementName}(el) - assert.EqualValues(t, testVal, el) + assert.Equal(t, emptyVal, el) + internal.FillTest${elementName}(internal.${elementName}(el)) + assert.Equal(t, testVal, el) } } @@ -252,49 +225,77 @@ func Test${structName}_CopyTo(t *testing.T) { dest := New${structName}() // Test CopyTo to empty New${structName}().CopyTo(dest) - assert.EqualValues(t, New${structName}(), dest) + assert.Equal(t, New${structName}(), dest) // Test CopyTo larger slice - generateTest${structName}().CopyTo(dest) - assert.EqualValues(t, generateTest${structName}(), dest) + ${structName}(internal.GenerateTest${structName}()).CopyTo(dest) + assert.Equal(t, ${structName}(internal.GenerateTest${structName}()), dest) // Test CopyTo same size slice - generateTest${structName}().CopyTo(dest) - assert.EqualValues(t, generateTest${structName}(), dest) + ${structName}(internal.GenerateTest${structName}()).CopyTo(dest) + assert.Equal(t, ${structName}(internal.GenerateTest${structName}()), dest) } func Test${structName}_EnsureCapacity(t *testing.T) { - es := generateTest${structName}() + es := ${structName}(internal.GenerateTest${structName}()) // Test ensure smaller capacity. 
const ensureSmallLen = 4 expectedEs := make(map[*${originName}]bool) for i := 0; i < es.Len(); i++ { - expectedEs[es.At(i).orig] = true + expectedEs[es.At(i).getOrig()] = true } assert.Equal(t, es.Len(), len(expectedEs)) es.EnsureCapacity(ensureSmallLen) assert.Less(t, ensureSmallLen, es.Len()) foundEs := make(map[*${originName}]bool, es.Len()) for i := 0; i < es.Len(); i++ { - foundEs[es.At(i).orig] = true + foundEs[es.At(i).getOrig()] = true } - assert.EqualValues(t, expectedEs, foundEs) + assert.Equal(t, expectedEs, foundEs) // Test ensure larger capacity const ensureLargeLen = 9 oldLen := es.Len() expectedEs = make(map[*${originName}]bool, oldLen) for i := 0; i < oldLen; i++ { - expectedEs[es.At(i).orig] = true + expectedEs[es.At(i).getOrig()] = true } assert.Equal(t, oldLen, len(expectedEs)) es.EnsureCapacity(ensureLargeLen) - assert.Equal(t, ensureLargeLen, cap(*es.orig)) + assert.Equal(t, ensureLargeLen, cap(*es.getOrig())) foundEs = make(map[*${originName}]bool, oldLen) for i := 0; i < oldLen; i++ { - foundEs[es.At(i).orig] = true + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) +}` + +const slicePtrGenerateTest = `func GenerateTest${structName}() ${structName} { + orig := []*${originName}{} + tv := New${structName}(&orig) + FillTest${structName}(tv) + return tv +} + +func FillTest${structName}(tv ${structName}) { + *tv.orig = make([]*${originName}, 7) + for i := 0; i < 7; i++ { + (*tv.orig)[i] = &${originName}{} + FillTest${elementName}(New${elementName}((*tv.orig)[i])) } - assert.EqualValues(t, expectedEs, foundEs) +}` + +const slicePtrInternalTemplate = ` +type ${structName} struct { + orig *[]*${originName} +} + +func GetOrig${structName}(ms ${structName}) *[]*${originName} { + return ms.orig +} + +func New${structName}(orig *[]*${originName}) ${structName} { + return ${structName}{orig: orig} }` const sliceValueTemplate = `// ${structName} logically represents a slice of ${elementName}. @@ -304,28 +305,28 @@ const sliceValueTemplate = `// ${structName} logically represents a slice of ${e // // Must use New${structName} function to create new instances. // Important: zero-initialized instance is not valid for use. -type ${structName} struct { - // orig points to the slice ${originName} field contained somewhere else. - // We use pointer-to-slice to be able to modify it in functions like EnsureCapacity. - orig *[]${originName} -} +type ${structName} internal.${structName} func new${structName}(orig *[]${originName}) ${structName} { - return ${structName}{orig} + return ${structName}(internal.New${structName}(orig)) +} + +func (ms ${structName}) getOrig() *[]${originName} { + return internal.GetOrig${structName}(internal.${structName}(ms)) } // New${structName} creates a ${structName} with 0 elements. // Can use "EnsureCapacity" to initialize with a given capacity. func New${structName}() ${structName} { orig := []${originName}(nil) - return ${structName}{&orig} + return ${structName}(internal.New${structName}(&orig)) } // Len returns the number of elements in the slice. // // Returns "0" for a newly instance created with "New${structName}()". func (es ${structName}) Len() int { - return len(*es.orig) + return len(*es.getOrig()) } // At returns the element at the given index. @@ -336,21 +337,21 @@ func (es ${structName}) Len() int { // ... 
// Do something with the element // } func (es ${structName}) At(ix int) ${elementName} { - return new${elementName}(&(*es.orig)[ix]) + return new${elementName}(&(*es.getOrig())[ix]) } // CopyTo copies all elements from the current slice to the dest. func (es ${structName}) CopyTo(dest ${structName}) { srcLen := es.Len() - destCap := cap(*dest.orig) + destCap := cap(*dest.getOrig()) if srcLen <= destCap { - (*dest.orig) = (*dest.orig)[:srcLen:destCap] + (*dest.getOrig()) = (*dest.getOrig())[:srcLen:destCap] } else { - (*dest.orig) = make([]${originName}, srcLen) + (*dest.getOrig()) = make([]${originName}, srcLen) } - for i := range *es.orig { - new${elementName}(&(*es.orig)[i]).CopyTo(new${elementName}(&(*dest.orig)[i])) + for i := range *es.getOrig() { + new${elementName}(&(*es.getOrig())[i]).CopyTo(new${elementName}(&(*dest.getOrig())[i])) } } @@ -366,38 +367,38 @@ func (es ${structName}) CopyTo(dest ${structName}) { // // Here should set all the values for e. // } func (es ${structName}) EnsureCapacity(newCap int) { - oldCap := cap(*es.orig) + oldCap := cap(*es.getOrig()) if newCap <= oldCap { return } - newOrig := make([]${originName}, len(*es.orig), newCap) - copy(newOrig, *es.orig) - *es.orig = newOrig + newOrig := make([]${originName}, len(*es.getOrig()), newCap) + copy(newOrig, *es.getOrig()) + *es.getOrig() = newOrig } // AppendEmpty will append to the end of the slice an empty ${elementName}. // It returns the newly added ${elementName}. func (es ${structName}) AppendEmpty() ${elementName} { - *es.orig = append(*es.orig, ${originName}{}) + *es.getOrig() = append(*es.getOrig(), ${originName}{}) return es.At(es.Len() - 1) }` const sliceValueTestTemplate = `func Test${structName}(t *testing.T) { es := New${structName}() - assert.EqualValues(t, 0, es.Len()) + assert.Equal(t, 0, es.Len()) es = new${structName}(&[]${originName}{}) - assert.EqualValues(t, 0, es.Len()) + assert.Equal(t, 0, es.Len()) es.EnsureCapacity(7) emptyVal := new${elementName}(&${originName}{}) - testVal := generateTest${elementName}() - assert.EqualValues(t, 7, cap(*es.orig)) + testVal := ${elementName}(internal.GenerateTest${elementName}()) + assert.Equal(t, 7, cap(*es.getOrig())) for i := 0; i < es.Len(); i++ { el := es.AppendEmpty() - assert.EqualValues(t, emptyVal, el) - fillTest${elementName}(el) - assert.EqualValues(t, testVal, el) + assert.Equal(t, emptyVal, el) + internal.FillTest${elementName}(internal.${elementName}(el)) + assert.Equal(t, testVal, el) } } @@ -405,56 +406,89 @@ func Test${structName}_CopyTo(t *testing.T) { dest := New${structName}() // Test CopyTo to empty New${structName}().CopyTo(dest) - assert.EqualValues(t, New${structName}(), dest) + assert.Equal(t, New${structName}(), dest) // Test CopyTo larger slice - generateTest${structName}().CopyTo(dest) - assert.EqualValues(t, generateTest${structName}(), dest) + ${structName}(internal.GenerateTest${structName}()).CopyTo(dest) + assert.Equal(t, ${structName}(internal.GenerateTest${structName}()), dest) // Test CopyTo same size slice - generateTest${structName}().CopyTo(dest) - assert.EqualValues(t, generateTest${structName}(), dest) + ${structName}(internal.GenerateTest${structName}()).CopyTo(dest) + assert.Equal(t, ${structName}(internal.GenerateTest${structName}()), dest) } func Test${structName}_EnsureCapacity(t *testing.T) { - es := generateTest${structName}() + es := ${structName}(internal.GenerateTest${structName}()) // Test ensure smaller capacity. 
const ensureSmallLen = 4 expectedEs := make(map[*${originName}]bool) for i := 0; i < es.Len(); i++ { - expectedEs[es.At(i).orig] = true + expectedEs[es.At(i).getOrig()] = true } assert.Equal(t, es.Len(), len(expectedEs)) es.EnsureCapacity(ensureSmallLen) assert.Less(t, ensureSmallLen, es.Len()) foundEs := make(map[*${originName}]bool, es.Len()) for i := 0; i < es.Len(); i++ { - foundEs[es.At(i).orig] = true + foundEs[es.At(i).getOrig()] = true } - assert.EqualValues(t, expectedEs, foundEs) + assert.Equal(t, expectedEs, foundEs) // Test ensure larger capacity const ensureLargeLen = 9 oldLen := es.Len() assert.Equal(t, oldLen, len(expectedEs)) es.EnsureCapacity(ensureLargeLen) - assert.Equal(t, ensureLargeLen, cap(*es.orig)) + assert.Equal(t, ensureLargeLen, cap(*es.getOrig())) +}` + +const sliceValueGenerateTest = `func GenerateTest${structName}() ${structName} { + orig := []${originName}{} + tv := New${structName}(&orig) + FillTest${structName}(tv) + return tv +} + +func FillTest${structName}(tv ${structName}) { + *tv.orig = make([]${originName}, 7) + for i := 0; i < 7; i++ { + FillTest${elementName}(New${elementName}(&(*tv.orig)[i])) + } +}` + +const sliceValueInternalTemplate = ` +type ${structName} struct { + orig *[]${originName} +} + +func GetOrig${structName}(ms ${structName}) *[]${originName} { + return ms.orig +} + +func New${structName}(orig *[]${originName}) ${structName} { + return ${structName}{orig: orig} }` type baseSlice interface { getName() string + getPackageName() string } // Will generate code only for a slice of pointer fields. type sliceOfPtrs struct { - structName string - element *messageValueStruct + structName string + packageName string + element *messageValueStruct } func (ss *sliceOfPtrs) getName() string { return ss.structName } +func (ss *sliceOfPtrs) getPackageName() string { + return ss.packageName +} + func (ss *sliceOfPtrs) generateStruct(sb *strings.Builder) { sb.WriteString(os.Expand(slicePtrTemplate, ss.templateFields())) sb.WriteString(os.Expand(commonSliceTemplate, ss.templateFields())) @@ -466,7 +500,7 @@ func (ss *sliceOfPtrs) generateTests(sb *strings.Builder) { } func (ss *sliceOfPtrs) generateTestValueHelpers(sb *strings.Builder) { - sb.WriteString(os.Expand(commonSliceGenerateTest, ss.templateFields())) + sb.WriteString(os.Expand(slicePtrGenerateTest, ss.templateFields())) } func (ss *sliceOfPtrs) templateFields() func(name string) string { @@ -484,13 +518,13 @@ func (ss *sliceOfPtrs) templateFields() func(name string) string { } } -func (ss *sliceOfPtrs) generateAlias(sb *strings.Builder) { - sb.WriteString(os.Expand(commonSliceAliasTemplate, func(name string) string { +func (ss *sliceOfPtrs) generateInternal(sb *strings.Builder) { + sb.WriteString(os.Expand(slicePtrInternalTemplate, func(name string) string { switch name { case "structName": return ss.structName - case "elementName": - return ss.element.structName + case "originName": + return ss.element.originFullName default: panic(name) } @@ -502,14 +536,19 @@ var _ baseStruct = (*sliceOfPtrs)(nil) // Will generate code only for a slice of value fields. 
type sliceOfValues struct { - structName string - element *messageValueStruct + structName string + packageName string + element *messageValueStruct } func (ss *sliceOfValues) getName() string { return ss.structName } +func (ss *sliceOfValues) getPackageName() string { + return ss.packageName +} + func (ss *sliceOfValues) generateStruct(sb *strings.Builder) { sb.WriteString(os.Expand(sliceValueTemplate, ss.templateFields())) sb.WriteString(os.Expand(commonSliceTemplate, ss.templateFields())) @@ -521,7 +560,7 @@ func (ss *sliceOfValues) generateTests(sb *strings.Builder) { } func (ss *sliceOfValues) generateTestValueHelpers(sb *strings.Builder) { - sb.WriteString(os.Expand(commonSliceGenerateTest, ss.templateFields())) + sb.WriteString(os.Expand(sliceValueGenerateTest, ss.templateFields())) } func (ss *sliceOfValues) templateFields() func(name string) string { @@ -539,13 +578,13 @@ func (ss *sliceOfValues) templateFields() func(name string) string { } } -func (ss *sliceOfValues) generateAlias(sb *strings.Builder) { - sb.WriteString(os.Expand(commonSliceAliasTemplate, func(name string) string { +func (ss *sliceOfValues) generateInternal(sb *strings.Builder) { + sb.WriteString(os.Expand(sliceValueInternalTemplate, func(name string) string { switch name { case "structName": return ss.structName - case "elementName": - return ss.element.structName + case "originName": + return ss.element.originFullName default: panic(name) } diff --git a/pdata/internal/cmd/pdatagen/internal/base_structs.go b/pdata/internal/cmd/pdatagen/internal/base_structs.go index 060a9dbebe8..a47dbe19350 100644 --- a/pdata/internal/cmd/pdatagen/internal/base_structs.go +++ b/pdata/internal/cmd/pdatagen/internal/base_structs.go @@ -26,12 +26,15 @@ const messageValueTemplate = `${description} // // Must use New${structName} function to create new instances. // Important: zero-initialized instance is not valid for use. -type ${structName} struct { - orig *${originName} -} + +type ${structName} internal.${structName} func new${structName}(orig *${originName}) ${structName} { - return ${structName}{orig: orig} + return ${structName}(internal.New${structName}(orig)) +} + +func (ms ${structName}) getOrig() *${originName} { + return internal.GetOrig${structName}(internal.${structName}(ms)) } // New${structName} creates a new empty ${structName}. @@ -45,8 +48,8 @@ func New${structName}() ${structName} { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms ${structName}) MoveTo(dest ${structName}) { - *dest.orig = *ms.orig - *ms.orig = ${originName}{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = ${originName}{} }` const messageValueCopyToHeaderTemplate = `// CopyTo copies all properties from the current struct to the dest. 
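
A quick self-contained sketch of the mechanism the message template above relies on: the public type is a Go type definition (not an alias) over the internal wrapper, so explicit conversions between the two are free, and accessors reach the origin through an exported helper rather than a field. Everything below is a toy, single-package stand-in with made-up names, not the generated code itself; in the real output the wrapper lives in `internal`, which is what keeps `orig` out of reach of API users.

```golang
package main

import "fmt"

// Toy stand-ins: origExample plays the role of the otlp struct,
// wrapperExample the internal wrapper, PublicExample the pdata type.
type origExample struct{ Name string }

type wrapperExample struct{ orig *origExample }

func getOrigWrapperExample(ms wrapperExample) *origExample { return ms.orig }

func newWrapperExample(orig *origExample) wrapperExample { return wrapperExample{orig: orig} }

// PublicExample mirrors the generated pattern: a type definition, so it is a
// distinct type that converts to and from wrapperExample at zero cost.
type PublicExample wrapperExample

func (ms PublicExample) getOrig() *origExample {
	return getOrigWrapperExample(wrapperExample(ms))
}

// MoveTo follows the generated shape: copy the origin, reset the source.
func (ms PublicExample) MoveTo(dest PublicExample) {
	*dest.getOrig() = *ms.getOrig()
	*ms.getOrig() = origExample{}
}

func main() {
	src := PublicExample(newWrapperExample(&origExample{Name: "hello"}))
	dest := PublicExample(newWrapperExample(&origExample{}))
	src.MoveTo(dest)
	fmt.Println(dest.getOrig().Name, src.getOrig().Name == "") // hello true
}
```
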
@@ -56,62 +59,65 @@ const messageValueCopyToFooterTemplate = `}` const messageValueTestTemplate = ` func Test${structName}_MoveTo(t *testing.T) { - ms := generateTest${structName}() + ms := ${structName}(internal.GenerateTest${structName}()) dest := New${structName}() ms.MoveTo(dest) - assert.EqualValues(t, New${structName}(), ms) - assert.EqualValues(t, generateTest${structName}(), dest) + assert.Equal(t, New${structName}(), ms) + assert.Equal(t, ${structName}(internal.GenerateTest${structName}()), dest) } func Test${structName}_CopyTo(t *testing.T) { ms := New${structName}() orig := New${structName}() orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTest${structName}() + assert.Equal(t, orig, ms) + orig = ${structName}(internal.GenerateTest${structName}()) orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) + assert.Equal(t, orig, ms) }` -const messageValueGenerateTestTemplate = `func generateTest${structName}() ${structName} { - tv := New${structName}() - fillTest${structName}(tv) +const messageValueGenerateTestTemplate = `func GenerateTest${structName}() ${structName} { + orig := ${originName}{} + tv := New${structName}(&orig) + FillTest${structName}(tv) return tv }` -const messageValueFillTestHeaderTemplate = `func fillTest${structName}(tv ${structName}) {` +const messageValueFillTestHeaderTemplate = `func FillTest${structName}(tv ${structName}) {` const messageValueFillTestFooterTemplate = `}` -const messageValueAliasTemplate = `${description} -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use New${structName} function to create new instances. -// Important: zero-initialized instance is not valid for use. -type ${structName} = internal.${structName} +const messageValueAliasTemplate = ` +type ${structName} struct { + orig *${originName} +} -// New${structName} is an alias for a function to create a new empty ${structName}. 
-var New${structName} = internal.New${structName}` +func GetOrig${structName}(ms ${structName}) *${originName} { + return ms.orig +} + +func New${structName}(orig *${originName}) ${structName} { + return ${structName}{orig: orig} +}` const newLine = "\n" type baseStruct interface { getName() string + getPackageName() string + generateStruct(sb *strings.Builder) generateTests(sb *strings.Builder) generateTestValueHelpers(sb *strings.Builder) -} -type aliasGenerator interface { - generateAlias(sb *strings.Builder) + generateInternal(sb *strings.Builder) } type messageValueStruct struct { structName string + packageName string description string originFullName string fields []baseField @@ -121,6 +127,10 @@ func (ms *messageValueStruct) getName() string { return ms.structName } +func (ms *messageValueStruct) getPackageName() string { + return ms.packageName +} + func (ms *messageValueStruct) generateStruct(sb *strings.Builder) { sb.WriteString(os.Expand(messageValueTemplate, func(name string) string { switch name { @@ -151,7 +161,7 @@ func (ms *messageValueStruct) generateStruct(sb *strings.Builder) { // Write accessors CopyTo for the struct for _, f := range ms.fields { sb.WriteString(newLine) - f.generateCopyToValue(sb) + f.generateCopyToValue(ms, sb) } sb.WriteString(newLine) sb.WriteString(os.Expand(messageValueCopyToFooterTemplate, func(name string) string { @@ -207,13 +217,13 @@ func (ms *messageValueStruct) generateTestValueHelpers(sb *strings.Builder) { })) } -func (ms *messageValueStruct) generateAlias(sb *strings.Builder) { +func (ms *messageValueStruct) generateInternal(sb *strings.Builder) { sb.WriteString(os.Expand(messageValueAliasTemplate, func(name string) string { switch name { case "structName": return ms.structName - case "description": - return ms.description + case "originName": + return ms.originFullName default: panic(name) } diff --git a/pdata/internal/cmd/pdatagen/internal/common_structs.go b/pdata/internal/cmd/pdatagen/internal/common_structs.go index e96484b452d..11217e4feff 100644 --- a/pdata/internal/cmd/pdatagen/internal/common_structs.go +++ b/pdata/internal/cmd/pdatagen/internal/common_structs.go @@ -15,8 +15,8 @@ package internal // import "go.opentelemetry.io/collector/pdata/internal/cmd/pdatagen/internal" var commonFile = &File{ - Name: "common", - IsCommon: true, + Name: "common", + PackageName: "pcommon", imports: []string{ `otlpcommon "go.opentelemetry.io/collector/pdata/internal/data/protogen/common/v1"`, }, @@ -25,6 +25,7 @@ var commonFile = &File{ ``, `"github.com/stretchr/testify/assert"`, ``, + `"go.opentelemetry.io/collector/pdata/internal"`, `otlpcommon "go.opentelemetry.io/collector/pdata/internal/data/protogen/common/v1"`, }, structs: []baseStruct{ @@ -35,6 +36,7 @@ var commonFile = &File{ var scope = &messageValueStruct{ structName: "InstrumentationScope", + packageName: "pcommon", description: "// InstrumentationScope is a message representing the instrumentation scope information.", originFullName: "otlpcommon.InstrumentationScope", fields: []baseField{ @@ -54,11 +56,15 @@ var scope = &messageValueStruct{ // This will not be generated by this class. // Defined here just to be available as returned message for the fields. 
var mapStruct = &sliceOfPtrs{ - structName: "Map", - element: attributeKeyValue, + structName: "Map", + packageName: "pcommon", + element: attributeKeyValue, } -var attributeKeyValue = &messageValueStruct{} +var attributeKeyValue = &messageValueStruct{ + structName: "Map", + packageName: "pcommon", +} var scopeField = &messageValueField{ fieldName: "Scope", @@ -66,31 +72,30 @@ var scopeField = &messageValueField{ returnMessage: scope, } +var timestampType = &primitiveType{ + structName: "Timestamp", + packageName: "pcommon", + rawType: "uint64", + defaultVal: "0", + testVal: "1234567890", +} + var startTimeField = &primitiveTypedField{ fieldName: "StartTimestamp", originFieldName: "StartTimeUnixNano", - returnType: "Timestamp", - rawType: "uint64", - defaultVal: "Timestamp(0)", - testVal: "Timestamp(1234567890)", + returnType: timestampType, } var timeField = &primitiveTypedField{ fieldName: "Timestamp", originFieldName: "TimeUnixNano", - returnType: "Timestamp", - rawType: "uint64", - defaultVal: "Timestamp(0)", - testVal: "Timestamp(1234567890)", + returnType: timestampType, } var endTimeField = &primitiveTypedField{ fieldName: "EndTimestamp", originFieldName: "EndTimeUnixNano", - returnType: "Timestamp", - rawType: "uint64", - defaultVal: "Timestamp(0)", - testVal: "Timestamp(1234567890)", + returnType: timestampType, } var attributes = &sliceField{ @@ -109,6 +114,7 @@ var nameField = &primitiveField{ var anyValue = &messageValueStruct{ structName: "Value", + packageName: "pcommon", originFullName: "otlpcommon.AnyValue", } @@ -117,6 +123,33 @@ var attributeValueSlice = &sliceOfValues{ element: anyValue, } +var traceIDField = &primitiveStructField{ + fieldName: "TraceID", + originFieldName: "TraceId", + returnStructName: "TraceID", + returnPackageName: "pcommon", + defaultVal: "data.NewTraceID([16]byte{})", + testVal: "data.NewTraceID([16]byte{1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1})", +} + +var spanIDField = &primitiveStructField{ + fieldName: "SpanID", + originFieldName: "SpanId", + returnStructName: "SpanID", + returnPackageName: "pcommon", + defaultVal: "data.NewSpanID([8]byte{})", + testVal: "data.NewSpanID([8]byte{1, 2, 3, 4, 5, 6, 7, 8})", +} + +var parentSpanIDField = &primitiveStructField{ + fieldName: "ParentSpanID", + originFieldName: "ParentSpanId", + returnStructName: "SpanID", + returnPackageName: "pcommon", + defaultVal: "data.NewSpanID([8]byte{})", + testVal: "data.NewSpanID([8]byte{8, 7, 6, 5, 4, 3, 2, 1})", +} + var schemaURLField = &primitiveField{ fieldName: "SchemaUrl", originFieldName: "SchemaUrl", diff --git a/pdata/internal/cmd/pdatagen/internal/files.go b/pdata/internal/cmd/pdatagen/internal/files.go index 075cbd88c13..0e35e27afbe 100644 --- a/pdata/internal/cmd/pdatagen/internal/files.go +++ b/pdata/internal/cmd/pdatagen/internal/files.go @@ -14,7 +14,11 @@ package internal // import "go.opentelemetry.io/collector/pdata/internal/cmd/pdatagen/internal" -import "strings" +import ( + "os" + "path/filepath" + "strings" +) const header = `// Copyright The OpenTelemetry Authors // @@ -46,7 +50,7 @@ var AllFiles = []*File{ // File represents the struct for one generated file. type File struct { Name string - IsCommon bool + PackageName string imports []string testImports []string // Can be any of sliceOfPtrs, sliceOfValues, messageValueStruct, or messagePtrStruct @@ -54,10 +58,14 @@ type File struct { } // GenerateFile generates the configured data structures for this File. 
-func (f *File) GenerateFile() string { - var sb strings.Builder +func (f *File) GenerateFile() error { + fp, err := os.Create(filepath.Join(".", "pdata", f.PackageName, generateFileName(f.Name))) + if err != nil { + return err + } - generateHeader(&sb, "internal") + var sb strings.Builder + generateHeader(&sb, f.PackageName) // Add imports sb.WriteString("import (" + newLine) @@ -76,14 +84,23 @@ func (f *File) GenerateFile() string { s.generateStruct(&sb) } sb.WriteString(newLine) - return sb.String() + + _, err = fp.WriteString(sb.String()) + if err != nil { + return err + } + return fp.Close() } // GenerateTestFile generates tests for the configured data structures for this File. -func (f *File) GenerateTestFile() string { - var sb strings.Builder +func (f *File) GenerateTestFile() error { + fp, err := os.Create(filepath.Join(".", "pdata", f.PackageName, generateTestFileName(f.Name))) + if err != nil { + return err + } - generateHeader(&sb, "internal") + var sb strings.Builder + generateHeader(&sb, f.PackageName) // Add imports sb.WriteString("import (" + newLine) @@ -102,32 +119,64 @@ func (f *File) GenerateTestFile() string { s.generateTests(&sb) } - // Write all tests generate value - for _, s := range f.structs { - sb.WriteString(newLine + newLine) - s.generateTestValueHelpers(&sb) + _, err = fp.WriteString(sb.String()) + if err != nil { + return err } - sb.WriteString(newLine) - return sb.String() + return fp.Close() } -// GenerateFile generates the aliases for data structures for this File. -func (f *File) GenerateAliasFile(packageName string) string { - var sb strings.Builder +// GenerateInternalFile generates the internal pdata structures for this File. +func (f *File) GenerateInternalFile() error { + fp, err := os.Create(filepath.Join(".", "pdata", "internal", generateInternalFileName(f.Name))) + if err != nil { + return err + } - generateHeader(&sb, packageName) + var sb strings.Builder + generateHeader(&sb, "internal") - // Add import - sb.WriteString("import \"go.opentelemetry.io/collector/pdata/internal\"" + newLine + newLine) + // Add imports + sb.WriteString("import (" + newLine) + for _, imp := range f.imports { + if imp != "" { + sb.WriteString("\t" + imp + newLine) + } else { + sb.WriteString(newLine) + } + } + sb.WriteString(")") // Write all types and funcs for _, s := range f.structs { - if ag, ok := s.(aliasGenerator); ok { - ag.generateAlias(&sb) - } + s.generateInternal(&sb) + } + sb.WriteString(newLine) + + // Write all tests generate value + for _, s := range f.structs { + sb.WriteString(newLine + newLine) + s.generateTestValueHelpers(&sb) } sb.WriteString(newLine) - return sb.String() + + _, err = fp.WriteString(sb.String()) + if err != nil { + return err + } + return fp.Close() +} + +func generateFileName(fileName string) string { + return "generated_" + fileName + ".go" +} + +func generateInternalFileName(fileName string) string { + return "generated_wrapper_" + fileName + ".go" +} + +func generateTestFileName(fileName string) string { + return "generated_" + fileName + "_test.go" } func generateHeader(sb *strings.Builder, packageName string) { diff --git a/pdata/internal/cmd/pdatagen/internal/immutable_slice_structs.go b/pdata/internal/cmd/pdatagen/internal/immutable_slice_structs.go index d5654ae03e2..e4aa378c81c 100644 --- a/pdata/internal/cmd/pdatagen/internal/immutable_slice_structs.go +++ b/pdata/internal/cmd/pdatagen/internal/immutable_slice_structs.go @@ -21,38 +21,41 @@ import ( const immutableSliceTemplate = `// ${structName} represents a 
[]${itemType} slice that cannot be mutated. // The instance of ${structName} can be assigned to multiple objects since it's immutable. -type ${structName} struct { - value []${itemType} +type ${structName} internal.${structName} + +func (ms ${structName}) getOrig() []${itemType} { + return internal.GetOrig${structName}(internal.${structName}(ms)) } // New${structName} creates a new ${structName} by copying the provided []${itemType} slice. -func New${structName}(val []${itemType}) ${structName} { - is := ${structName}{} - if len(val) != 0 { - is.value = make([]${itemType}, len(val)) - copy(is.value, val) +func New${structName}(orig []${itemType}) ${structName} { + if len(orig) == 0 { + return ${structName}(internal.New${structName}(nil)) } - return is + copyOrig := make([]${itemType}, len(orig)) + copy(copyOrig, orig) + return ${structName}(internal.New${structName}(copyOrig)) } // AsRaw returns a copy of the []${itemType} slice. -func (is ${structName}) AsRaw() []${itemType} { - if len(is.value) == 0 { +func (ms ${structName}) AsRaw() []${itemType} { + orig := ms.getOrig() + if len(orig) == 0 { return nil } - val := make([]${itemType}, len(is.value)) - copy(val, is.value) - return val + copyOrig := make([]${itemType}, len(orig)) + copy(copyOrig, orig) + return copyOrig } // Len returns length of the []${itemType} slice value. -func (is ${structName}) Len() int { - return len(is.value) +func (ms ${structName}) Len() int { + return len(ms.getOrig()) } // At returns an item from particular index. -func (is ${structName}) At(i int) ${itemType} { - return is.value[i] +func (ms ${structName}) At(i int) ${itemType} { + return ms.getOrig()[i] }` const immutableSliceTestTemplate = `func TestNew${structName}(t *testing.T) { @@ -82,7 +85,7 @@ const immutableSliceTestTemplate = `func TestNew${structName}(t *testing.T) { t.Run(tt.name, func(t *testing.T) { s := New${structName}(tt.orig) assert.Equal(t, tt.want, s.AsRaw()) - assert.Equal(t, len(s.value), s.Len()) + assert.Equal(t, len(s.getOrig()), s.Len()) if len(tt.orig) > 0 { // verify that orig mutation doesn't have any effect tt.orig[0] = ${itemType}(0) @@ -92,22 +95,33 @@ const immutableSliceTestTemplate = `func TestNew${structName}(t *testing.T) { } }` -const immutableSliceAliasTemplate = `// ${structName} represents a []${itemType} slice that cannot be mutated. -type ${structName} = internal.${structName} +const immutableSliceInternalTemplate = ` +type ${structName} struct { + orig []${itemType} +} -// New${structName} creates a new ${structName} by copying the provided []${itemType} slice. 
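
At usage level the behaviour is unchanged by moving the implementation into `pcommon`: the constructor in the template above still copies the input, so callers cannot mutate an immutable slice after construction. A small runnable sketch against the public `pcommon` API under the post-patch layout:

```golang
package main

import (
	"fmt"

	"go.opentelemetry.io/collector/pdata/pcommon"
)

func main() {
	raw := []uint64{1, 2, 3}
	is := pcommon.NewImmutableUInt64Slice(raw) // copies raw into the wrapper

	raw[0] = 42 // mutating the caller's slice afterwards has no effect
	fmt.Println(is.At(0), is.Len()) // 1 3

	cp := is.AsRaw() // AsRaw also returns a copy
	cp[1] = 99
	fmt.Println(is.At(1)) // still 2
}
```
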
-var New${structName} = internal.New${structName} -` +func GetOrig${structName}(ms ${structName}) []${itemType} { + return ms.orig +} + +func New${structName}(orig []${itemType}) ${structName} { + return ${structName}{orig: orig} +}` type immutableSliceStruct struct { - structName string - itemType string + structName string + packageName string + itemType string } func (iss *immutableSliceStruct) getName() string { return iss.structName } +func (iss *immutableSliceStruct) getPackageName() string { + return iss.packageName +} + func (iss *immutableSliceStruct) generateStruct(sb *strings.Builder) { sb.WriteString(os.Expand(immutableSliceTemplate, func(name string) string { switch name { @@ -136,8 +150,8 @@ func (iss *immutableSliceStruct) generateTests(sb *strings.Builder) { func (iss *immutableSliceStruct) generateTestValueHelpers(*strings.Builder) {} -func (iss *immutableSliceStruct) generateAlias(sb *strings.Builder) { - sb.WriteString(os.Expand(immutableSliceAliasTemplate, func(name string) string { +func (iss *immutableSliceStruct) generateInternal(sb *strings.Builder) { + sb.WriteString(os.Expand(immutableSliceInternalTemplate, func(name string) string { switch name { case "structName": return iss.structName @@ -150,12 +164,14 @@ func (iss *immutableSliceStruct) generateAlias(sb *strings.Builder) { } var immutableSliceFile = &File{ - Name: "immutable_slice", - IsCommon: true, + Name: "immutable_slice", + PackageName: "pcommon", testImports: []string{ `"testing"`, ``, `"github.com/stretchr/testify/assert"`, + ``, + `"go.opentelemetry.io/collector/pdata/internal"`, }, structs: []baseStruct{ immutableByteSliceStruct, @@ -165,16 +181,19 @@ var immutableSliceFile = &File{ } var immutableByteSliceStruct = &immutableSliceStruct{ - structName: "ImmutableByteSlice", - itemType: "byte", + structName: "ImmutableByteSlice", + packageName: "pcommon", + itemType: "byte", } var immutableFloat64SliceStruct = &immutableSliceStruct{ - structName: "ImmutableFloat64Slice", - itemType: "float64", + structName: "ImmutableFloat64Slice", + packageName: "pcommon", + itemType: "float64", } var immutableUInt64SliceStruct = &immutableSliceStruct{ - structName: "ImmutableUInt64Slice", - itemType: "uint64", + structName: "ImmutableUInt64Slice", + packageName: "pcommon", + itemType: "uint64", } diff --git a/pdata/internal/cmd/pdatagen/internal/log_structs.go b/pdata/internal/cmd/pdatagen/internal/log_structs.go index f52c75f0b93..5a8d9935d53 100644 --- a/pdata/internal/cmd/pdatagen/internal/log_structs.go +++ b/pdata/internal/cmd/pdatagen/internal/log_structs.go @@ -15,7 +15,8 @@ package internal // import "go.opentelemetry.io/collector/pdata/internal/cmd/pdatagen/internal" var logFile = &File{ - Name: "plog", + Name: "logs", + PackageName: "plog", imports: []string{ `"sort"`, ``, @@ -26,7 +27,10 @@ var logFile = &File{ ``, `"github.com/stretchr/testify/assert"`, ``, + `"go.opentelemetry.io/collector/pdata/internal"`, + `"go.opentelemetry.io/collector/pdata/internal/data"`, `otlplogs "go.opentelemetry.io/collector/pdata/internal/data/protogen/logs/v1"`, + `"go.opentelemetry.io/collector/pdata/pcommon"`, }, structs: []baseStruct{ resourceLogsSlice, @@ -91,18 +95,12 @@ var logRecord = &messageValueStruct{ &primitiveTypedField{ fieldName: "ObservedTimestamp", originFieldName: "ObservedTimeUnixNano", - returnType: "Timestamp", - rawType: "uint64", - defaultVal: "Timestamp(0)", - testVal: "Timestamp(1234567890)", + returnType: timestampType, }, &primitiveTypedField{ fieldName: "Timestamp", 
originFieldName: "TimeUnixNano", - returnType: "Timestamp", - rawType: "uint64", - defaultVal: "Timestamp(0)", - testVal: "Timestamp(1234567890)", + returnType: timestampType, }, traceIDField, spanIDField, @@ -124,10 +122,12 @@ var logRecord = &messageValueStruct{ &primitiveTypedField{ fieldName: "SeverityNumber", originFieldName: "SeverityNumber", - returnType: "SeverityNumber", - rawType: "otlplogs.SeverityNumber", - defaultVal: `SeverityNumberUndefined`, - testVal: `SeverityNumberInfo`, + returnType: &primitiveType{ + structName: "SeverityNumber", + rawType: "otlplogs.SeverityNumber", + defaultVal: `otlplogs.SeverityNumber(0)`, + testVal: `otlplogs.SeverityNumber(5)`, + }, }, bodyField, attributes, diff --git a/pdata/internal/cmd/pdatagen/internal/metrics_structs.go b/pdata/internal/cmd/pdatagen/internal/metrics_structs.go index 4179ed5b762..bb255f06e12 100644 --- a/pdata/internal/cmd/pdatagen/internal/metrics_structs.go +++ b/pdata/internal/cmd/pdatagen/internal/metrics_structs.go @@ -15,7 +15,8 @@ package internal // import "go.opentelemetry.io/collector/pdata/internal/cmd/pdatagen/internal" var metricsFile = &File{ - Name: "pmetric", + Name: "metrics", + PackageName: "pmetric", imports: []string{ `"sort"`, ``, @@ -26,7 +27,10 @@ var metricsFile = &File{ ``, `"github.com/stretchr/testify/assert"`, ``, + `"go.opentelemetry.io/collector/pdata/internal"`, + `"go.opentelemetry.io/collector/pdata/internal/data"`, `otlpmetrics "go.opentelemetry.io/collector/pdata/internal/data/protogen/metrics/v1"`, + `"go.opentelemetry.io/collector/pdata/pcommon"`, }, structs: []baseStruct{ resourceMetricsSlice, @@ -129,29 +133,34 @@ var metric = &messageValueStruct{ testValueIdx: 1, // Sum values: []oneOfValue{ &oneOfMessageValue{ - fieldName: "Gauge", - originFieldName: "Gauge", - returnMessage: gauge, + fieldName: "Gauge", + originFieldName: "Gauge", + originFieldPackageName: "otlpmetrics", + returnMessage: gauge, }, &oneOfMessageValue{ - fieldName: "Sum", - originFieldName: "Sum", - returnMessage: sum, + fieldName: "Sum", + originFieldName: "Sum", + originFieldPackageName: "otlpmetrics", + returnMessage: sum, }, &oneOfMessageValue{ - fieldName: "Histogram", - originFieldName: "Histogram", - returnMessage: histogram, + fieldName: "Histogram", + originFieldName: "Histogram", + originFieldPackageName: "otlpmetrics", + returnMessage: histogram, }, &oneOfMessageValue{ - fieldName: "ExponentialHistogram", - originFieldName: "ExponentialHistogram", - returnMessage: exponentialHistogram, + fieldName: "ExponentialHistogram", + originFieldName: "ExponentialHistogram", + originFieldPackageName: "otlpmetrics", + returnMessage: exponentialHistogram, }, &oneOfMessageValue{ - fieldName: "Summary", - originFieldName: "Summary", - returnMessage: summary, + fieldName: "Summary", + originFieldName: "Summary", + originFieldPackageName: "otlpmetrics", + returnMessage: summary, }, }, }, @@ -291,7 +300,6 @@ var histogramDataPoint = &messageValueStruct{ dataPointFlagsField, &optionalPrimitiveValue{ fieldName: "Min", - fieldType: "Double", originFieldName: "Min", originTypePrefix: "otlpmetrics.HistogramDataPoint_", returnType: "float64", @@ -300,7 +308,6 @@ var histogramDataPoint = &messageValueStruct{ }, &optionalPrimitiveValue{ fieldName: "Max", - fieldType: "Double", originFieldName: "Max", originTypePrefix: "otlpmetrics.HistogramDataPoint_", returnType: "float64", @@ -329,26 +336,23 @@ var exponentialHistogramDataPoint = &messageValueStruct{ countField, &optionalPrimitiveValue{ fieldName: "Sum", - fieldType: 
"Double", originFieldName: "Sum", originTypePrefix: "otlpmetrics.ExponentialHistogramDataPoint_", returnType: "float64", defaultVal: "float64(0.0)", testVal: "float64(17.13)", }, - &primitiveTypedField{ + &primitiveField{ fieldName: "Scale", originFieldName: "Scale", returnType: "int32", - rawType: "int32", defaultVal: "int32(0)", testVal: "int32(4)", }, - &primitiveTypedField{ + &primitiveField{ fieldName: "ZeroCount", originFieldName: "ZeroCount", returnType: "uint64", - rawType: "uint64", defaultVal: "uint64(0)", testVal: "uint64(201)", }, @@ -366,7 +370,6 @@ var exponentialHistogramDataPoint = &messageValueStruct{ dataPointFlagsField, &optionalPrimitiveValue{ fieldName: "Min", - fieldType: "Double", originFieldName: "Min", originTypePrefix: "otlpmetrics.ExponentialHistogramDataPoint_", returnType: "float64", @@ -375,7 +378,6 @@ var exponentialHistogramDataPoint = &messageValueStruct{ }, &optionalPrimitiveValue{ fieldName: "Max", - fieldType: "Double", originFieldName: "Max", originTypePrefix: "otlpmetrics.ExponentialHistogramDataPoint_", returnType: "float64", @@ -390,11 +392,10 @@ var bucketsValues = &messageValueStruct{ description: "// Buckets are a set of bucket counts, encoded in a contiguous array of counts.", originFullName: "otlpmetrics.ExponentialHistogramDataPoint_Buckets", fields: []baseField{ - &primitiveTypedField{ + &primitiveField{ fieldName: "Offset", originFieldName: "Offset", returnType: "int32", - rawType: "int32", defaultVal: "int32(0)", testVal: "int32(909)", }, @@ -529,21 +530,23 @@ var valueFloat64Field = &primitiveField{ } var bucketCountsField = &primitiveSliceField{ - fieldName: "BucketCounts", - originFieldName: "BucketCounts", - returnType: "ImmutableUInt64Slice", - defaultVal: "ImmutableUInt64Slice{}", - rawType: "[]uint64", - testVal: "NewImmutableUInt64Slice([]uint64{1, 2, 3})", + fieldName: "BucketCounts", + originFieldName: "BucketCounts", + returnType: "ImmutableUInt64Slice", + returnPackageName: "pcommon", + defaultVal: "[]uint64(nil)", + rawType: "[]uint64", + testVal: "[]uint64{1, 2, 3}", } var explicitBoundsField = &primitiveSliceField{ - fieldName: "ExplicitBounds", - originFieldName: "ExplicitBounds", - returnType: "ImmutableFloat64Slice", - defaultVal: "ImmutableFloat64Slice{}", - rawType: "[]float64", - testVal: "NewImmutableFloat64Slice([]float64{1, 2, 3})", + fieldName: "ExplicitBounds", + originFieldName: "ExplicitBounds", + returnType: "ImmutableFloat64Slice", + returnPackageName: "pcommon", + defaultVal: "[]float64(nil)", + rawType: "[]float64", + testVal: "[]float64{1, 2, 3}", } var quantileField = &primitiveField{ @@ -565,15 +568,16 @@ var isMonotonicField = &primitiveField{ var aggregationTemporalityField = &primitiveTypedField{ fieldName: "AggregationTemporality", originFieldName: "AggregationTemporality", - returnType: "MetricAggregationTemporality", - rawType: "otlpmetrics.AggregationTemporality", - defaultVal: "MetricAggregationTemporalityUnspecified", - testVal: "MetricAggregationTemporalityCumulative", + returnType: &primitiveType{ + structName: "MetricAggregationTemporality", + rawType: "otlpmetrics.AggregationTemporality", + defaultVal: "otlpmetrics.AggregationTemporality(0)", + testVal: "otlpmetrics.AggregationTemporality(1)", + }, } var optionalDoubleSumField = &optionalPrimitiveValue{ fieldName: "Sum", - fieldType: "Double", originFieldName: "Sum", originTypePrefix: "otlpmetrics.HistogramDataPoint_", returnType: "float64", diff --git a/pdata/internal/cmd/pdatagen/internal/resource_structs.go 
b/pdata/internal/cmd/pdatagen/internal/resource_structs.go index d506e8db999..6688ffc085b 100644 --- a/pdata/internal/cmd/pdatagen/internal/resource_structs.go +++ b/pdata/internal/cmd/pdatagen/internal/resource_structs.go @@ -15,8 +15,8 @@ package internal // import "go.opentelemetry.io/collector/pdata/internal/cmd/pdatagen/internal" var resourceFile = &File{ - Name: "resource", - IsCommon: true, + Name: "resource", + PackageName: "pcommon", imports: []string{ `otlpresource "go.opentelemetry.io/collector/pdata/internal/data/protogen/resource/v1"`, }, @@ -24,6 +24,8 @@ var resourceFile = &File{ `"testing"`, ``, `"github.com/stretchr/testify/assert"`, + ``, + `"go.opentelemetry.io/collector/pdata/internal"`, }, structs: []baseStruct{ resource, @@ -32,6 +34,7 @@ var resourceFile = &File{ var resource = &messageValueStruct{ structName: "Resource", + packageName: "pcommon", description: "// Resource is a message representing the resource information.", originFullName: "otlpresource.Resource", fields: []baseField{ diff --git a/pdata/internal/cmd/pdatagen/internal/trace_structs.go b/pdata/internal/cmd/pdatagen/internal/trace_structs.go index aede753093b..12e808e4a66 100644 --- a/pdata/internal/cmd/pdatagen/internal/trace_structs.go +++ b/pdata/internal/cmd/pdatagen/internal/trace_structs.go @@ -15,7 +15,8 @@ package internal // import "go.opentelemetry.io/collector/pdata/internal/cmd/pdatagen/internal" var traceFile = &File{ - Name: "ptrace", + Name: "traces", + PackageName: "ptrace", imports: []string{ `"sort"`, ``, @@ -26,7 +27,10 @@ var traceFile = &File{ ``, `"github.com/stretchr/testify/assert"`, ``, + `"go.opentelemetry.io/collector/pdata/internal"`, + `"go.opentelemetry.io/collector/pdata/internal/data"`, `otlptrace "go.opentelemetry.io/collector/pdata/internal/data/protogen/trace/v1"`, + `"go.opentelemetry.io/collector/pdata/pcommon"`, }, structs: []baseStruct{ resourceSpansSlice, @@ -102,10 +106,12 @@ var span = &messageValueStruct{ &primitiveTypedField{ fieldName: "Kind", originFieldName: "Kind", - returnType: "SpanKind", - rawType: "otlptrace.Span_SpanKind", - defaultVal: "SpanKindUnspecified", - testVal: "SpanKindServer", + returnType: &primitiveType{ + structName: "SpanKind", + rawType: "otlptrace.Span_SpanKind", + defaultVal: "otlptrace.Span_SpanKind(0)", + testVal: "otlptrace.Span_SpanKind(3)", + }, }, startTimeField, endTimeField, @@ -190,10 +196,12 @@ var spanStatus = &messageValueStruct{ &primitiveTypedField{ fieldName: "Code", originFieldName: "Code", - returnType: "StatusCode", - rawType: "otlptrace.Status_StatusCode", - defaultVal: "StatusCode(0)", - testVal: "StatusCode(1)", + returnType: &primitiveType{ + structName: "StatusCode", + rawType: "otlptrace.Status_StatusCode", + defaultVal: "0", + testVal: "1", + }, }, &primitiveField{ fieldName: "Message", @@ -205,37 +213,15 @@ var spanStatus = &messageValueStruct{ }, } -var traceIDField = &primitiveStructField{ - fieldName: "TraceID", - originFieldName: "TraceId", - returnType: "TraceID", - defaultVal: "NewTraceID([16]byte{})", - testVal: "NewTraceID([16]byte{1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1})", -} - -var spanIDField = &primitiveStructField{ - fieldName: "SpanID", - originFieldName: "SpanId", - returnType: "SpanID", - defaultVal: "NewSpanID([8]byte{})", - testVal: "NewSpanID([8]byte{1, 2, 3, 4, 5, 6, 7, 8})", -} - -var parentSpanIDField = &primitiveStructField{ - fieldName: "ParentSpanID", - originFieldName: "ParentSpanId", - returnType: "SpanID", - defaultVal: "NewSpanID([8]byte{})", - testVal: 
"NewSpanID([8]byte{8, 7, 6, 5, 4, 3, 2, 1})", -} - var traceStateField = &primitiveTypedField{ fieldName: "TraceState", originFieldName: "TraceState", - returnType: "TraceState", - rawType: "string", - defaultVal: `TraceState("")`, - testVal: `TraceState("congo=congos")`, + returnType: &primitiveType{ + structName: "TraceState", + rawType: "string", + defaultVal: `""`, + testVal: `"congo=congos"`, + }, } var droppedAttributesCount = &primitiveField{ diff --git a/pdata/internal/cmd/pdatagen/main.go b/pdata/internal/cmd/pdatagen/main.go index a85f7ca9a6d..d868b018c62 100644 --- a/pdata/internal/cmd/pdatagen/main.go +++ b/pdata/internal/cmd/pdatagen/main.go @@ -15,9 +15,6 @@ package main import ( - "os" - "path/filepath" - "go.opentelemetry.io/collector/pdata/internal/cmd/pdatagen/internal" ) @@ -29,26 +26,8 @@ func check(e error) { func main() { for _, fp := range internal.AllFiles { - f, err := os.Create("./pdata/internal/generated_" + fp.Name + ".go") - check(err) - _, err = f.WriteString(fp.GenerateFile()) - check(err) - check(f.Close()) - f, err = os.Create("./pdata/internal/generated_" + fp.Name + "_test.go") - check(err) - _, err = f.WriteString(fp.GenerateTestFile()) - check(err) - check(f.Close()) - fileName := "generated_alias.go" - packageName := fp.Name - if fp.IsCommon { - fileName = "generated_" + fp.Name + "_alias.go" - packageName = "pcommon" - } - f, err = os.Create(filepath.Clean("./pdata/" + packageName + "/" + fileName)) - check(err) - _, err = f.WriteString(fp.GenerateAliasFile(packageName)) - check(err) - check(f.Close()) + check(fp.GenerateFile()) + check(fp.GenerateTestFile()) + check(fp.GenerateInternalFile()) } } diff --git a/pdata/internal/generated_immutable_slice.go b/pdata/internal/generated_immutable_slice.go deleted file mode 100644 index 5ca3d0f68b4..00000000000 --- a/pdata/internal/generated_immutable_slice.go +++ /dev/null @@ -1,126 +0,0 @@ -// Copyright The OpenTelemetry Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. -// To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". - -package internal - -// ImmutableByteSlice represents a []byte slice that cannot be mutated. -// The instance of ImmutableByteSlice can be assigned to multiple objects since it's immutable. -type ImmutableByteSlice struct { - value []byte -} - -// NewImmutableByteSlice creates a new ImmutableByteSlice by copying the provided []byte slice. -func NewImmutableByteSlice(val []byte) ImmutableByteSlice { - is := ImmutableByteSlice{} - if len(val) != 0 { - is.value = make([]byte, len(val)) - copy(is.value, val) - } - return is -} - -// AsRaw returns a copy of the []byte slice. -func (is ImmutableByteSlice) AsRaw() []byte { - if len(is.value) == 0 { - return nil - } - val := make([]byte, len(is.value)) - copy(val, is.value) - return val -} - -// Len returns length of the []byte slice value. 
-func (is ImmutableByteSlice) Len() int { - return len(is.value) -} - -// At returns an item from particular index. -func (is ImmutableByteSlice) At(i int) byte { - return is.value[i] -} - -// ImmutableFloat64Slice represents a []float64 slice that cannot be mutated. -// The instance of ImmutableFloat64Slice can be assigned to multiple objects since it's immutable. -type ImmutableFloat64Slice struct { - value []float64 -} - -// NewImmutableFloat64Slice creates a new ImmutableFloat64Slice by copying the provided []float64 slice. -func NewImmutableFloat64Slice(val []float64) ImmutableFloat64Slice { - is := ImmutableFloat64Slice{} - if len(val) != 0 { - is.value = make([]float64, len(val)) - copy(is.value, val) - } - return is -} - -// AsRaw returns a copy of the []float64 slice. -func (is ImmutableFloat64Slice) AsRaw() []float64 { - if len(is.value) == 0 { - return nil - } - val := make([]float64, len(is.value)) - copy(val, is.value) - return val -} - -// Len returns length of the []float64 slice value. -func (is ImmutableFloat64Slice) Len() int { - return len(is.value) -} - -// At returns an item from particular index. -func (is ImmutableFloat64Slice) At(i int) float64 { - return is.value[i] -} - -// ImmutableUInt64Slice represents a []uint64 slice that cannot be mutated. -// The instance of ImmutableUInt64Slice can be assigned to multiple objects since it's immutable. -type ImmutableUInt64Slice struct { - value []uint64 -} - -// NewImmutableUInt64Slice creates a new ImmutableUInt64Slice by copying the provided []uint64 slice. -func NewImmutableUInt64Slice(val []uint64) ImmutableUInt64Slice { - is := ImmutableUInt64Slice{} - if len(val) != 0 { - is.value = make([]uint64, len(val)) - copy(is.value, val) - } - return is -} - -// AsRaw returns a copy of the []uint64 slice. -func (is ImmutableUInt64Slice) AsRaw() []uint64 { - if len(is.value) == 0 { - return nil - } - val := make([]uint64, len(is.value)) - copy(val, is.value) - return val -} - -// Len returns length of the []uint64 slice value. -func (is ImmutableUInt64Slice) Len() int { - return len(is.value) -} - -// At returns an item from particular index. -func (is ImmutableUInt64Slice) At(i int) uint64 { - return is.value[i] -} diff --git a/pdata/internal/generated_plog_test.go b/pdata/internal/generated_plog_test.go deleted file mode 100644 index 9950f7b1b1b..00000000000 --- a/pdata/internal/generated_plog_test.go +++ /dev/null @@ -1,615 +0,0 @@ -// Copyright The OpenTelemetry Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. -// To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". 
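
The internal tests removed in this and the following files are regenerated into their public packages. For orientation, this is roughly what the `messageValueTestTemplate` shown earlier expands to for `LogRecord` in `plog` (an illustrative expansion, not a copy of the regenerated file):

```golang
package plog

import (
	"testing"

	"github.com/stretchr/testify/assert"

	"go.opentelemetry.io/collector/pdata/internal"
)

func TestLogRecord_MoveTo(t *testing.T) {
	// The public test converts between plog.LogRecord and internal.LogRecord
	// with plain type conversions to reuse the internal test helpers.
	ms := LogRecord(internal.GenerateTestLogRecord())
	dest := NewLogRecord()
	ms.MoveTo(dest)
	assert.Equal(t, NewLogRecord(), ms)
	assert.Equal(t, LogRecord(internal.GenerateTestLogRecord()), dest)
}
```
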
- -package internal - -import ( - "testing" - - "github.com/stretchr/testify/assert" - - otlplogs "go.opentelemetry.io/collector/pdata/internal/data/protogen/logs/v1" -) - -func TestResourceLogsSlice(t *testing.T) { - es := NewResourceLogsSlice() - assert.EqualValues(t, 0, es.Len()) - es = newResourceLogsSlice(&[]*otlplogs.ResourceLogs{}) - assert.EqualValues(t, 0, es.Len()) - - es.EnsureCapacity(7) - emptyVal := newResourceLogs(&otlplogs.ResourceLogs{}) - testVal := generateTestResourceLogs() - assert.EqualValues(t, 7, cap(*es.orig)) - for i := 0; i < es.Len(); i++ { - el := es.AppendEmpty() - assert.EqualValues(t, emptyVal, el) - fillTestResourceLogs(el) - assert.EqualValues(t, testVal, el) - } -} - -func TestResourceLogsSlice_CopyTo(t *testing.T) { - dest := NewResourceLogsSlice() - // Test CopyTo to empty - NewResourceLogsSlice().CopyTo(dest) - assert.EqualValues(t, NewResourceLogsSlice(), dest) - - // Test CopyTo larger slice - generateTestResourceLogsSlice().CopyTo(dest) - assert.EqualValues(t, generateTestResourceLogsSlice(), dest) - - // Test CopyTo same size slice - generateTestResourceLogsSlice().CopyTo(dest) - assert.EqualValues(t, generateTestResourceLogsSlice(), dest) -} - -func TestResourceLogsSlice_EnsureCapacity(t *testing.T) { - es := generateTestResourceLogsSlice() - // Test ensure smaller capacity. - const ensureSmallLen = 4 - expectedEs := make(map[*otlplogs.ResourceLogs]bool) - for i := 0; i < es.Len(); i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, es.Len(), len(expectedEs)) - es.EnsureCapacity(ensureSmallLen) - assert.Less(t, ensureSmallLen, es.Len()) - foundEs := make(map[*otlplogs.ResourceLogs]bool, es.Len()) - for i := 0; i < es.Len(); i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) - - // Test ensure larger capacity - const ensureLargeLen = 9 - oldLen := es.Len() - expectedEs = make(map[*otlplogs.ResourceLogs]bool, oldLen) - for i := 0; i < oldLen; i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, oldLen, len(expectedEs)) - es.EnsureCapacity(ensureLargeLen) - assert.Equal(t, ensureLargeLen, cap(*es.orig)) - foundEs = make(map[*otlplogs.ResourceLogs]bool, oldLen) - for i := 0; i < oldLen; i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) -} - -func TestResourceLogsSlice_MoveAndAppendTo(t *testing.T) { - // Test MoveAndAppendTo to empty - expectedSlice := generateTestResourceLogsSlice() - dest := NewResourceLogsSlice() - src := generateTestResourceLogsSlice() - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestResourceLogsSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo empty slice - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestResourceLogsSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo not empty slice - generateTestResourceLogsSlice().MoveAndAppendTo(dest) - assert.EqualValues(t, 2*expectedSlice.Len(), dest.Len()) - for i := 0; i < expectedSlice.Len(); i++ { - assert.EqualValues(t, expectedSlice.At(i), dest.At(i)) - assert.EqualValues(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) - } -} - -func TestResourceLogsSlice_RemoveIf(t *testing.T) { - // Test RemoveIf on empty slice - emptySlice := NewResourceLogsSlice() - emptySlice.RemoveIf(func(el ResourceLogs) bool { - t.Fail() - return false - }) - - // Test RemoveIf - filtered := 
generateTestResourceLogsSlice() - pos := 0 - filtered.RemoveIf(func(el ResourceLogs) bool { - pos++ - return pos%3 == 0 - }) - assert.Equal(t, 5, filtered.Len()) -} - -func TestResourceLogs_MoveTo(t *testing.T) { - ms := generateTestResourceLogs() - dest := NewResourceLogs() - ms.MoveTo(dest) - assert.EqualValues(t, NewResourceLogs(), ms) - assert.EqualValues(t, generateTestResourceLogs(), dest) -} - -func TestResourceLogs_CopyTo(t *testing.T) { - ms := NewResourceLogs() - orig := NewResourceLogs() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestResourceLogs() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestResourceLogs_Resource(t *testing.T) { - ms := NewResourceLogs() - fillTestResource(ms.Resource()) - assert.EqualValues(t, generateTestResource(), ms.Resource()) -} - -func TestResourceLogs_SchemaUrl(t *testing.T) { - ms := NewResourceLogs() - assert.EqualValues(t, "", ms.SchemaUrl()) - testValSchemaUrl := "https://opentelemetry.io/schemas/1.5.0" - ms.SetSchemaUrl(testValSchemaUrl) - assert.EqualValues(t, testValSchemaUrl, ms.SchemaUrl()) -} - -func TestResourceLogs_ScopeLogs(t *testing.T) { - ms := NewResourceLogs() - assert.EqualValues(t, NewScopeLogsSlice(), ms.ScopeLogs()) - fillTestScopeLogsSlice(ms.ScopeLogs()) - testValScopeLogs := generateTestScopeLogsSlice() - assert.EqualValues(t, testValScopeLogs, ms.ScopeLogs()) -} - -func TestScopeLogsSlice(t *testing.T) { - es := NewScopeLogsSlice() - assert.EqualValues(t, 0, es.Len()) - es = newScopeLogsSlice(&[]*otlplogs.ScopeLogs{}) - assert.EqualValues(t, 0, es.Len()) - - es.EnsureCapacity(7) - emptyVal := newScopeLogs(&otlplogs.ScopeLogs{}) - testVal := generateTestScopeLogs() - assert.EqualValues(t, 7, cap(*es.orig)) - for i := 0; i < es.Len(); i++ { - el := es.AppendEmpty() - assert.EqualValues(t, emptyVal, el) - fillTestScopeLogs(el) - assert.EqualValues(t, testVal, el) - } -} - -func TestScopeLogsSlice_CopyTo(t *testing.T) { - dest := NewScopeLogsSlice() - // Test CopyTo to empty - NewScopeLogsSlice().CopyTo(dest) - assert.EqualValues(t, NewScopeLogsSlice(), dest) - - // Test CopyTo larger slice - generateTestScopeLogsSlice().CopyTo(dest) - assert.EqualValues(t, generateTestScopeLogsSlice(), dest) - - // Test CopyTo same size slice - generateTestScopeLogsSlice().CopyTo(dest) - assert.EqualValues(t, generateTestScopeLogsSlice(), dest) -} - -func TestScopeLogsSlice_EnsureCapacity(t *testing.T) { - es := generateTestScopeLogsSlice() - // Test ensure smaller capacity. 
- const ensureSmallLen = 4 - expectedEs := make(map[*otlplogs.ScopeLogs]bool) - for i := 0; i < es.Len(); i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, es.Len(), len(expectedEs)) - es.EnsureCapacity(ensureSmallLen) - assert.Less(t, ensureSmallLen, es.Len()) - foundEs := make(map[*otlplogs.ScopeLogs]bool, es.Len()) - for i := 0; i < es.Len(); i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) - - // Test ensure larger capacity - const ensureLargeLen = 9 - oldLen := es.Len() - expectedEs = make(map[*otlplogs.ScopeLogs]bool, oldLen) - for i := 0; i < oldLen; i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, oldLen, len(expectedEs)) - es.EnsureCapacity(ensureLargeLen) - assert.Equal(t, ensureLargeLen, cap(*es.orig)) - foundEs = make(map[*otlplogs.ScopeLogs]bool, oldLen) - for i := 0; i < oldLen; i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) -} - -func TestScopeLogsSlice_MoveAndAppendTo(t *testing.T) { - // Test MoveAndAppendTo to empty - expectedSlice := generateTestScopeLogsSlice() - dest := NewScopeLogsSlice() - src := generateTestScopeLogsSlice() - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestScopeLogsSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo empty slice - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestScopeLogsSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo not empty slice - generateTestScopeLogsSlice().MoveAndAppendTo(dest) - assert.EqualValues(t, 2*expectedSlice.Len(), dest.Len()) - for i := 0; i < expectedSlice.Len(); i++ { - assert.EqualValues(t, expectedSlice.At(i), dest.At(i)) - assert.EqualValues(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) - } -} - -func TestScopeLogsSlice_RemoveIf(t *testing.T) { - // Test RemoveIf on empty slice - emptySlice := NewScopeLogsSlice() - emptySlice.RemoveIf(func(el ScopeLogs) bool { - t.Fail() - return false - }) - - // Test RemoveIf - filtered := generateTestScopeLogsSlice() - pos := 0 - filtered.RemoveIf(func(el ScopeLogs) bool { - pos++ - return pos%3 == 0 - }) - assert.Equal(t, 5, filtered.Len()) -} - -func TestScopeLogs_MoveTo(t *testing.T) { - ms := generateTestScopeLogs() - dest := NewScopeLogs() - ms.MoveTo(dest) - assert.EqualValues(t, NewScopeLogs(), ms) - assert.EqualValues(t, generateTestScopeLogs(), dest) -} - -func TestScopeLogs_CopyTo(t *testing.T) { - ms := NewScopeLogs() - orig := NewScopeLogs() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestScopeLogs() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestScopeLogs_Scope(t *testing.T) { - ms := NewScopeLogs() - fillTestInstrumentationScope(ms.Scope()) - assert.EqualValues(t, generateTestInstrumentationScope(), ms.Scope()) -} - -func TestScopeLogs_SchemaUrl(t *testing.T) { - ms := NewScopeLogs() - assert.EqualValues(t, "", ms.SchemaUrl()) - testValSchemaUrl := "https://opentelemetry.io/schemas/1.5.0" - ms.SetSchemaUrl(testValSchemaUrl) - assert.EqualValues(t, testValSchemaUrl, ms.SchemaUrl()) -} - -func TestScopeLogs_LogRecords(t *testing.T) { - ms := NewScopeLogs() - assert.EqualValues(t, NewLogRecordSlice(), ms.LogRecords()) - fillTestLogRecordSlice(ms.LogRecords()) - testValLogRecords := generateTestLogRecordSlice() - assert.EqualValues(t, testValLogRecords, ms.LogRecords()) -} - -func TestLogRecordSlice(t 
*testing.T) { - es := NewLogRecordSlice() - assert.EqualValues(t, 0, es.Len()) - es = newLogRecordSlice(&[]*otlplogs.LogRecord{}) - assert.EqualValues(t, 0, es.Len()) - - es.EnsureCapacity(7) - emptyVal := newLogRecord(&otlplogs.LogRecord{}) - testVal := generateTestLogRecord() - assert.EqualValues(t, 7, cap(*es.orig)) - for i := 0; i < es.Len(); i++ { - el := es.AppendEmpty() - assert.EqualValues(t, emptyVal, el) - fillTestLogRecord(el) - assert.EqualValues(t, testVal, el) - } -} - -func TestLogRecordSlice_CopyTo(t *testing.T) { - dest := NewLogRecordSlice() - // Test CopyTo to empty - NewLogRecordSlice().CopyTo(dest) - assert.EqualValues(t, NewLogRecordSlice(), dest) - - // Test CopyTo larger slice - generateTestLogRecordSlice().CopyTo(dest) - assert.EqualValues(t, generateTestLogRecordSlice(), dest) - - // Test CopyTo same size slice - generateTestLogRecordSlice().CopyTo(dest) - assert.EqualValues(t, generateTestLogRecordSlice(), dest) -} - -func TestLogRecordSlice_EnsureCapacity(t *testing.T) { - es := generateTestLogRecordSlice() - // Test ensure smaller capacity. - const ensureSmallLen = 4 - expectedEs := make(map[*otlplogs.LogRecord]bool) - for i := 0; i < es.Len(); i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, es.Len(), len(expectedEs)) - es.EnsureCapacity(ensureSmallLen) - assert.Less(t, ensureSmallLen, es.Len()) - foundEs := make(map[*otlplogs.LogRecord]bool, es.Len()) - for i := 0; i < es.Len(); i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) - - // Test ensure larger capacity - const ensureLargeLen = 9 - oldLen := es.Len() - expectedEs = make(map[*otlplogs.LogRecord]bool, oldLen) - for i := 0; i < oldLen; i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, oldLen, len(expectedEs)) - es.EnsureCapacity(ensureLargeLen) - assert.Equal(t, ensureLargeLen, cap(*es.orig)) - foundEs = make(map[*otlplogs.LogRecord]bool, oldLen) - for i := 0; i < oldLen; i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) -} - -func TestLogRecordSlice_MoveAndAppendTo(t *testing.T) { - // Test MoveAndAppendTo to empty - expectedSlice := generateTestLogRecordSlice() - dest := NewLogRecordSlice() - src := generateTestLogRecordSlice() - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestLogRecordSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo empty slice - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestLogRecordSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo not empty slice - generateTestLogRecordSlice().MoveAndAppendTo(dest) - assert.EqualValues(t, 2*expectedSlice.Len(), dest.Len()) - for i := 0; i < expectedSlice.Len(); i++ { - assert.EqualValues(t, expectedSlice.At(i), dest.At(i)) - assert.EqualValues(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) - } -} - -func TestLogRecordSlice_RemoveIf(t *testing.T) { - // Test RemoveIf on empty slice - emptySlice := NewLogRecordSlice() - emptySlice.RemoveIf(func(el LogRecord) bool { - t.Fail() - return false - }) - - // Test RemoveIf - filtered := generateTestLogRecordSlice() - pos := 0 - filtered.RemoveIf(func(el LogRecord) bool { - pos++ - return pos%3 == 0 - }) - assert.Equal(t, 5, filtered.Len()) -} - -func TestLogRecord_MoveTo(t *testing.T) { - ms := generateTestLogRecord() - dest := NewLogRecord() - ms.MoveTo(dest) - assert.EqualValues(t, 
NewLogRecord(), ms) - assert.EqualValues(t, generateTestLogRecord(), dest) -} - -func TestLogRecord_CopyTo(t *testing.T) { - ms := NewLogRecord() - orig := NewLogRecord() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestLogRecord() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestLogRecord_ObservedTimestamp(t *testing.T) { - ms := NewLogRecord() - assert.EqualValues(t, Timestamp(0), ms.ObservedTimestamp()) - testValObservedTimestamp := Timestamp(1234567890) - ms.SetObservedTimestamp(testValObservedTimestamp) - assert.EqualValues(t, testValObservedTimestamp, ms.ObservedTimestamp()) -} - -func TestLogRecord_Timestamp(t *testing.T) { - ms := NewLogRecord() - assert.EqualValues(t, Timestamp(0), ms.Timestamp()) - testValTimestamp := Timestamp(1234567890) - ms.SetTimestamp(testValTimestamp) - assert.EqualValues(t, testValTimestamp, ms.Timestamp()) -} - -func TestLogRecord_TraceID(t *testing.T) { - ms := NewLogRecord() - assert.EqualValues(t, NewTraceID([16]byte{}), ms.TraceID()) - testValTraceID := NewTraceID([16]byte{1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1}) - ms.SetTraceID(testValTraceID) - assert.EqualValues(t, testValTraceID, ms.TraceID()) -} - -func TestLogRecord_SpanID(t *testing.T) { - ms := NewLogRecord() - assert.EqualValues(t, NewSpanID([8]byte{}), ms.SpanID()) - testValSpanID := NewSpanID([8]byte{1, 2, 3, 4, 5, 6, 7, 8}) - ms.SetSpanID(testValSpanID) - assert.EqualValues(t, testValSpanID, ms.SpanID()) -} - -func TestLogRecord_FlagsStruct(t *testing.T) { - ms := NewLogRecord() - fillTestLogRecordFlags(ms.FlagsStruct()) - assert.EqualValues(t, generateTestLogRecordFlags(), ms.FlagsStruct()) -} - -func TestLogRecord_SeverityText(t *testing.T) { - ms := NewLogRecord() - assert.EqualValues(t, "", ms.SeverityText()) - testValSeverityText := "INFO" - ms.SetSeverityText(testValSeverityText) - assert.EqualValues(t, testValSeverityText, ms.SeverityText()) -} - -func TestLogRecord_SeverityNumber(t *testing.T) { - ms := NewLogRecord() - assert.EqualValues(t, SeverityNumberUndefined, ms.SeverityNumber()) - testValSeverityNumber := SeverityNumberInfo - ms.SetSeverityNumber(testValSeverityNumber) - assert.EqualValues(t, testValSeverityNumber, ms.SeverityNumber()) -} - -func TestLogRecord_Body(t *testing.T) { - ms := NewLogRecord() - fillTestValue(ms.Body()) - assert.EqualValues(t, generateTestValue(), ms.Body()) -} - -func TestLogRecord_Attributes(t *testing.T) { - ms := NewLogRecord() - assert.EqualValues(t, NewMap(), ms.Attributes()) - fillTestMap(ms.Attributes()) - testValAttributes := generateTestMap() - assert.EqualValues(t, testValAttributes, ms.Attributes()) -} - -func TestLogRecord_DroppedAttributesCount(t *testing.T) { - ms := NewLogRecord() - assert.EqualValues(t, uint32(0), ms.DroppedAttributesCount()) - testValDroppedAttributesCount := uint32(17) - ms.SetDroppedAttributesCount(testValDroppedAttributesCount) - assert.EqualValues(t, testValDroppedAttributesCount, ms.DroppedAttributesCount()) -} - -func generateTestResourceLogsSlice() ResourceLogsSlice { - tv := NewResourceLogsSlice() - fillTestResourceLogsSlice(tv) - return tv -} - -func fillTestResourceLogsSlice(tv ResourceLogsSlice) { - l := 7 - tv.EnsureCapacity(l) - for i := 0; i < l; i++ { - fillTestResourceLogs(tv.AppendEmpty()) - } -} - -func generateTestResourceLogs() ResourceLogs { - tv := NewResourceLogs() - fillTestResourceLogs(tv) - return tv -} - -func fillTestResourceLogs(tv ResourceLogs) { - fillTestResource(tv.Resource()) - 
tv.SetSchemaUrl("https://opentelemetry.io/schemas/1.5.0") - fillTestScopeLogsSlice(tv.ScopeLogs()) -} - -func generateTestScopeLogsSlice() ScopeLogsSlice { - tv := NewScopeLogsSlice() - fillTestScopeLogsSlice(tv) - return tv -} - -func fillTestScopeLogsSlice(tv ScopeLogsSlice) { - l := 7 - tv.EnsureCapacity(l) - for i := 0; i < l; i++ { - fillTestScopeLogs(tv.AppendEmpty()) - } -} - -func generateTestScopeLogs() ScopeLogs { - tv := NewScopeLogs() - fillTestScopeLogs(tv) - return tv -} - -func fillTestScopeLogs(tv ScopeLogs) { - fillTestInstrumentationScope(tv.Scope()) - tv.SetSchemaUrl("https://opentelemetry.io/schemas/1.5.0") - fillTestLogRecordSlice(tv.LogRecords()) -} - -func generateTestLogRecordSlice() LogRecordSlice { - tv := NewLogRecordSlice() - fillTestLogRecordSlice(tv) - return tv -} - -func fillTestLogRecordSlice(tv LogRecordSlice) { - l := 7 - tv.EnsureCapacity(l) - for i := 0; i < l; i++ { - fillTestLogRecord(tv.AppendEmpty()) - } -} - -func generateTestLogRecord() LogRecord { - tv := NewLogRecord() - fillTestLogRecord(tv) - return tv -} - -func fillTestLogRecord(tv LogRecord) { - tv.SetObservedTimestamp(Timestamp(1234567890)) - tv.SetTimestamp(Timestamp(1234567890)) - tv.SetTraceID(NewTraceID([16]byte{1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1})) - tv.SetSpanID(NewSpanID([8]byte{1, 2, 3, 4, 5, 6, 7, 8})) - fillTestLogRecordFlags(tv.FlagsStruct()) - tv.SetSeverityText("INFO") - tv.SetSeverityNumber(SeverityNumberInfo) - fillTestValue(tv.Body()) - fillTestMap(tv.Attributes()) - tv.SetDroppedAttributesCount(uint32(17)) -} diff --git a/pdata/internal/generated_pmetric_test.go b/pdata/internal/generated_pmetric_test.go deleted file mode 100644 index 1ad9c656a87..00000000000 --- a/pdata/internal/generated_pmetric_test.go +++ /dev/null @@ -1,2242 +0,0 @@ -// Copyright The OpenTelemetry Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. -// To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". 
- -package internal - -import ( - "testing" - - "github.com/stretchr/testify/assert" - - otlpmetrics "go.opentelemetry.io/collector/pdata/internal/data/protogen/metrics/v1" -) - -func TestResourceMetricsSlice(t *testing.T) { - es := NewResourceMetricsSlice() - assert.EqualValues(t, 0, es.Len()) - es = newResourceMetricsSlice(&[]*otlpmetrics.ResourceMetrics{}) - assert.EqualValues(t, 0, es.Len()) - - es.EnsureCapacity(7) - emptyVal := newResourceMetrics(&otlpmetrics.ResourceMetrics{}) - testVal := generateTestResourceMetrics() - assert.EqualValues(t, 7, cap(*es.orig)) - for i := 0; i < es.Len(); i++ { - el := es.AppendEmpty() - assert.EqualValues(t, emptyVal, el) - fillTestResourceMetrics(el) - assert.EqualValues(t, testVal, el) - } -} - -func TestResourceMetricsSlice_CopyTo(t *testing.T) { - dest := NewResourceMetricsSlice() - // Test CopyTo to empty - NewResourceMetricsSlice().CopyTo(dest) - assert.EqualValues(t, NewResourceMetricsSlice(), dest) - - // Test CopyTo larger slice - generateTestResourceMetricsSlice().CopyTo(dest) - assert.EqualValues(t, generateTestResourceMetricsSlice(), dest) - - // Test CopyTo same size slice - generateTestResourceMetricsSlice().CopyTo(dest) - assert.EqualValues(t, generateTestResourceMetricsSlice(), dest) -} - -func TestResourceMetricsSlice_EnsureCapacity(t *testing.T) { - es := generateTestResourceMetricsSlice() - // Test ensure smaller capacity. - const ensureSmallLen = 4 - expectedEs := make(map[*otlpmetrics.ResourceMetrics]bool) - for i := 0; i < es.Len(); i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, es.Len(), len(expectedEs)) - es.EnsureCapacity(ensureSmallLen) - assert.Less(t, ensureSmallLen, es.Len()) - foundEs := make(map[*otlpmetrics.ResourceMetrics]bool, es.Len()) - for i := 0; i < es.Len(); i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) - - // Test ensure larger capacity - const ensureLargeLen = 9 - oldLen := es.Len() - expectedEs = make(map[*otlpmetrics.ResourceMetrics]bool, oldLen) - for i := 0; i < oldLen; i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, oldLen, len(expectedEs)) - es.EnsureCapacity(ensureLargeLen) - assert.Equal(t, ensureLargeLen, cap(*es.orig)) - foundEs = make(map[*otlpmetrics.ResourceMetrics]bool, oldLen) - for i := 0; i < oldLen; i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) -} - -func TestResourceMetricsSlice_MoveAndAppendTo(t *testing.T) { - // Test MoveAndAppendTo to empty - expectedSlice := generateTestResourceMetricsSlice() - dest := NewResourceMetricsSlice() - src := generateTestResourceMetricsSlice() - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestResourceMetricsSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo empty slice - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestResourceMetricsSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo not empty slice - generateTestResourceMetricsSlice().MoveAndAppendTo(dest) - assert.EqualValues(t, 2*expectedSlice.Len(), dest.Len()) - for i := 0; i < expectedSlice.Len(); i++ { - assert.EqualValues(t, expectedSlice.At(i), dest.At(i)) - assert.EqualValues(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) - } -} - -func TestResourceMetricsSlice_RemoveIf(t *testing.T) { - // Test RemoveIf on empty slice - emptySlice := NewResourceMetricsSlice() - 
emptySlice.RemoveIf(func(el ResourceMetrics) bool { - t.Fail() - return false - }) - - // Test RemoveIf - filtered := generateTestResourceMetricsSlice() - pos := 0 - filtered.RemoveIf(func(el ResourceMetrics) bool { - pos++ - return pos%3 == 0 - }) - assert.Equal(t, 5, filtered.Len()) -} - -func TestResourceMetrics_MoveTo(t *testing.T) { - ms := generateTestResourceMetrics() - dest := NewResourceMetrics() - ms.MoveTo(dest) - assert.EqualValues(t, NewResourceMetrics(), ms) - assert.EqualValues(t, generateTestResourceMetrics(), dest) -} - -func TestResourceMetrics_CopyTo(t *testing.T) { - ms := NewResourceMetrics() - orig := NewResourceMetrics() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestResourceMetrics() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestResourceMetrics_Resource(t *testing.T) { - ms := NewResourceMetrics() - fillTestResource(ms.Resource()) - assert.EqualValues(t, generateTestResource(), ms.Resource()) -} - -func TestResourceMetrics_SchemaUrl(t *testing.T) { - ms := NewResourceMetrics() - assert.EqualValues(t, "", ms.SchemaUrl()) - testValSchemaUrl := "https://opentelemetry.io/schemas/1.5.0" - ms.SetSchemaUrl(testValSchemaUrl) - assert.EqualValues(t, testValSchemaUrl, ms.SchemaUrl()) -} - -func TestResourceMetrics_ScopeMetrics(t *testing.T) { - ms := NewResourceMetrics() - assert.EqualValues(t, NewScopeMetricsSlice(), ms.ScopeMetrics()) - fillTestScopeMetricsSlice(ms.ScopeMetrics()) - testValScopeMetrics := generateTestScopeMetricsSlice() - assert.EqualValues(t, testValScopeMetrics, ms.ScopeMetrics()) -} - -func TestScopeMetricsSlice(t *testing.T) { - es := NewScopeMetricsSlice() - assert.EqualValues(t, 0, es.Len()) - es = newScopeMetricsSlice(&[]*otlpmetrics.ScopeMetrics{}) - assert.EqualValues(t, 0, es.Len()) - - es.EnsureCapacity(7) - emptyVal := newScopeMetrics(&otlpmetrics.ScopeMetrics{}) - testVal := generateTestScopeMetrics() - assert.EqualValues(t, 7, cap(*es.orig)) - for i := 0; i < es.Len(); i++ { - el := es.AppendEmpty() - assert.EqualValues(t, emptyVal, el) - fillTestScopeMetrics(el) - assert.EqualValues(t, testVal, el) - } -} - -func TestScopeMetricsSlice_CopyTo(t *testing.T) { - dest := NewScopeMetricsSlice() - // Test CopyTo to empty - NewScopeMetricsSlice().CopyTo(dest) - assert.EqualValues(t, NewScopeMetricsSlice(), dest) - - // Test CopyTo larger slice - generateTestScopeMetricsSlice().CopyTo(dest) - assert.EqualValues(t, generateTestScopeMetricsSlice(), dest) - - // Test CopyTo same size slice - generateTestScopeMetricsSlice().CopyTo(dest) - assert.EqualValues(t, generateTestScopeMetricsSlice(), dest) -} - -func TestScopeMetricsSlice_EnsureCapacity(t *testing.T) { - es := generateTestScopeMetricsSlice() - // Test ensure smaller capacity. 
- const ensureSmallLen = 4 - expectedEs := make(map[*otlpmetrics.ScopeMetrics]bool) - for i := 0; i < es.Len(); i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, es.Len(), len(expectedEs)) - es.EnsureCapacity(ensureSmallLen) - assert.Less(t, ensureSmallLen, es.Len()) - foundEs := make(map[*otlpmetrics.ScopeMetrics]bool, es.Len()) - for i := 0; i < es.Len(); i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) - - // Test ensure larger capacity - const ensureLargeLen = 9 - oldLen := es.Len() - expectedEs = make(map[*otlpmetrics.ScopeMetrics]bool, oldLen) - for i := 0; i < oldLen; i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, oldLen, len(expectedEs)) - es.EnsureCapacity(ensureLargeLen) - assert.Equal(t, ensureLargeLen, cap(*es.orig)) - foundEs = make(map[*otlpmetrics.ScopeMetrics]bool, oldLen) - for i := 0; i < oldLen; i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) -} - -func TestScopeMetricsSlice_MoveAndAppendTo(t *testing.T) { - // Test MoveAndAppendTo to empty - expectedSlice := generateTestScopeMetricsSlice() - dest := NewScopeMetricsSlice() - src := generateTestScopeMetricsSlice() - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestScopeMetricsSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo empty slice - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestScopeMetricsSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo not empty slice - generateTestScopeMetricsSlice().MoveAndAppendTo(dest) - assert.EqualValues(t, 2*expectedSlice.Len(), dest.Len()) - for i := 0; i < expectedSlice.Len(); i++ { - assert.EqualValues(t, expectedSlice.At(i), dest.At(i)) - assert.EqualValues(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) - } -} - -func TestScopeMetricsSlice_RemoveIf(t *testing.T) { - // Test RemoveIf on empty slice - emptySlice := NewScopeMetricsSlice() - emptySlice.RemoveIf(func(el ScopeMetrics) bool { - t.Fail() - return false - }) - - // Test RemoveIf - filtered := generateTestScopeMetricsSlice() - pos := 0 - filtered.RemoveIf(func(el ScopeMetrics) bool { - pos++ - return pos%3 == 0 - }) - assert.Equal(t, 5, filtered.Len()) -} - -func TestScopeMetrics_MoveTo(t *testing.T) { - ms := generateTestScopeMetrics() - dest := NewScopeMetrics() - ms.MoveTo(dest) - assert.EqualValues(t, NewScopeMetrics(), ms) - assert.EqualValues(t, generateTestScopeMetrics(), dest) -} - -func TestScopeMetrics_CopyTo(t *testing.T) { - ms := NewScopeMetrics() - orig := NewScopeMetrics() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestScopeMetrics() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestScopeMetrics_Scope(t *testing.T) { - ms := NewScopeMetrics() - fillTestInstrumentationScope(ms.Scope()) - assert.EqualValues(t, generateTestInstrumentationScope(), ms.Scope()) -} - -func TestScopeMetrics_SchemaUrl(t *testing.T) { - ms := NewScopeMetrics() - assert.EqualValues(t, "", ms.SchemaUrl()) - testValSchemaUrl := "https://opentelemetry.io/schemas/1.5.0" - ms.SetSchemaUrl(testValSchemaUrl) - assert.EqualValues(t, testValSchemaUrl, ms.SchemaUrl()) -} - -func TestScopeMetrics_Metrics(t *testing.T) { - ms := NewScopeMetrics() - assert.EqualValues(t, NewMetricSlice(), ms.Metrics()) - fillTestMetricSlice(ms.Metrics()) - testValMetrics := generateTestMetricSlice() - 
assert.EqualValues(t, testValMetrics, ms.Metrics()) -} - -func TestMetricSlice(t *testing.T) { - es := NewMetricSlice() - assert.EqualValues(t, 0, es.Len()) - es = newMetricSlice(&[]*otlpmetrics.Metric{}) - assert.EqualValues(t, 0, es.Len()) - - es.EnsureCapacity(7) - emptyVal := newMetric(&otlpmetrics.Metric{}) - testVal := generateTestMetric() - assert.EqualValues(t, 7, cap(*es.orig)) - for i := 0; i < es.Len(); i++ { - el := es.AppendEmpty() - assert.EqualValues(t, emptyVal, el) - fillTestMetric(el) - assert.EqualValues(t, testVal, el) - } -} - -func TestMetricSlice_CopyTo(t *testing.T) { - dest := NewMetricSlice() - // Test CopyTo to empty - NewMetricSlice().CopyTo(dest) - assert.EqualValues(t, NewMetricSlice(), dest) - - // Test CopyTo larger slice - generateTestMetricSlice().CopyTo(dest) - assert.EqualValues(t, generateTestMetricSlice(), dest) - - // Test CopyTo same size slice - generateTestMetricSlice().CopyTo(dest) - assert.EqualValues(t, generateTestMetricSlice(), dest) -} - -func TestMetricSlice_EnsureCapacity(t *testing.T) { - es := generateTestMetricSlice() - // Test ensure smaller capacity. - const ensureSmallLen = 4 - expectedEs := make(map[*otlpmetrics.Metric]bool) - for i := 0; i < es.Len(); i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, es.Len(), len(expectedEs)) - es.EnsureCapacity(ensureSmallLen) - assert.Less(t, ensureSmallLen, es.Len()) - foundEs := make(map[*otlpmetrics.Metric]bool, es.Len()) - for i := 0; i < es.Len(); i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) - - // Test ensure larger capacity - const ensureLargeLen = 9 - oldLen := es.Len() - expectedEs = make(map[*otlpmetrics.Metric]bool, oldLen) - for i := 0; i < oldLen; i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, oldLen, len(expectedEs)) - es.EnsureCapacity(ensureLargeLen) - assert.Equal(t, ensureLargeLen, cap(*es.orig)) - foundEs = make(map[*otlpmetrics.Metric]bool, oldLen) - for i := 0; i < oldLen; i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) -} - -func TestMetricSlice_MoveAndAppendTo(t *testing.T) { - // Test MoveAndAppendTo to empty - expectedSlice := generateTestMetricSlice() - dest := NewMetricSlice() - src := generateTestMetricSlice() - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestMetricSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo empty slice - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestMetricSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo not empty slice - generateTestMetricSlice().MoveAndAppendTo(dest) - assert.EqualValues(t, 2*expectedSlice.Len(), dest.Len()) - for i := 0; i < expectedSlice.Len(); i++ { - assert.EqualValues(t, expectedSlice.At(i), dest.At(i)) - assert.EqualValues(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) - } -} - -func TestMetricSlice_RemoveIf(t *testing.T) { - // Test RemoveIf on empty slice - emptySlice := NewMetricSlice() - emptySlice.RemoveIf(func(el Metric) bool { - t.Fail() - return false - }) - - // Test RemoveIf - filtered := generateTestMetricSlice() - pos := 0 - filtered.RemoveIf(func(el Metric) bool { - pos++ - return pos%3 == 0 - }) - assert.Equal(t, 5, filtered.Len()) -} - -func TestMetric_MoveTo(t *testing.T) { - ms := generateTestMetric() - dest := NewMetric() - ms.MoveTo(dest) - assert.EqualValues(t, NewMetric(), ms) - 
assert.EqualValues(t, generateTestMetric(), dest) -} - -func TestMetric_CopyTo(t *testing.T) { - ms := NewMetric() - orig := NewMetric() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestMetric() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestMetric_Name(t *testing.T) { - ms := NewMetric() - assert.EqualValues(t, "", ms.Name()) - testValName := "test_name" - ms.SetName(testValName) - assert.EqualValues(t, testValName, ms.Name()) -} - -func TestMetric_Description(t *testing.T) { - ms := NewMetric() - assert.EqualValues(t, "", ms.Description()) - testValDescription := "test_description" - ms.SetDescription(testValDescription) - assert.EqualValues(t, testValDescription, ms.Description()) -} - -func TestMetric_Unit(t *testing.T) { - ms := NewMetric() - assert.EqualValues(t, "", ms.Unit()) - testValUnit := "1" - ms.SetUnit(testValUnit) - assert.EqualValues(t, testValUnit, ms.Unit()) -} - -func TestMetricDataType(t *testing.T) { - tv := NewMetric() - assert.Equal(t, MetricDataTypeNone, tv.DataType()) - assert.Equal(t, "", MetricDataType(1000).String()) - assert.Equal(t, Gauge{}, tv.Gauge()) - assert.Equal(t, Sum{}, tv.Sum()) - assert.Equal(t, Histogram{}, tv.Histogram()) - assert.Equal(t, ExponentialHistogram{}, tv.ExponentialHistogram()) - assert.Equal(t, Summary{}, tv.Summary()) - tv.SetDataType(MetricDataTypeGauge) - fillTestGauge(tv.Gauge()) - assert.Equal(t, MetricDataTypeGauge, tv.DataType()) - tv.SetDataType(MetricDataTypeSum) - fillTestSum(tv.Sum()) - assert.Equal(t, MetricDataTypeSum, tv.DataType()) - tv.SetDataType(MetricDataTypeHistogram) - fillTestHistogram(tv.Histogram()) - assert.Equal(t, MetricDataTypeHistogram, tv.DataType()) - tv.SetDataType(MetricDataTypeExponentialHistogram) - fillTestExponentialHistogram(tv.ExponentialHistogram()) - assert.Equal(t, MetricDataTypeExponentialHistogram, tv.DataType()) - tv.SetDataType(MetricDataTypeSummary) - fillTestSummary(tv.Summary()) - assert.Equal(t, MetricDataTypeSummary, tv.DataType()) -} - -func TestMetric_Gauge(t *testing.T) { - ms := NewMetric() - ms.SetDataType(MetricDataTypeGauge) - fillTestGauge(ms.Gauge()) - assert.EqualValues(t, generateTestGauge(), ms.Gauge()) -} - -func TestMetric_CopyTo_Gauge(t *testing.T) { - ms := NewMetric() - ms.SetDataType(MetricDataTypeGauge) - fillTestGauge(ms.Gauge()) - dest := NewMetric() - ms.CopyTo(dest) - assert.EqualValues(t, ms, dest) -} - -func TestMetric_Sum(t *testing.T) { - ms := NewMetric() - ms.SetDataType(MetricDataTypeSum) - fillTestSum(ms.Sum()) - assert.EqualValues(t, generateTestSum(), ms.Sum()) -} - -func TestMetric_CopyTo_Sum(t *testing.T) { - ms := NewMetric() - ms.SetDataType(MetricDataTypeSum) - fillTestSum(ms.Sum()) - dest := NewMetric() - ms.CopyTo(dest) - assert.EqualValues(t, ms, dest) -} - -func TestMetric_Histogram(t *testing.T) { - ms := NewMetric() - ms.SetDataType(MetricDataTypeHistogram) - fillTestHistogram(ms.Histogram()) - assert.EqualValues(t, generateTestHistogram(), ms.Histogram()) -} - -func TestMetric_CopyTo_Histogram(t *testing.T) { - ms := NewMetric() - ms.SetDataType(MetricDataTypeHistogram) - fillTestHistogram(ms.Histogram()) - dest := NewMetric() - ms.CopyTo(dest) - assert.EqualValues(t, ms, dest) -} - -func TestMetric_ExponentialHistogram(t *testing.T) { - ms := NewMetric() - ms.SetDataType(MetricDataTypeExponentialHistogram) - fillTestExponentialHistogram(ms.ExponentialHistogram()) - assert.EqualValues(t, generateTestExponentialHistogram(), ms.ExponentialHistogram()) -} - -func 
TestMetric_CopyTo_ExponentialHistogram(t *testing.T) { - ms := NewMetric() - ms.SetDataType(MetricDataTypeExponentialHistogram) - fillTestExponentialHistogram(ms.ExponentialHistogram()) - dest := NewMetric() - ms.CopyTo(dest) - assert.EqualValues(t, ms, dest) -} - -func TestMetric_Summary(t *testing.T) { - ms := NewMetric() - ms.SetDataType(MetricDataTypeSummary) - fillTestSummary(ms.Summary()) - assert.EqualValues(t, generateTestSummary(), ms.Summary()) -} - -func TestMetric_CopyTo_Summary(t *testing.T) { - ms := NewMetric() - ms.SetDataType(MetricDataTypeSummary) - fillTestSummary(ms.Summary()) - dest := NewMetric() - ms.CopyTo(dest) - assert.EqualValues(t, ms, dest) -} - -func TestGauge_MoveTo(t *testing.T) { - ms := generateTestGauge() - dest := NewGauge() - ms.MoveTo(dest) - assert.EqualValues(t, NewGauge(), ms) - assert.EqualValues(t, generateTestGauge(), dest) -} - -func TestGauge_CopyTo(t *testing.T) { - ms := NewGauge() - orig := NewGauge() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestGauge() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestGauge_DataPoints(t *testing.T) { - ms := NewGauge() - assert.EqualValues(t, NewNumberDataPointSlice(), ms.DataPoints()) - fillTestNumberDataPointSlice(ms.DataPoints()) - testValDataPoints := generateTestNumberDataPointSlice() - assert.EqualValues(t, testValDataPoints, ms.DataPoints()) -} - -func TestSum_MoveTo(t *testing.T) { - ms := generateTestSum() - dest := NewSum() - ms.MoveTo(dest) - assert.EqualValues(t, NewSum(), ms) - assert.EqualValues(t, generateTestSum(), dest) -} - -func TestSum_CopyTo(t *testing.T) { - ms := NewSum() - orig := NewSum() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestSum() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestSum_AggregationTemporality(t *testing.T) { - ms := NewSum() - assert.EqualValues(t, MetricAggregationTemporalityUnspecified, ms.AggregationTemporality()) - testValAggregationTemporality := MetricAggregationTemporalityCumulative - ms.SetAggregationTemporality(testValAggregationTemporality) - assert.EqualValues(t, testValAggregationTemporality, ms.AggregationTemporality()) -} - -func TestSum_IsMonotonic(t *testing.T) { - ms := NewSum() - assert.EqualValues(t, false, ms.IsMonotonic()) - testValIsMonotonic := true - ms.SetIsMonotonic(testValIsMonotonic) - assert.EqualValues(t, testValIsMonotonic, ms.IsMonotonic()) -} - -func TestSum_DataPoints(t *testing.T) { - ms := NewSum() - assert.EqualValues(t, NewNumberDataPointSlice(), ms.DataPoints()) - fillTestNumberDataPointSlice(ms.DataPoints()) - testValDataPoints := generateTestNumberDataPointSlice() - assert.EqualValues(t, testValDataPoints, ms.DataPoints()) -} - -func TestHistogram_MoveTo(t *testing.T) { - ms := generateTestHistogram() - dest := NewHistogram() - ms.MoveTo(dest) - assert.EqualValues(t, NewHistogram(), ms) - assert.EqualValues(t, generateTestHistogram(), dest) -} - -func TestHistogram_CopyTo(t *testing.T) { - ms := NewHistogram() - orig := NewHistogram() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestHistogram() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestHistogram_AggregationTemporality(t *testing.T) { - ms := NewHistogram() - assert.EqualValues(t, MetricAggregationTemporalityUnspecified, ms.AggregationTemporality()) - testValAggregationTemporality := MetricAggregationTemporalityCumulative - ms.SetAggregationTemporality(testValAggregationTemporality) - assert.EqualValues(t, 
testValAggregationTemporality, ms.AggregationTemporality()) -} - -func TestHistogram_DataPoints(t *testing.T) { - ms := NewHistogram() - assert.EqualValues(t, NewHistogramDataPointSlice(), ms.DataPoints()) - fillTestHistogramDataPointSlice(ms.DataPoints()) - testValDataPoints := generateTestHistogramDataPointSlice() - assert.EqualValues(t, testValDataPoints, ms.DataPoints()) -} - -func TestExponentialHistogram_MoveTo(t *testing.T) { - ms := generateTestExponentialHistogram() - dest := NewExponentialHistogram() - ms.MoveTo(dest) - assert.EqualValues(t, NewExponentialHistogram(), ms) - assert.EqualValues(t, generateTestExponentialHistogram(), dest) -} - -func TestExponentialHistogram_CopyTo(t *testing.T) { - ms := NewExponentialHistogram() - orig := NewExponentialHistogram() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestExponentialHistogram() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestExponentialHistogram_AggregationTemporality(t *testing.T) { - ms := NewExponentialHistogram() - assert.EqualValues(t, MetricAggregationTemporalityUnspecified, ms.AggregationTemporality()) - testValAggregationTemporality := MetricAggregationTemporalityCumulative - ms.SetAggregationTemporality(testValAggregationTemporality) - assert.EqualValues(t, testValAggregationTemporality, ms.AggregationTemporality()) -} - -func TestExponentialHistogram_DataPoints(t *testing.T) { - ms := NewExponentialHistogram() - assert.EqualValues(t, NewExponentialHistogramDataPointSlice(), ms.DataPoints()) - fillTestExponentialHistogramDataPointSlice(ms.DataPoints()) - testValDataPoints := generateTestExponentialHistogramDataPointSlice() - assert.EqualValues(t, testValDataPoints, ms.DataPoints()) -} - -func TestSummary_MoveTo(t *testing.T) { - ms := generateTestSummary() - dest := NewSummary() - ms.MoveTo(dest) - assert.EqualValues(t, NewSummary(), ms) - assert.EqualValues(t, generateTestSummary(), dest) -} - -func TestSummary_CopyTo(t *testing.T) { - ms := NewSummary() - orig := NewSummary() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestSummary() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestSummary_DataPoints(t *testing.T) { - ms := NewSummary() - assert.EqualValues(t, NewSummaryDataPointSlice(), ms.DataPoints()) - fillTestSummaryDataPointSlice(ms.DataPoints()) - testValDataPoints := generateTestSummaryDataPointSlice() - assert.EqualValues(t, testValDataPoints, ms.DataPoints()) -} - -func TestNumberDataPointSlice(t *testing.T) { - es := NewNumberDataPointSlice() - assert.EqualValues(t, 0, es.Len()) - es = newNumberDataPointSlice(&[]*otlpmetrics.NumberDataPoint{}) - assert.EqualValues(t, 0, es.Len()) - - es.EnsureCapacity(7) - emptyVal := newNumberDataPoint(&otlpmetrics.NumberDataPoint{}) - testVal := generateTestNumberDataPoint() - assert.EqualValues(t, 7, cap(*es.orig)) - for i := 0; i < es.Len(); i++ { - el := es.AppendEmpty() - assert.EqualValues(t, emptyVal, el) - fillTestNumberDataPoint(el) - assert.EqualValues(t, testVal, el) - } -} - -func TestNumberDataPointSlice_CopyTo(t *testing.T) { - dest := NewNumberDataPointSlice() - // Test CopyTo to empty - NewNumberDataPointSlice().CopyTo(dest) - assert.EqualValues(t, NewNumberDataPointSlice(), dest) - - // Test CopyTo larger slice - generateTestNumberDataPointSlice().CopyTo(dest) - assert.EqualValues(t, generateTestNumberDataPointSlice(), dest) - - // Test CopyTo same size slice - generateTestNumberDataPointSlice().CopyTo(dest) - assert.EqualValues(t, generateTestNumberDataPointSlice(), 
dest) -} - -func TestNumberDataPointSlice_EnsureCapacity(t *testing.T) { - es := generateTestNumberDataPointSlice() - // Test ensure smaller capacity. - const ensureSmallLen = 4 - expectedEs := make(map[*otlpmetrics.NumberDataPoint]bool) - for i := 0; i < es.Len(); i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, es.Len(), len(expectedEs)) - es.EnsureCapacity(ensureSmallLen) - assert.Less(t, ensureSmallLen, es.Len()) - foundEs := make(map[*otlpmetrics.NumberDataPoint]bool, es.Len()) - for i := 0; i < es.Len(); i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) - - // Test ensure larger capacity - const ensureLargeLen = 9 - oldLen := es.Len() - expectedEs = make(map[*otlpmetrics.NumberDataPoint]bool, oldLen) - for i := 0; i < oldLen; i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, oldLen, len(expectedEs)) - es.EnsureCapacity(ensureLargeLen) - assert.Equal(t, ensureLargeLen, cap(*es.orig)) - foundEs = make(map[*otlpmetrics.NumberDataPoint]bool, oldLen) - for i := 0; i < oldLen; i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) -} - -func TestNumberDataPointSlice_MoveAndAppendTo(t *testing.T) { - // Test MoveAndAppendTo to empty - expectedSlice := generateTestNumberDataPointSlice() - dest := NewNumberDataPointSlice() - src := generateTestNumberDataPointSlice() - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestNumberDataPointSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo empty slice - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestNumberDataPointSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo not empty slice - generateTestNumberDataPointSlice().MoveAndAppendTo(dest) - assert.EqualValues(t, 2*expectedSlice.Len(), dest.Len()) - for i := 0; i < expectedSlice.Len(); i++ { - assert.EqualValues(t, expectedSlice.At(i), dest.At(i)) - assert.EqualValues(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) - } -} - -func TestNumberDataPointSlice_RemoveIf(t *testing.T) { - // Test RemoveIf on empty slice - emptySlice := NewNumberDataPointSlice() - emptySlice.RemoveIf(func(el NumberDataPoint) bool { - t.Fail() - return false - }) - - // Test RemoveIf - filtered := generateTestNumberDataPointSlice() - pos := 0 - filtered.RemoveIf(func(el NumberDataPoint) bool { - pos++ - return pos%3 == 0 - }) - assert.Equal(t, 5, filtered.Len()) -} - -func TestNumberDataPoint_MoveTo(t *testing.T) { - ms := generateTestNumberDataPoint() - dest := NewNumberDataPoint() - ms.MoveTo(dest) - assert.EqualValues(t, NewNumberDataPoint(), ms) - assert.EqualValues(t, generateTestNumberDataPoint(), dest) -} - -func TestNumberDataPoint_CopyTo(t *testing.T) { - ms := NewNumberDataPoint() - orig := NewNumberDataPoint() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestNumberDataPoint() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestNumberDataPoint_Attributes(t *testing.T) { - ms := NewNumberDataPoint() - assert.EqualValues(t, NewMap(), ms.Attributes()) - fillTestMap(ms.Attributes()) - testValAttributes := generateTestMap() - assert.EqualValues(t, testValAttributes, ms.Attributes()) -} - -func TestNumberDataPoint_StartTimestamp(t *testing.T) { - ms := NewNumberDataPoint() - assert.EqualValues(t, Timestamp(0), ms.StartTimestamp()) - testValStartTimestamp := Timestamp(1234567890) - 
ms.SetStartTimestamp(testValStartTimestamp) - assert.EqualValues(t, testValStartTimestamp, ms.StartTimestamp()) -} - -func TestNumberDataPoint_Timestamp(t *testing.T) { - ms := NewNumberDataPoint() - assert.EqualValues(t, Timestamp(0), ms.Timestamp()) - testValTimestamp := Timestamp(1234567890) - ms.SetTimestamp(testValTimestamp) - assert.EqualValues(t, testValTimestamp, ms.Timestamp()) -} - -func TestNumberDataPointValueType(t *testing.T) { - tv := NewNumberDataPoint() - assert.Equal(t, NumberDataPointValueTypeNone, tv.ValueType()) - assert.Equal(t, "", NumberDataPointValueType(1000).String()) - tv.SetDoubleVal(float64(17.13)) - assert.Equal(t, NumberDataPointValueTypeDouble, tv.ValueType()) - tv.SetIntVal(int64(17)) - assert.Equal(t, NumberDataPointValueTypeInt, tv.ValueType()) -} - -func TestNumberDataPoint_DoubleVal(t *testing.T) { - ms := NewNumberDataPoint() - assert.EqualValues(t, float64(0.0), ms.DoubleVal()) - testValDoubleVal := float64(17.13) - ms.SetDoubleVal(testValDoubleVal) - assert.EqualValues(t, testValDoubleVal, ms.DoubleVal()) -} - -func TestNumberDataPoint_IntVal(t *testing.T) { - ms := NewNumberDataPoint() - assert.EqualValues(t, int64(0), ms.IntVal()) - testValIntVal := int64(17) - ms.SetIntVal(testValIntVal) - assert.EqualValues(t, testValIntVal, ms.IntVal()) -} - -func TestNumberDataPoint_Exemplars(t *testing.T) { - ms := NewNumberDataPoint() - assert.EqualValues(t, NewExemplarSlice(), ms.Exemplars()) - fillTestExemplarSlice(ms.Exemplars()) - testValExemplars := generateTestExemplarSlice() - assert.EqualValues(t, testValExemplars, ms.Exemplars()) -} - -func TestNumberDataPoint_Flags(t *testing.T) { - ms := NewNumberDataPoint() - fillTestMetricDataPointFlags(ms.Flags()) - assert.EqualValues(t, generateTestMetricDataPointFlags(), ms.Flags()) -} - -func TestHistogramDataPointSlice(t *testing.T) { - es := NewHistogramDataPointSlice() - assert.EqualValues(t, 0, es.Len()) - es = newHistogramDataPointSlice(&[]*otlpmetrics.HistogramDataPoint{}) - assert.EqualValues(t, 0, es.Len()) - - es.EnsureCapacity(7) - emptyVal := newHistogramDataPoint(&otlpmetrics.HistogramDataPoint{}) - testVal := generateTestHistogramDataPoint() - assert.EqualValues(t, 7, cap(*es.orig)) - for i := 0; i < es.Len(); i++ { - el := es.AppendEmpty() - assert.EqualValues(t, emptyVal, el) - fillTestHistogramDataPoint(el) - assert.EqualValues(t, testVal, el) - } -} - -func TestHistogramDataPointSlice_CopyTo(t *testing.T) { - dest := NewHistogramDataPointSlice() - // Test CopyTo to empty - NewHistogramDataPointSlice().CopyTo(dest) - assert.EqualValues(t, NewHistogramDataPointSlice(), dest) - - // Test CopyTo larger slice - generateTestHistogramDataPointSlice().CopyTo(dest) - assert.EqualValues(t, generateTestHistogramDataPointSlice(), dest) - - // Test CopyTo same size slice - generateTestHistogramDataPointSlice().CopyTo(dest) - assert.EqualValues(t, generateTestHistogramDataPointSlice(), dest) -} - -func TestHistogramDataPointSlice_EnsureCapacity(t *testing.T) { - es := generateTestHistogramDataPointSlice() - // Test ensure smaller capacity. 
- const ensureSmallLen = 4 - expectedEs := make(map[*otlpmetrics.HistogramDataPoint]bool) - for i := 0; i < es.Len(); i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, es.Len(), len(expectedEs)) - es.EnsureCapacity(ensureSmallLen) - assert.Less(t, ensureSmallLen, es.Len()) - foundEs := make(map[*otlpmetrics.HistogramDataPoint]bool, es.Len()) - for i := 0; i < es.Len(); i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) - - // Test ensure larger capacity - const ensureLargeLen = 9 - oldLen := es.Len() - expectedEs = make(map[*otlpmetrics.HistogramDataPoint]bool, oldLen) - for i := 0; i < oldLen; i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, oldLen, len(expectedEs)) - es.EnsureCapacity(ensureLargeLen) - assert.Equal(t, ensureLargeLen, cap(*es.orig)) - foundEs = make(map[*otlpmetrics.HistogramDataPoint]bool, oldLen) - for i := 0; i < oldLen; i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) -} - -func TestHistogramDataPointSlice_MoveAndAppendTo(t *testing.T) { - // Test MoveAndAppendTo to empty - expectedSlice := generateTestHistogramDataPointSlice() - dest := NewHistogramDataPointSlice() - src := generateTestHistogramDataPointSlice() - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestHistogramDataPointSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo empty slice - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestHistogramDataPointSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo not empty slice - generateTestHistogramDataPointSlice().MoveAndAppendTo(dest) - assert.EqualValues(t, 2*expectedSlice.Len(), dest.Len()) - for i := 0; i < expectedSlice.Len(); i++ { - assert.EqualValues(t, expectedSlice.At(i), dest.At(i)) - assert.EqualValues(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) - } -} - -func TestHistogramDataPointSlice_RemoveIf(t *testing.T) { - // Test RemoveIf on empty slice - emptySlice := NewHistogramDataPointSlice() - emptySlice.RemoveIf(func(el HistogramDataPoint) bool { - t.Fail() - return false - }) - - // Test RemoveIf - filtered := generateTestHistogramDataPointSlice() - pos := 0 - filtered.RemoveIf(func(el HistogramDataPoint) bool { - pos++ - return pos%3 == 0 - }) - assert.Equal(t, 5, filtered.Len()) -} - -func TestHistogramDataPoint_MoveTo(t *testing.T) { - ms := generateTestHistogramDataPoint() - dest := NewHistogramDataPoint() - ms.MoveTo(dest) - assert.EqualValues(t, NewHistogramDataPoint(), ms) - assert.EqualValues(t, generateTestHistogramDataPoint(), dest) -} - -func TestHistogramDataPoint_CopyTo(t *testing.T) { - ms := NewHistogramDataPoint() - orig := NewHistogramDataPoint() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestHistogramDataPoint() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestHistogramDataPoint_Attributes(t *testing.T) { - ms := NewHistogramDataPoint() - assert.EqualValues(t, NewMap(), ms.Attributes()) - fillTestMap(ms.Attributes()) - testValAttributes := generateTestMap() - assert.EqualValues(t, testValAttributes, ms.Attributes()) -} - -func TestHistogramDataPoint_StartTimestamp(t *testing.T) { - ms := NewHistogramDataPoint() - assert.EqualValues(t, Timestamp(0), ms.StartTimestamp()) - testValStartTimestamp := Timestamp(1234567890) - ms.SetStartTimestamp(testValStartTimestamp) - assert.EqualValues(t, 
testValStartTimestamp, ms.StartTimestamp()) -} - -func TestHistogramDataPoint_Timestamp(t *testing.T) { - ms := NewHistogramDataPoint() - assert.EqualValues(t, Timestamp(0), ms.Timestamp()) - testValTimestamp := Timestamp(1234567890) - ms.SetTimestamp(testValTimestamp) - assert.EqualValues(t, testValTimestamp, ms.Timestamp()) -} - -func TestHistogramDataPoint_Count(t *testing.T) { - ms := NewHistogramDataPoint() - assert.EqualValues(t, uint64(0), ms.Count()) - testValCount := uint64(17) - ms.SetCount(testValCount) - assert.EqualValues(t, testValCount, ms.Count()) -} - -func TestHistogramDataPoint_Sum(t *testing.T) { - ms := NewHistogramDataPoint() - assert.EqualValues(t, float64(0.0), ms.Sum()) - testValSum := float64(17.13) - ms.SetSum(testValSum) - assert.EqualValues(t, testValSum, ms.Sum()) -} - -func TestHistogramDataPoint_BucketCounts(t *testing.T) { - ms := NewHistogramDataPoint() - assert.EqualValues(t, ImmutableUInt64Slice{}, ms.BucketCounts()) - testValBucketCounts := NewImmutableUInt64Slice([]uint64{1, 2, 3}) - ms.SetBucketCounts(testValBucketCounts) - assert.EqualValues(t, testValBucketCounts, ms.BucketCounts()) -} - -func TestHistogramDataPoint_ExplicitBounds(t *testing.T) { - ms := NewHistogramDataPoint() - assert.EqualValues(t, ImmutableFloat64Slice{}, ms.ExplicitBounds()) - testValExplicitBounds := NewImmutableFloat64Slice([]float64{1, 2, 3}) - ms.SetExplicitBounds(testValExplicitBounds) - assert.EqualValues(t, testValExplicitBounds, ms.ExplicitBounds()) -} - -func TestHistogramDataPoint_Exemplars(t *testing.T) { - ms := NewHistogramDataPoint() - assert.EqualValues(t, NewExemplarSlice(), ms.Exemplars()) - fillTestExemplarSlice(ms.Exemplars()) - testValExemplars := generateTestExemplarSlice() - assert.EqualValues(t, testValExemplars, ms.Exemplars()) -} - -func TestHistogramDataPoint_Flags(t *testing.T) { - ms := NewHistogramDataPoint() - fillTestMetricDataPointFlags(ms.Flags()) - assert.EqualValues(t, generateTestMetricDataPointFlags(), ms.Flags()) -} - -func TestHistogramDataPoint_Min(t *testing.T) { - ms := NewHistogramDataPoint() - assert.EqualValues(t, float64(0.0), ms.Min()) - testValMin := float64(9.23) - ms.SetMin(testValMin) - assert.EqualValues(t, testValMin, ms.Min()) -} - -func TestHistogramDataPoint_Max(t *testing.T) { - ms := NewHistogramDataPoint() - assert.EqualValues(t, float64(0.0), ms.Max()) - testValMax := float64(182.55) - ms.SetMax(testValMax) - assert.EqualValues(t, testValMax, ms.Max()) -} - -func TestExponentialHistogramDataPointSlice(t *testing.T) { - es := NewExponentialHistogramDataPointSlice() - assert.EqualValues(t, 0, es.Len()) - es = newExponentialHistogramDataPointSlice(&[]*otlpmetrics.ExponentialHistogramDataPoint{}) - assert.EqualValues(t, 0, es.Len()) - - es.EnsureCapacity(7) - emptyVal := newExponentialHistogramDataPoint(&otlpmetrics.ExponentialHistogramDataPoint{}) - testVal := generateTestExponentialHistogramDataPoint() - assert.EqualValues(t, 7, cap(*es.orig)) - for i := 0; i < es.Len(); i++ { - el := es.AppendEmpty() - assert.EqualValues(t, emptyVal, el) - fillTestExponentialHistogramDataPoint(el) - assert.EqualValues(t, testVal, el) - } -} - -func TestExponentialHistogramDataPointSlice_CopyTo(t *testing.T) { - dest := NewExponentialHistogramDataPointSlice() - // Test CopyTo to empty - NewExponentialHistogramDataPointSlice().CopyTo(dest) - assert.EqualValues(t, NewExponentialHistogramDataPointSlice(), dest) - - // Test CopyTo larger slice - generateTestExponentialHistogramDataPointSlice().CopyTo(dest) - assert.EqualValues(t, 
generateTestExponentialHistogramDataPointSlice(), dest) - - // Test CopyTo same size slice - generateTestExponentialHistogramDataPointSlice().CopyTo(dest) - assert.EqualValues(t, generateTestExponentialHistogramDataPointSlice(), dest) -} - -func TestExponentialHistogramDataPointSlice_EnsureCapacity(t *testing.T) { - es := generateTestExponentialHistogramDataPointSlice() - // Test ensure smaller capacity. - const ensureSmallLen = 4 - expectedEs := make(map[*otlpmetrics.ExponentialHistogramDataPoint]bool) - for i := 0; i < es.Len(); i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, es.Len(), len(expectedEs)) - es.EnsureCapacity(ensureSmallLen) - assert.Less(t, ensureSmallLen, es.Len()) - foundEs := make(map[*otlpmetrics.ExponentialHistogramDataPoint]bool, es.Len()) - for i := 0; i < es.Len(); i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) - - // Test ensure larger capacity - const ensureLargeLen = 9 - oldLen := es.Len() - expectedEs = make(map[*otlpmetrics.ExponentialHistogramDataPoint]bool, oldLen) - for i := 0; i < oldLen; i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, oldLen, len(expectedEs)) - es.EnsureCapacity(ensureLargeLen) - assert.Equal(t, ensureLargeLen, cap(*es.orig)) - foundEs = make(map[*otlpmetrics.ExponentialHistogramDataPoint]bool, oldLen) - for i := 0; i < oldLen; i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) -} - -func TestExponentialHistogramDataPointSlice_MoveAndAppendTo(t *testing.T) { - // Test MoveAndAppendTo to empty - expectedSlice := generateTestExponentialHistogramDataPointSlice() - dest := NewExponentialHistogramDataPointSlice() - src := generateTestExponentialHistogramDataPointSlice() - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestExponentialHistogramDataPointSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo empty slice - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestExponentialHistogramDataPointSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo not empty slice - generateTestExponentialHistogramDataPointSlice().MoveAndAppendTo(dest) - assert.EqualValues(t, 2*expectedSlice.Len(), dest.Len()) - for i := 0; i < expectedSlice.Len(); i++ { - assert.EqualValues(t, expectedSlice.At(i), dest.At(i)) - assert.EqualValues(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) - } -} - -func TestExponentialHistogramDataPointSlice_RemoveIf(t *testing.T) { - // Test RemoveIf on empty slice - emptySlice := NewExponentialHistogramDataPointSlice() - emptySlice.RemoveIf(func(el ExponentialHistogramDataPoint) bool { - t.Fail() - return false - }) - - // Test RemoveIf - filtered := generateTestExponentialHistogramDataPointSlice() - pos := 0 - filtered.RemoveIf(func(el ExponentialHistogramDataPoint) bool { - pos++ - return pos%3 == 0 - }) - assert.Equal(t, 5, filtered.Len()) -} - -func TestExponentialHistogramDataPoint_MoveTo(t *testing.T) { - ms := generateTestExponentialHistogramDataPoint() - dest := NewExponentialHistogramDataPoint() - ms.MoveTo(dest) - assert.EqualValues(t, NewExponentialHistogramDataPoint(), ms) - assert.EqualValues(t, generateTestExponentialHistogramDataPoint(), dest) -} - -func TestExponentialHistogramDataPoint_CopyTo(t *testing.T) { - ms := NewExponentialHistogramDataPoint() - orig := NewExponentialHistogramDataPoint() - orig.CopyTo(ms) - 
assert.EqualValues(t, orig, ms) - orig = generateTestExponentialHistogramDataPoint() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestExponentialHistogramDataPoint_Attributes(t *testing.T) { - ms := NewExponentialHistogramDataPoint() - assert.EqualValues(t, NewMap(), ms.Attributes()) - fillTestMap(ms.Attributes()) - testValAttributes := generateTestMap() - assert.EqualValues(t, testValAttributes, ms.Attributes()) -} - -func TestExponentialHistogramDataPoint_StartTimestamp(t *testing.T) { - ms := NewExponentialHistogramDataPoint() - assert.EqualValues(t, Timestamp(0), ms.StartTimestamp()) - testValStartTimestamp := Timestamp(1234567890) - ms.SetStartTimestamp(testValStartTimestamp) - assert.EqualValues(t, testValStartTimestamp, ms.StartTimestamp()) -} - -func TestExponentialHistogramDataPoint_Timestamp(t *testing.T) { - ms := NewExponentialHistogramDataPoint() - assert.EqualValues(t, Timestamp(0), ms.Timestamp()) - testValTimestamp := Timestamp(1234567890) - ms.SetTimestamp(testValTimestamp) - assert.EqualValues(t, testValTimestamp, ms.Timestamp()) -} - -func TestExponentialHistogramDataPoint_Count(t *testing.T) { - ms := NewExponentialHistogramDataPoint() - assert.EqualValues(t, uint64(0), ms.Count()) - testValCount := uint64(17) - ms.SetCount(testValCount) - assert.EqualValues(t, testValCount, ms.Count()) -} - -func TestExponentialHistogramDataPoint_Sum(t *testing.T) { - ms := NewExponentialHistogramDataPoint() - assert.EqualValues(t, float64(0.0), ms.Sum()) - testValSum := float64(17.13) - ms.SetSum(testValSum) - assert.EqualValues(t, testValSum, ms.Sum()) -} - -func TestExponentialHistogramDataPoint_Scale(t *testing.T) { - ms := NewExponentialHistogramDataPoint() - assert.EqualValues(t, int32(0), ms.Scale()) - testValScale := int32(4) - ms.SetScale(testValScale) - assert.EqualValues(t, testValScale, ms.Scale()) -} - -func TestExponentialHistogramDataPoint_ZeroCount(t *testing.T) { - ms := NewExponentialHistogramDataPoint() - assert.EqualValues(t, uint64(0), ms.ZeroCount()) - testValZeroCount := uint64(201) - ms.SetZeroCount(testValZeroCount) - assert.EqualValues(t, testValZeroCount, ms.ZeroCount()) -} - -func TestExponentialHistogramDataPoint_Positive(t *testing.T) { - ms := NewExponentialHistogramDataPoint() - fillTestBuckets(ms.Positive()) - assert.EqualValues(t, generateTestBuckets(), ms.Positive()) -} - -func TestExponentialHistogramDataPoint_Negative(t *testing.T) { - ms := NewExponentialHistogramDataPoint() - fillTestBuckets(ms.Negative()) - assert.EqualValues(t, generateTestBuckets(), ms.Negative()) -} - -func TestExponentialHistogramDataPoint_Exemplars(t *testing.T) { - ms := NewExponentialHistogramDataPoint() - assert.EqualValues(t, NewExemplarSlice(), ms.Exemplars()) - fillTestExemplarSlice(ms.Exemplars()) - testValExemplars := generateTestExemplarSlice() - assert.EqualValues(t, testValExemplars, ms.Exemplars()) -} - -func TestExponentialHistogramDataPoint_Flags(t *testing.T) { - ms := NewExponentialHistogramDataPoint() - fillTestMetricDataPointFlags(ms.Flags()) - assert.EqualValues(t, generateTestMetricDataPointFlags(), ms.Flags()) -} - -func TestExponentialHistogramDataPoint_Min(t *testing.T) { - ms := NewExponentialHistogramDataPoint() - assert.EqualValues(t, float64(0.0), ms.Min()) - testValMin := float64(9.23) - ms.SetMin(testValMin) - assert.EqualValues(t, testValMin, ms.Min()) -} - -func TestExponentialHistogramDataPoint_Max(t *testing.T) { - ms := NewExponentialHistogramDataPoint() - assert.EqualValues(t, float64(0.0), ms.Max()) - testValMax := 
float64(182.55) - ms.SetMax(testValMax) - assert.EqualValues(t, testValMax, ms.Max()) -} - -func TestBuckets_MoveTo(t *testing.T) { - ms := generateTestBuckets() - dest := NewBuckets() - ms.MoveTo(dest) - assert.EqualValues(t, NewBuckets(), ms) - assert.EqualValues(t, generateTestBuckets(), dest) -} - -func TestBuckets_CopyTo(t *testing.T) { - ms := NewBuckets() - orig := NewBuckets() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestBuckets() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestBuckets_Offset(t *testing.T) { - ms := NewBuckets() - assert.EqualValues(t, int32(0), ms.Offset()) - testValOffset := int32(909) - ms.SetOffset(testValOffset) - assert.EqualValues(t, testValOffset, ms.Offset()) -} - -func TestBuckets_BucketCounts(t *testing.T) { - ms := NewBuckets() - assert.EqualValues(t, ImmutableUInt64Slice{}, ms.BucketCounts()) - testValBucketCounts := NewImmutableUInt64Slice([]uint64{1, 2, 3}) - ms.SetBucketCounts(testValBucketCounts) - assert.EqualValues(t, testValBucketCounts, ms.BucketCounts()) -} - -func TestSummaryDataPointSlice(t *testing.T) { - es := NewSummaryDataPointSlice() - assert.EqualValues(t, 0, es.Len()) - es = newSummaryDataPointSlice(&[]*otlpmetrics.SummaryDataPoint{}) - assert.EqualValues(t, 0, es.Len()) - - es.EnsureCapacity(7) - emptyVal := newSummaryDataPoint(&otlpmetrics.SummaryDataPoint{}) - testVal := generateTestSummaryDataPoint() - assert.EqualValues(t, 7, cap(*es.orig)) - for i := 0; i < es.Len(); i++ { - el := es.AppendEmpty() - assert.EqualValues(t, emptyVal, el) - fillTestSummaryDataPoint(el) - assert.EqualValues(t, testVal, el) - } -} - -func TestSummaryDataPointSlice_CopyTo(t *testing.T) { - dest := NewSummaryDataPointSlice() - // Test CopyTo to empty - NewSummaryDataPointSlice().CopyTo(dest) - assert.EqualValues(t, NewSummaryDataPointSlice(), dest) - - // Test CopyTo larger slice - generateTestSummaryDataPointSlice().CopyTo(dest) - assert.EqualValues(t, generateTestSummaryDataPointSlice(), dest) - - // Test CopyTo same size slice - generateTestSummaryDataPointSlice().CopyTo(dest) - assert.EqualValues(t, generateTestSummaryDataPointSlice(), dest) -} - -func TestSummaryDataPointSlice_EnsureCapacity(t *testing.T) { - es := generateTestSummaryDataPointSlice() - // Test ensure smaller capacity. 
- const ensureSmallLen = 4 - expectedEs := make(map[*otlpmetrics.SummaryDataPoint]bool) - for i := 0; i < es.Len(); i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, es.Len(), len(expectedEs)) - es.EnsureCapacity(ensureSmallLen) - assert.Less(t, ensureSmallLen, es.Len()) - foundEs := make(map[*otlpmetrics.SummaryDataPoint]bool, es.Len()) - for i := 0; i < es.Len(); i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) - - // Test ensure larger capacity - const ensureLargeLen = 9 - oldLen := es.Len() - expectedEs = make(map[*otlpmetrics.SummaryDataPoint]bool, oldLen) - for i := 0; i < oldLen; i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, oldLen, len(expectedEs)) - es.EnsureCapacity(ensureLargeLen) - assert.Equal(t, ensureLargeLen, cap(*es.orig)) - foundEs = make(map[*otlpmetrics.SummaryDataPoint]bool, oldLen) - for i := 0; i < oldLen; i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) -} - -func TestSummaryDataPointSlice_MoveAndAppendTo(t *testing.T) { - // Test MoveAndAppendTo to empty - expectedSlice := generateTestSummaryDataPointSlice() - dest := NewSummaryDataPointSlice() - src := generateTestSummaryDataPointSlice() - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestSummaryDataPointSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo empty slice - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestSummaryDataPointSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo not empty slice - generateTestSummaryDataPointSlice().MoveAndAppendTo(dest) - assert.EqualValues(t, 2*expectedSlice.Len(), dest.Len()) - for i := 0; i < expectedSlice.Len(); i++ { - assert.EqualValues(t, expectedSlice.At(i), dest.At(i)) - assert.EqualValues(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) - } -} - -func TestSummaryDataPointSlice_RemoveIf(t *testing.T) { - // Test RemoveIf on empty slice - emptySlice := NewSummaryDataPointSlice() - emptySlice.RemoveIf(func(el SummaryDataPoint) bool { - t.Fail() - return false - }) - - // Test RemoveIf - filtered := generateTestSummaryDataPointSlice() - pos := 0 - filtered.RemoveIf(func(el SummaryDataPoint) bool { - pos++ - return pos%3 == 0 - }) - assert.Equal(t, 5, filtered.Len()) -} - -func TestSummaryDataPoint_MoveTo(t *testing.T) { - ms := generateTestSummaryDataPoint() - dest := NewSummaryDataPoint() - ms.MoveTo(dest) - assert.EqualValues(t, NewSummaryDataPoint(), ms) - assert.EqualValues(t, generateTestSummaryDataPoint(), dest) -} - -func TestSummaryDataPoint_CopyTo(t *testing.T) { - ms := NewSummaryDataPoint() - orig := NewSummaryDataPoint() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestSummaryDataPoint() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestSummaryDataPoint_Attributes(t *testing.T) { - ms := NewSummaryDataPoint() - assert.EqualValues(t, NewMap(), ms.Attributes()) - fillTestMap(ms.Attributes()) - testValAttributes := generateTestMap() - assert.EqualValues(t, testValAttributes, ms.Attributes()) -} - -func TestSummaryDataPoint_StartTimestamp(t *testing.T) { - ms := NewSummaryDataPoint() - assert.EqualValues(t, Timestamp(0), ms.StartTimestamp()) - testValStartTimestamp := Timestamp(1234567890) - ms.SetStartTimestamp(testValStartTimestamp) - assert.EqualValues(t, testValStartTimestamp, ms.StartTimestamp()) -} - -func 
TestSummaryDataPoint_Timestamp(t *testing.T) { - ms := NewSummaryDataPoint() - assert.EqualValues(t, Timestamp(0), ms.Timestamp()) - testValTimestamp := Timestamp(1234567890) - ms.SetTimestamp(testValTimestamp) - assert.EqualValues(t, testValTimestamp, ms.Timestamp()) -} - -func TestSummaryDataPoint_Count(t *testing.T) { - ms := NewSummaryDataPoint() - assert.EqualValues(t, uint64(0), ms.Count()) - testValCount := uint64(17) - ms.SetCount(testValCount) - assert.EqualValues(t, testValCount, ms.Count()) -} - -func TestSummaryDataPoint_Sum(t *testing.T) { - ms := NewSummaryDataPoint() - assert.EqualValues(t, float64(0.0), ms.Sum()) - testValSum := float64(17.13) - ms.SetSum(testValSum) - assert.EqualValues(t, testValSum, ms.Sum()) -} - -func TestSummaryDataPoint_QuantileValues(t *testing.T) { - ms := NewSummaryDataPoint() - assert.EqualValues(t, NewValueAtQuantileSlice(), ms.QuantileValues()) - fillTestValueAtQuantileSlice(ms.QuantileValues()) - testValQuantileValues := generateTestValueAtQuantileSlice() - assert.EqualValues(t, testValQuantileValues, ms.QuantileValues()) -} - -func TestSummaryDataPoint_Flags(t *testing.T) { - ms := NewSummaryDataPoint() - fillTestMetricDataPointFlags(ms.Flags()) - assert.EqualValues(t, generateTestMetricDataPointFlags(), ms.Flags()) -} - -func TestValueAtQuantileSlice(t *testing.T) { - es := NewValueAtQuantileSlice() - assert.EqualValues(t, 0, es.Len()) - es = newValueAtQuantileSlice(&[]*otlpmetrics.SummaryDataPoint_ValueAtQuantile{}) - assert.EqualValues(t, 0, es.Len()) - - es.EnsureCapacity(7) - emptyVal := newValueAtQuantile(&otlpmetrics.SummaryDataPoint_ValueAtQuantile{}) - testVal := generateTestValueAtQuantile() - assert.EqualValues(t, 7, cap(*es.orig)) - for i := 0; i < es.Len(); i++ { - el := es.AppendEmpty() - assert.EqualValues(t, emptyVal, el) - fillTestValueAtQuantile(el) - assert.EqualValues(t, testVal, el) - } -} - -func TestValueAtQuantileSlice_CopyTo(t *testing.T) { - dest := NewValueAtQuantileSlice() - // Test CopyTo to empty - NewValueAtQuantileSlice().CopyTo(dest) - assert.EqualValues(t, NewValueAtQuantileSlice(), dest) - - // Test CopyTo larger slice - generateTestValueAtQuantileSlice().CopyTo(dest) - assert.EqualValues(t, generateTestValueAtQuantileSlice(), dest) - - // Test CopyTo same size slice - generateTestValueAtQuantileSlice().CopyTo(dest) - assert.EqualValues(t, generateTestValueAtQuantileSlice(), dest) -} - -func TestValueAtQuantileSlice_EnsureCapacity(t *testing.T) { - es := generateTestValueAtQuantileSlice() - // Test ensure smaller capacity. 
- const ensureSmallLen = 4 - expectedEs := make(map[*otlpmetrics.SummaryDataPoint_ValueAtQuantile]bool) - for i := 0; i < es.Len(); i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, es.Len(), len(expectedEs)) - es.EnsureCapacity(ensureSmallLen) - assert.Less(t, ensureSmallLen, es.Len()) - foundEs := make(map[*otlpmetrics.SummaryDataPoint_ValueAtQuantile]bool, es.Len()) - for i := 0; i < es.Len(); i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) - - // Test ensure larger capacity - const ensureLargeLen = 9 - oldLen := es.Len() - expectedEs = make(map[*otlpmetrics.SummaryDataPoint_ValueAtQuantile]bool, oldLen) - for i := 0; i < oldLen; i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, oldLen, len(expectedEs)) - es.EnsureCapacity(ensureLargeLen) - assert.Equal(t, ensureLargeLen, cap(*es.orig)) - foundEs = make(map[*otlpmetrics.SummaryDataPoint_ValueAtQuantile]bool, oldLen) - for i := 0; i < oldLen; i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) -} - -func TestValueAtQuantileSlice_MoveAndAppendTo(t *testing.T) { - // Test MoveAndAppendTo to empty - expectedSlice := generateTestValueAtQuantileSlice() - dest := NewValueAtQuantileSlice() - src := generateTestValueAtQuantileSlice() - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestValueAtQuantileSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo empty slice - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestValueAtQuantileSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo not empty slice - generateTestValueAtQuantileSlice().MoveAndAppendTo(dest) - assert.EqualValues(t, 2*expectedSlice.Len(), dest.Len()) - for i := 0; i < expectedSlice.Len(); i++ { - assert.EqualValues(t, expectedSlice.At(i), dest.At(i)) - assert.EqualValues(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) - } -} - -func TestValueAtQuantileSlice_RemoveIf(t *testing.T) { - // Test RemoveIf on empty slice - emptySlice := NewValueAtQuantileSlice() - emptySlice.RemoveIf(func(el ValueAtQuantile) bool { - t.Fail() - return false - }) - - // Test RemoveIf - filtered := generateTestValueAtQuantileSlice() - pos := 0 - filtered.RemoveIf(func(el ValueAtQuantile) bool { - pos++ - return pos%3 == 0 - }) - assert.Equal(t, 5, filtered.Len()) -} - -func TestValueAtQuantile_MoveTo(t *testing.T) { - ms := generateTestValueAtQuantile() - dest := NewValueAtQuantile() - ms.MoveTo(dest) - assert.EqualValues(t, NewValueAtQuantile(), ms) - assert.EqualValues(t, generateTestValueAtQuantile(), dest) -} - -func TestValueAtQuantile_CopyTo(t *testing.T) { - ms := NewValueAtQuantile() - orig := NewValueAtQuantile() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestValueAtQuantile() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestValueAtQuantile_Quantile(t *testing.T) { - ms := NewValueAtQuantile() - assert.EqualValues(t, float64(0.0), ms.Quantile()) - testValQuantile := float64(17.13) - ms.SetQuantile(testValQuantile) - assert.EqualValues(t, testValQuantile, ms.Quantile()) -} - -func TestValueAtQuantile_Value(t *testing.T) { - ms := NewValueAtQuantile() - assert.EqualValues(t, float64(0.0), ms.Value()) - testValValue := float64(17.13) - ms.SetValue(testValValue) - assert.EqualValues(t, testValValue, ms.Value()) -} - -func TestExemplarSlice(t *testing.T) { - es := 
NewExemplarSlice() - assert.EqualValues(t, 0, es.Len()) - es = newExemplarSlice(&[]otlpmetrics.Exemplar{}) - assert.EqualValues(t, 0, es.Len()) - - es.EnsureCapacity(7) - emptyVal := newExemplar(&otlpmetrics.Exemplar{}) - testVal := generateTestExemplar() - assert.EqualValues(t, 7, cap(*es.orig)) - for i := 0; i < es.Len(); i++ { - el := es.AppendEmpty() - assert.EqualValues(t, emptyVal, el) - fillTestExemplar(el) - assert.EqualValues(t, testVal, el) - } -} - -func TestExemplarSlice_CopyTo(t *testing.T) { - dest := NewExemplarSlice() - // Test CopyTo to empty - NewExemplarSlice().CopyTo(dest) - assert.EqualValues(t, NewExemplarSlice(), dest) - - // Test CopyTo larger slice - generateTestExemplarSlice().CopyTo(dest) - assert.EqualValues(t, generateTestExemplarSlice(), dest) - - // Test CopyTo same size slice - generateTestExemplarSlice().CopyTo(dest) - assert.EqualValues(t, generateTestExemplarSlice(), dest) -} - -func TestExemplarSlice_EnsureCapacity(t *testing.T) { - es := generateTestExemplarSlice() - // Test ensure smaller capacity. - const ensureSmallLen = 4 - expectedEs := make(map[*otlpmetrics.Exemplar]bool) - for i := 0; i < es.Len(); i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, es.Len(), len(expectedEs)) - es.EnsureCapacity(ensureSmallLen) - assert.Less(t, ensureSmallLen, es.Len()) - foundEs := make(map[*otlpmetrics.Exemplar]bool, es.Len()) - for i := 0; i < es.Len(); i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) - - // Test ensure larger capacity - const ensureLargeLen = 9 - oldLen := es.Len() - assert.Equal(t, oldLen, len(expectedEs)) - es.EnsureCapacity(ensureLargeLen) - assert.Equal(t, ensureLargeLen, cap(*es.orig)) -} - -func TestExemplarSlice_MoveAndAppendTo(t *testing.T) { - // Test MoveAndAppendTo to empty - expectedSlice := generateTestExemplarSlice() - dest := NewExemplarSlice() - src := generateTestExemplarSlice() - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestExemplarSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo empty slice - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestExemplarSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo not empty slice - generateTestExemplarSlice().MoveAndAppendTo(dest) - assert.EqualValues(t, 2*expectedSlice.Len(), dest.Len()) - for i := 0; i < expectedSlice.Len(); i++ { - assert.EqualValues(t, expectedSlice.At(i), dest.At(i)) - assert.EqualValues(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) - } -} - -func TestExemplarSlice_RemoveIf(t *testing.T) { - // Test RemoveIf on empty slice - emptySlice := NewExemplarSlice() - emptySlice.RemoveIf(func(el Exemplar) bool { - t.Fail() - return false - }) - - // Test RemoveIf - filtered := generateTestExemplarSlice() - pos := 0 - filtered.RemoveIf(func(el Exemplar) bool { - pos++ - return pos%3 == 0 - }) - assert.Equal(t, 5, filtered.Len()) -} - -func TestExemplar_MoveTo(t *testing.T) { - ms := generateTestExemplar() - dest := NewExemplar() - ms.MoveTo(dest) - assert.EqualValues(t, NewExemplar(), ms) - assert.EqualValues(t, generateTestExemplar(), dest) -} - -func TestExemplar_CopyTo(t *testing.T) { - ms := NewExemplar() - orig := NewExemplar() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestExemplar() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestExemplar_Timestamp(t *testing.T) { - 
ms := NewExemplar() - assert.EqualValues(t, Timestamp(0), ms.Timestamp()) - testValTimestamp := Timestamp(1234567890) - ms.SetTimestamp(testValTimestamp) - assert.EqualValues(t, testValTimestamp, ms.Timestamp()) -} - -func TestExemplarValueType(t *testing.T) { - tv := NewExemplar() - assert.Equal(t, ExemplarValueTypeNone, tv.ValueType()) - assert.Equal(t, "", ExemplarValueType(1000).String()) - tv.SetDoubleVal(float64(17.13)) - assert.Equal(t, ExemplarValueTypeDouble, tv.ValueType()) - tv.SetIntVal(int64(17)) - assert.Equal(t, ExemplarValueTypeInt, tv.ValueType()) -} - -func TestExemplar_DoubleVal(t *testing.T) { - ms := NewExemplar() - assert.EqualValues(t, float64(0.0), ms.DoubleVal()) - testValDoubleVal := float64(17.13) - ms.SetDoubleVal(testValDoubleVal) - assert.EqualValues(t, testValDoubleVal, ms.DoubleVal()) -} - -func TestExemplar_IntVal(t *testing.T) { - ms := NewExemplar() - assert.EqualValues(t, int64(0), ms.IntVal()) - testValIntVal := int64(17) - ms.SetIntVal(testValIntVal) - assert.EqualValues(t, testValIntVal, ms.IntVal()) -} - -func TestExemplar_FilteredAttributes(t *testing.T) { - ms := NewExemplar() - assert.EqualValues(t, NewMap(), ms.FilteredAttributes()) - fillTestMap(ms.FilteredAttributes()) - testValFilteredAttributes := generateTestMap() - assert.EqualValues(t, testValFilteredAttributes, ms.FilteredAttributes()) -} - -func TestExemplar_TraceID(t *testing.T) { - ms := NewExemplar() - assert.EqualValues(t, NewTraceID([16]byte{}), ms.TraceID()) - testValTraceID := NewTraceID([16]byte{1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1}) - ms.SetTraceID(testValTraceID) - assert.EqualValues(t, testValTraceID, ms.TraceID()) -} - -func TestExemplar_SpanID(t *testing.T) { - ms := NewExemplar() - assert.EqualValues(t, NewSpanID([8]byte{}), ms.SpanID()) - testValSpanID := NewSpanID([8]byte{1, 2, 3, 4, 5, 6, 7, 8}) - ms.SetSpanID(testValSpanID) - assert.EqualValues(t, testValSpanID, ms.SpanID()) -} - -func generateTestResourceMetricsSlice() ResourceMetricsSlice { - tv := NewResourceMetricsSlice() - fillTestResourceMetricsSlice(tv) - return tv -} - -func fillTestResourceMetricsSlice(tv ResourceMetricsSlice) { - l := 7 - tv.EnsureCapacity(l) - for i := 0; i < l; i++ { - fillTestResourceMetrics(tv.AppendEmpty()) - } -} - -func generateTestResourceMetrics() ResourceMetrics { - tv := NewResourceMetrics() - fillTestResourceMetrics(tv) - return tv -} - -func fillTestResourceMetrics(tv ResourceMetrics) { - fillTestResource(tv.Resource()) - tv.SetSchemaUrl("https://opentelemetry.io/schemas/1.5.0") - fillTestScopeMetricsSlice(tv.ScopeMetrics()) -} - -func generateTestScopeMetricsSlice() ScopeMetricsSlice { - tv := NewScopeMetricsSlice() - fillTestScopeMetricsSlice(tv) - return tv -} - -func fillTestScopeMetricsSlice(tv ScopeMetricsSlice) { - l := 7 - tv.EnsureCapacity(l) - for i := 0; i < l; i++ { - fillTestScopeMetrics(tv.AppendEmpty()) - } -} - -func generateTestScopeMetrics() ScopeMetrics { - tv := NewScopeMetrics() - fillTestScopeMetrics(tv) - return tv -} - -func fillTestScopeMetrics(tv ScopeMetrics) { - fillTestInstrumentationScope(tv.Scope()) - tv.SetSchemaUrl("https://opentelemetry.io/schemas/1.5.0") - fillTestMetricSlice(tv.Metrics()) -} - -func generateTestMetricSlice() MetricSlice { - tv := NewMetricSlice() - fillTestMetricSlice(tv) - return tv -} - -func fillTestMetricSlice(tv MetricSlice) { - l := 7 - tv.EnsureCapacity(l) - for i := 0; i < l; i++ { - fillTestMetric(tv.AppendEmpty()) - } -} - -func generateTestMetric() Metric { - tv := NewMetric() - fillTestMetric(tv) - return 
tv -} - -func fillTestMetric(tv Metric) { - tv.SetName("test_name") - tv.SetDescription("test_description") - tv.SetUnit("1") - tv.SetDataType(MetricDataTypeSum) - fillTestSum(tv.Sum()) -} - -func generateTestGauge() Gauge { - tv := NewGauge() - fillTestGauge(tv) - return tv -} - -func fillTestGauge(tv Gauge) { - fillTestNumberDataPointSlice(tv.DataPoints()) -} - -func generateTestSum() Sum { - tv := NewSum() - fillTestSum(tv) - return tv -} - -func fillTestSum(tv Sum) { - tv.SetAggregationTemporality(MetricAggregationTemporalityCumulative) - tv.SetIsMonotonic(true) - fillTestNumberDataPointSlice(tv.DataPoints()) -} - -func generateTestHistogram() Histogram { - tv := NewHistogram() - fillTestHistogram(tv) - return tv -} - -func fillTestHistogram(tv Histogram) { - tv.SetAggregationTemporality(MetricAggregationTemporalityCumulative) - fillTestHistogramDataPointSlice(tv.DataPoints()) -} - -func generateTestExponentialHistogram() ExponentialHistogram { - tv := NewExponentialHistogram() - fillTestExponentialHistogram(tv) - return tv -} - -func fillTestExponentialHistogram(tv ExponentialHistogram) { - tv.SetAggregationTemporality(MetricAggregationTemporalityCumulative) - fillTestExponentialHistogramDataPointSlice(tv.DataPoints()) -} - -func generateTestSummary() Summary { - tv := NewSummary() - fillTestSummary(tv) - return tv -} - -func fillTestSummary(tv Summary) { - fillTestSummaryDataPointSlice(tv.DataPoints()) -} - -func generateTestNumberDataPointSlice() NumberDataPointSlice { - tv := NewNumberDataPointSlice() - fillTestNumberDataPointSlice(tv) - return tv -} - -func fillTestNumberDataPointSlice(tv NumberDataPointSlice) { - l := 7 - tv.EnsureCapacity(l) - for i := 0; i < l; i++ { - fillTestNumberDataPoint(tv.AppendEmpty()) - } -} - -func generateTestNumberDataPoint() NumberDataPoint { - tv := NewNumberDataPoint() - fillTestNumberDataPoint(tv) - return tv -} - -func fillTestNumberDataPoint(tv NumberDataPoint) { - fillTestMap(tv.Attributes()) - tv.SetStartTimestamp(Timestamp(1234567890)) - tv.SetTimestamp(Timestamp(1234567890)) - tv.SetDoubleVal(float64(17.13)) - fillTestExemplarSlice(tv.Exemplars()) - fillTestMetricDataPointFlags(tv.Flags()) -} - -func generateTestHistogramDataPointSlice() HistogramDataPointSlice { - tv := NewHistogramDataPointSlice() - fillTestHistogramDataPointSlice(tv) - return tv -} - -func fillTestHistogramDataPointSlice(tv HistogramDataPointSlice) { - l := 7 - tv.EnsureCapacity(l) - for i := 0; i < l; i++ { - fillTestHistogramDataPoint(tv.AppendEmpty()) - } -} - -func generateTestHistogramDataPoint() HistogramDataPoint { - tv := NewHistogramDataPoint() - fillTestHistogramDataPoint(tv) - return tv -} - -func fillTestHistogramDataPoint(tv HistogramDataPoint) { - fillTestMap(tv.Attributes()) - tv.SetStartTimestamp(Timestamp(1234567890)) - tv.SetTimestamp(Timestamp(1234567890)) - tv.SetCount(uint64(17)) - tv.SetSum(float64(17.13)) - tv.SetBucketCounts(NewImmutableUInt64Slice([]uint64{1, 2, 3})) - tv.SetExplicitBounds(NewImmutableFloat64Slice([]float64{1, 2, 3})) - fillTestExemplarSlice(tv.Exemplars()) - fillTestMetricDataPointFlags(tv.Flags()) - tv.SetMin(float64(9.23)) - tv.SetMax(float64(182.55)) -} - -func generateTestExponentialHistogramDataPointSlice() ExponentialHistogramDataPointSlice { - tv := NewExponentialHistogramDataPointSlice() - fillTestExponentialHistogramDataPointSlice(tv) - return tv -} - -func fillTestExponentialHistogramDataPointSlice(tv ExponentialHistogramDataPointSlice) { - l := 7 - tv.EnsureCapacity(l) - for i := 0; i < l; i++ { - 
fillTestExponentialHistogramDataPoint(tv.AppendEmpty()) - } -} - -func generateTestExponentialHistogramDataPoint() ExponentialHistogramDataPoint { - tv := NewExponentialHistogramDataPoint() - fillTestExponentialHistogramDataPoint(tv) - return tv -} - -func fillTestExponentialHistogramDataPoint(tv ExponentialHistogramDataPoint) { - fillTestMap(tv.Attributes()) - tv.SetStartTimestamp(Timestamp(1234567890)) - tv.SetTimestamp(Timestamp(1234567890)) - tv.SetCount(uint64(17)) - tv.SetSum(float64(17.13)) - tv.SetScale(int32(4)) - tv.SetZeroCount(uint64(201)) - fillTestBuckets(tv.Positive()) - fillTestBuckets(tv.Negative()) - fillTestExemplarSlice(tv.Exemplars()) - fillTestMetricDataPointFlags(tv.Flags()) - tv.SetMin(float64(9.23)) - tv.SetMax(float64(182.55)) -} - -func generateTestBuckets() Buckets { - tv := NewBuckets() - fillTestBuckets(tv) - return tv -} - -func fillTestBuckets(tv Buckets) { - tv.SetOffset(int32(909)) - tv.SetBucketCounts(NewImmutableUInt64Slice([]uint64{1, 2, 3})) -} - -func generateTestSummaryDataPointSlice() SummaryDataPointSlice { - tv := NewSummaryDataPointSlice() - fillTestSummaryDataPointSlice(tv) - return tv -} - -func fillTestSummaryDataPointSlice(tv SummaryDataPointSlice) { - l := 7 - tv.EnsureCapacity(l) - for i := 0; i < l; i++ { - fillTestSummaryDataPoint(tv.AppendEmpty()) - } -} - -func generateTestSummaryDataPoint() SummaryDataPoint { - tv := NewSummaryDataPoint() - fillTestSummaryDataPoint(tv) - return tv -} - -func fillTestSummaryDataPoint(tv SummaryDataPoint) { - fillTestMap(tv.Attributes()) - tv.SetStartTimestamp(Timestamp(1234567890)) - tv.SetTimestamp(Timestamp(1234567890)) - tv.SetCount(uint64(17)) - tv.SetSum(float64(17.13)) - fillTestValueAtQuantileSlice(tv.QuantileValues()) - fillTestMetricDataPointFlags(tv.Flags()) -} - -func generateTestValueAtQuantileSlice() ValueAtQuantileSlice { - tv := NewValueAtQuantileSlice() - fillTestValueAtQuantileSlice(tv) - return tv -} - -func fillTestValueAtQuantileSlice(tv ValueAtQuantileSlice) { - l := 7 - tv.EnsureCapacity(l) - for i := 0; i < l; i++ { - fillTestValueAtQuantile(tv.AppendEmpty()) - } -} - -func generateTestValueAtQuantile() ValueAtQuantile { - tv := NewValueAtQuantile() - fillTestValueAtQuantile(tv) - return tv -} - -func fillTestValueAtQuantile(tv ValueAtQuantile) { - tv.SetQuantile(float64(17.13)) - tv.SetValue(float64(17.13)) -} - -func generateTestExemplarSlice() ExemplarSlice { - tv := NewExemplarSlice() - fillTestExemplarSlice(tv) - return tv -} - -func fillTestExemplarSlice(tv ExemplarSlice) { - l := 7 - tv.EnsureCapacity(l) - for i := 0; i < l; i++ { - fillTestExemplar(tv.AppendEmpty()) - } -} - -func generateTestExemplar() Exemplar { - tv := NewExemplar() - fillTestExemplar(tv) - return tv -} - -func fillTestExemplar(tv Exemplar) { - tv.SetTimestamp(Timestamp(1234567890)) - tv.SetIntVal(int64(17)) - fillTestMap(tv.FilteredAttributes()) - tv.SetTraceID(NewTraceID([16]byte{1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1})) - tv.SetSpanID(NewSpanID([8]byte{1, 2, 3, 4, 5, 6, 7, 8})) -} diff --git a/pdata/internal/generated_ptrace_test.go b/pdata/internal/generated_ptrace_test.go deleted file mode 100644 index 514ff17739a..00000000000 --- a/pdata/internal/generated_ptrace_test.go +++ /dev/null @@ -1,1090 +0,0 @@ -// Copyright The OpenTelemetry Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. -// To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". - -package internal - -import ( - "testing" - - "github.com/stretchr/testify/assert" - - otlptrace "go.opentelemetry.io/collector/pdata/internal/data/protogen/trace/v1" -) - -func TestResourceSpansSlice(t *testing.T) { - es := NewResourceSpansSlice() - assert.EqualValues(t, 0, es.Len()) - es = newResourceSpansSlice(&[]*otlptrace.ResourceSpans{}) - assert.EqualValues(t, 0, es.Len()) - - es.EnsureCapacity(7) - emptyVal := newResourceSpans(&otlptrace.ResourceSpans{}) - testVal := generateTestResourceSpans() - assert.EqualValues(t, 7, cap(*es.orig)) - for i := 0; i < es.Len(); i++ { - el := es.AppendEmpty() - assert.EqualValues(t, emptyVal, el) - fillTestResourceSpans(el) - assert.EqualValues(t, testVal, el) - } -} - -func TestResourceSpansSlice_CopyTo(t *testing.T) { - dest := NewResourceSpansSlice() - // Test CopyTo to empty - NewResourceSpansSlice().CopyTo(dest) - assert.EqualValues(t, NewResourceSpansSlice(), dest) - - // Test CopyTo larger slice - generateTestResourceSpansSlice().CopyTo(dest) - assert.EqualValues(t, generateTestResourceSpansSlice(), dest) - - // Test CopyTo same size slice - generateTestResourceSpansSlice().CopyTo(dest) - assert.EqualValues(t, generateTestResourceSpansSlice(), dest) -} - -func TestResourceSpansSlice_EnsureCapacity(t *testing.T) { - es := generateTestResourceSpansSlice() - // Test ensure smaller capacity. 
- const ensureSmallLen = 4 - expectedEs := make(map[*otlptrace.ResourceSpans]bool) - for i := 0; i < es.Len(); i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, es.Len(), len(expectedEs)) - es.EnsureCapacity(ensureSmallLen) - assert.Less(t, ensureSmallLen, es.Len()) - foundEs := make(map[*otlptrace.ResourceSpans]bool, es.Len()) - for i := 0; i < es.Len(); i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) - - // Test ensure larger capacity - const ensureLargeLen = 9 - oldLen := es.Len() - expectedEs = make(map[*otlptrace.ResourceSpans]bool, oldLen) - for i := 0; i < oldLen; i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, oldLen, len(expectedEs)) - es.EnsureCapacity(ensureLargeLen) - assert.Equal(t, ensureLargeLen, cap(*es.orig)) - foundEs = make(map[*otlptrace.ResourceSpans]bool, oldLen) - for i := 0; i < oldLen; i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) -} - -func TestResourceSpansSlice_MoveAndAppendTo(t *testing.T) { - // Test MoveAndAppendTo to empty - expectedSlice := generateTestResourceSpansSlice() - dest := NewResourceSpansSlice() - src := generateTestResourceSpansSlice() - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestResourceSpansSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo empty slice - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestResourceSpansSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo not empty slice - generateTestResourceSpansSlice().MoveAndAppendTo(dest) - assert.EqualValues(t, 2*expectedSlice.Len(), dest.Len()) - for i := 0; i < expectedSlice.Len(); i++ { - assert.EqualValues(t, expectedSlice.At(i), dest.At(i)) - assert.EqualValues(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) - } -} - -func TestResourceSpansSlice_RemoveIf(t *testing.T) { - // Test RemoveIf on empty slice - emptySlice := NewResourceSpansSlice() - emptySlice.RemoveIf(func(el ResourceSpans) bool { - t.Fail() - return false - }) - - // Test RemoveIf - filtered := generateTestResourceSpansSlice() - pos := 0 - filtered.RemoveIf(func(el ResourceSpans) bool { - pos++ - return pos%3 == 0 - }) - assert.Equal(t, 5, filtered.Len()) -} - -func TestResourceSpans_MoveTo(t *testing.T) { - ms := generateTestResourceSpans() - dest := NewResourceSpans() - ms.MoveTo(dest) - assert.EqualValues(t, NewResourceSpans(), ms) - assert.EqualValues(t, generateTestResourceSpans(), dest) -} - -func TestResourceSpans_CopyTo(t *testing.T) { - ms := NewResourceSpans() - orig := NewResourceSpans() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestResourceSpans() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestResourceSpans_Resource(t *testing.T) { - ms := NewResourceSpans() - fillTestResource(ms.Resource()) - assert.EqualValues(t, generateTestResource(), ms.Resource()) -} - -func TestResourceSpans_SchemaUrl(t *testing.T) { - ms := NewResourceSpans() - assert.EqualValues(t, "", ms.SchemaUrl()) - testValSchemaUrl := "https://opentelemetry.io/schemas/1.5.0" - ms.SetSchemaUrl(testValSchemaUrl) - assert.EqualValues(t, testValSchemaUrl, ms.SchemaUrl()) -} - -func TestResourceSpans_ScopeSpans(t *testing.T) { - ms := NewResourceSpans() - assert.EqualValues(t, NewScopeSpansSlice(), ms.ScopeSpans()) - fillTestScopeSpansSlice(ms.ScopeSpans()) - testValScopeSpans := 
generateTestScopeSpansSlice() - assert.EqualValues(t, testValScopeSpans, ms.ScopeSpans()) -} - -func TestScopeSpansSlice(t *testing.T) { - es := NewScopeSpansSlice() - assert.EqualValues(t, 0, es.Len()) - es = newScopeSpansSlice(&[]*otlptrace.ScopeSpans{}) - assert.EqualValues(t, 0, es.Len()) - - es.EnsureCapacity(7) - emptyVal := newScopeSpans(&otlptrace.ScopeSpans{}) - testVal := generateTestScopeSpans() - assert.EqualValues(t, 7, cap(*es.orig)) - for i := 0; i < es.Len(); i++ { - el := es.AppendEmpty() - assert.EqualValues(t, emptyVal, el) - fillTestScopeSpans(el) - assert.EqualValues(t, testVal, el) - } -} - -func TestScopeSpansSlice_CopyTo(t *testing.T) { - dest := NewScopeSpansSlice() - // Test CopyTo to empty - NewScopeSpansSlice().CopyTo(dest) - assert.EqualValues(t, NewScopeSpansSlice(), dest) - - // Test CopyTo larger slice - generateTestScopeSpansSlice().CopyTo(dest) - assert.EqualValues(t, generateTestScopeSpansSlice(), dest) - - // Test CopyTo same size slice - generateTestScopeSpansSlice().CopyTo(dest) - assert.EqualValues(t, generateTestScopeSpansSlice(), dest) -} - -func TestScopeSpansSlice_EnsureCapacity(t *testing.T) { - es := generateTestScopeSpansSlice() - // Test ensure smaller capacity. - const ensureSmallLen = 4 - expectedEs := make(map[*otlptrace.ScopeSpans]bool) - for i := 0; i < es.Len(); i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, es.Len(), len(expectedEs)) - es.EnsureCapacity(ensureSmallLen) - assert.Less(t, ensureSmallLen, es.Len()) - foundEs := make(map[*otlptrace.ScopeSpans]bool, es.Len()) - for i := 0; i < es.Len(); i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) - - // Test ensure larger capacity - const ensureLargeLen = 9 - oldLen := es.Len() - expectedEs = make(map[*otlptrace.ScopeSpans]bool, oldLen) - for i := 0; i < oldLen; i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, oldLen, len(expectedEs)) - es.EnsureCapacity(ensureLargeLen) - assert.Equal(t, ensureLargeLen, cap(*es.orig)) - foundEs = make(map[*otlptrace.ScopeSpans]bool, oldLen) - for i := 0; i < oldLen; i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) -} - -func TestScopeSpansSlice_MoveAndAppendTo(t *testing.T) { - // Test MoveAndAppendTo to empty - expectedSlice := generateTestScopeSpansSlice() - dest := NewScopeSpansSlice() - src := generateTestScopeSpansSlice() - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestScopeSpansSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo empty slice - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestScopeSpansSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo not empty slice - generateTestScopeSpansSlice().MoveAndAppendTo(dest) - assert.EqualValues(t, 2*expectedSlice.Len(), dest.Len()) - for i := 0; i < expectedSlice.Len(); i++ { - assert.EqualValues(t, expectedSlice.At(i), dest.At(i)) - assert.EqualValues(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) - } -} - -func TestScopeSpansSlice_RemoveIf(t *testing.T) { - // Test RemoveIf on empty slice - emptySlice := NewScopeSpansSlice() - emptySlice.RemoveIf(func(el ScopeSpans) bool { - t.Fail() - return false - }) - - // Test RemoveIf - filtered := generateTestScopeSpansSlice() - pos := 0 - filtered.RemoveIf(func(el ScopeSpans) bool { - pos++ - return pos%3 == 0 - }) - assert.Equal(t, 5, 
filtered.Len()) -} - -func TestScopeSpans_MoveTo(t *testing.T) { - ms := generateTestScopeSpans() - dest := NewScopeSpans() - ms.MoveTo(dest) - assert.EqualValues(t, NewScopeSpans(), ms) - assert.EqualValues(t, generateTestScopeSpans(), dest) -} - -func TestScopeSpans_CopyTo(t *testing.T) { - ms := NewScopeSpans() - orig := NewScopeSpans() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestScopeSpans() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestScopeSpans_Scope(t *testing.T) { - ms := NewScopeSpans() - fillTestInstrumentationScope(ms.Scope()) - assert.EqualValues(t, generateTestInstrumentationScope(), ms.Scope()) -} - -func TestScopeSpans_SchemaUrl(t *testing.T) { - ms := NewScopeSpans() - assert.EqualValues(t, "", ms.SchemaUrl()) - testValSchemaUrl := "https://opentelemetry.io/schemas/1.5.0" - ms.SetSchemaUrl(testValSchemaUrl) - assert.EqualValues(t, testValSchemaUrl, ms.SchemaUrl()) -} - -func TestScopeSpans_Spans(t *testing.T) { - ms := NewScopeSpans() - assert.EqualValues(t, NewSpanSlice(), ms.Spans()) - fillTestSpanSlice(ms.Spans()) - testValSpans := generateTestSpanSlice() - assert.EqualValues(t, testValSpans, ms.Spans()) -} - -func TestSpanSlice(t *testing.T) { - es := NewSpanSlice() - assert.EqualValues(t, 0, es.Len()) - es = newSpanSlice(&[]*otlptrace.Span{}) - assert.EqualValues(t, 0, es.Len()) - - es.EnsureCapacity(7) - emptyVal := newSpan(&otlptrace.Span{}) - testVal := generateTestSpan() - assert.EqualValues(t, 7, cap(*es.orig)) - for i := 0; i < es.Len(); i++ { - el := es.AppendEmpty() - assert.EqualValues(t, emptyVal, el) - fillTestSpan(el) - assert.EqualValues(t, testVal, el) - } -} - -func TestSpanSlice_CopyTo(t *testing.T) { - dest := NewSpanSlice() - // Test CopyTo to empty - NewSpanSlice().CopyTo(dest) - assert.EqualValues(t, NewSpanSlice(), dest) - - // Test CopyTo larger slice - generateTestSpanSlice().CopyTo(dest) - assert.EqualValues(t, generateTestSpanSlice(), dest) - - // Test CopyTo same size slice - generateTestSpanSlice().CopyTo(dest) - assert.EqualValues(t, generateTestSpanSlice(), dest) -} - -func TestSpanSlice_EnsureCapacity(t *testing.T) { - es := generateTestSpanSlice() - // Test ensure smaller capacity. 
- const ensureSmallLen = 4 - expectedEs := make(map[*otlptrace.Span]bool) - for i := 0; i < es.Len(); i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, es.Len(), len(expectedEs)) - es.EnsureCapacity(ensureSmallLen) - assert.Less(t, ensureSmallLen, es.Len()) - foundEs := make(map[*otlptrace.Span]bool, es.Len()) - for i := 0; i < es.Len(); i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) - - // Test ensure larger capacity - const ensureLargeLen = 9 - oldLen := es.Len() - expectedEs = make(map[*otlptrace.Span]bool, oldLen) - for i := 0; i < oldLen; i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, oldLen, len(expectedEs)) - es.EnsureCapacity(ensureLargeLen) - assert.Equal(t, ensureLargeLen, cap(*es.orig)) - foundEs = make(map[*otlptrace.Span]bool, oldLen) - for i := 0; i < oldLen; i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) -} - -func TestSpanSlice_MoveAndAppendTo(t *testing.T) { - // Test MoveAndAppendTo to empty - expectedSlice := generateTestSpanSlice() - dest := NewSpanSlice() - src := generateTestSpanSlice() - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestSpanSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo empty slice - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestSpanSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo not empty slice - generateTestSpanSlice().MoveAndAppendTo(dest) - assert.EqualValues(t, 2*expectedSlice.Len(), dest.Len()) - for i := 0; i < expectedSlice.Len(); i++ { - assert.EqualValues(t, expectedSlice.At(i), dest.At(i)) - assert.EqualValues(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) - } -} - -func TestSpanSlice_RemoveIf(t *testing.T) { - // Test RemoveIf on empty slice - emptySlice := NewSpanSlice() - emptySlice.RemoveIf(func(el Span) bool { - t.Fail() - return false - }) - - // Test RemoveIf - filtered := generateTestSpanSlice() - pos := 0 - filtered.RemoveIf(func(el Span) bool { - pos++ - return pos%3 == 0 - }) - assert.Equal(t, 5, filtered.Len()) -} - -func TestSpan_MoveTo(t *testing.T) { - ms := generateTestSpan() - dest := NewSpan() - ms.MoveTo(dest) - assert.EqualValues(t, NewSpan(), ms) - assert.EqualValues(t, generateTestSpan(), dest) -} - -func TestSpan_CopyTo(t *testing.T) { - ms := NewSpan() - orig := NewSpan() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestSpan() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestSpan_TraceID(t *testing.T) { - ms := NewSpan() - assert.EqualValues(t, NewTraceID([16]byte{}), ms.TraceID()) - testValTraceID := NewTraceID([16]byte{1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1}) - ms.SetTraceID(testValTraceID) - assert.EqualValues(t, testValTraceID, ms.TraceID()) -} - -func TestSpan_SpanID(t *testing.T) { - ms := NewSpan() - assert.EqualValues(t, NewSpanID([8]byte{}), ms.SpanID()) - testValSpanID := NewSpanID([8]byte{1, 2, 3, 4, 5, 6, 7, 8}) - ms.SetSpanID(testValSpanID) - assert.EqualValues(t, testValSpanID, ms.SpanID()) -} - -func TestSpan_TraceState(t *testing.T) { - ms := NewSpan() - assert.EqualValues(t, TraceState(""), ms.TraceState()) - testValTraceState := TraceState("congo=congos") - ms.SetTraceState(testValTraceState) - assert.EqualValues(t, testValTraceState, ms.TraceState()) -} - -func TestSpan_ParentSpanID(t *testing.T) { - ms := NewSpan() - 
assert.EqualValues(t, NewSpanID([8]byte{}), ms.ParentSpanID()) - testValParentSpanID := NewSpanID([8]byte{8, 7, 6, 5, 4, 3, 2, 1}) - ms.SetParentSpanID(testValParentSpanID) - assert.EqualValues(t, testValParentSpanID, ms.ParentSpanID()) -} - -func TestSpan_Name(t *testing.T) { - ms := NewSpan() - assert.EqualValues(t, "", ms.Name()) - testValName := "test_name" - ms.SetName(testValName) - assert.EqualValues(t, testValName, ms.Name()) -} - -func TestSpan_Kind(t *testing.T) { - ms := NewSpan() - assert.EqualValues(t, SpanKindUnspecified, ms.Kind()) - testValKind := SpanKindServer - ms.SetKind(testValKind) - assert.EqualValues(t, testValKind, ms.Kind()) -} - -func TestSpan_StartTimestamp(t *testing.T) { - ms := NewSpan() - assert.EqualValues(t, Timestamp(0), ms.StartTimestamp()) - testValStartTimestamp := Timestamp(1234567890) - ms.SetStartTimestamp(testValStartTimestamp) - assert.EqualValues(t, testValStartTimestamp, ms.StartTimestamp()) -} - -func TestSpan_EndTimestamp(t *testing.T) { - ms := NewSpan() - assert.EqualValues(t, Timestamp(0), ms.EndTimestamp()) - testValEndTimestamp := Timestamp(1234567890) - ms.SetEndTimestamp(testValEndTimestamp) - assert.EqualValues(t, testValEndTimestamp, ms.EndTimestamp()) -} - -func TestSpan_Attributes(t *testing.T) { - ms := NewSpan() - assert.EqualValues(t, NewMap(), ms.Attributes()) - fillTestMap(ms.Attributes()) - testValAttributes := generateTestMap() - assert.EqualValues(t, testValAttributes, ms.Attributes()) -} - -func TestSpan_DroppedAttributesCount(t *testing.T) { - ms := NewSpan() - assert.EqualValues(t, uint32(0), ms.DroppedAttributesCount()) - testValDroppedAttributesCount := uint32(17) - ms.SetDroppedAttributesCount(testValDroppedAttributesCount) - assert.EqualValues(t, testValDroppedAttributesCount, ms.DroppedAttributesCount()) -} - -func TestSpan_Events(t *testing.T) { - ms := NewSpan() - assert.EqualValues(t, NewSpanEventSlice(), ms.Events()) - fillTestSpanEventSlice(ms.Events()) - testValEvents := generateTestSpanEventSlice() - assert.EqualValues(t, testValEvents, ms.Events()) -} - -func TestSpan_DroppedEventsCount(t *testing.T) { - ms := NewSpan() - assert.EqualValues(t, uint32(0), ms.DroppedEventsCount()) - testValDroppedEventsCount := uint32(17) - ms.SetDroppedEventsCount(testValDroppedEventsCount) - assert.EqualValues(t, testValDroppedEventsCount, ms.DroppedEventsCount()) -} - -func TestSpan_Links(t *testing.T) { - ms := NewSpan() - assert.EqualValues(t, NewSpanLinkSlice(), ms.Links()) - fillTestSpanLinkSlice(ms.Links()) - testValLinks := generateTestSpanLinkSlice() - assert.EqualValues(t, testValLinks, ms.Links()) -} - -func TestSpan_DroppedLinksCount(t *testing.T) { - ms := NewSpan() - assert.EqualValues(t, uint32(0), ms.DroppedLinksCount()) - testValDroppedLinksCount := uint32(17) - ms.SetDroppedLinksCount(testValDroppedLinksCount) - assert.EqualValues(t, testValDroppedLinksCount, ms.DroppedLinksCount()) -} - -func TestSpan_Status(t *testing.T) { - ms := NewSpan() - fillTestSpanStatus(ms.Status()) - assert.EqualValues(t, generateTestSpanStatus(), ms.Status()) -} - -func TestSpanEventSlice(t *testing.T) { - es := NewSpanEventSlice() - assert.EqualValues(t, 0, es.Len()) - es = newSpanEventSlice(&[]*otlptrace.Span_Event{}) - assert.EqualValues(t, 0, es.Len()) - - es.EnsureCapacity(7) - emptyVal := newSpanEvent(&otlptrace.Span_Event{}) - testVal := generateTestSpanEvent() - assert.EqualValues(t, 7, cap(*es.orig)) - for i := 0; i < es.Len(); i++ { - el := es.AppendEmpty() - assert.EqualValues(t, emptyVal, el) - fillTestSpanEvent(el) - 
assert.EqualValues(t, testVal, el) - } -} - -func TestSpanEventSlice_CopyTo(t *testing.T) { - dest := NewSpanEventSlice() - // Test CopyTo to empty - NewSpanEventSlice().CopyTo(dest) - assert.EqualValues(t, NewSpanEventSlice(), dest) - - // Test CopyTo larger slice - generateTestSpanEventSlice().CopyTo(dest) - assert.EqualValues(t, generateTestSpanEventSlice(), dest) - - // Test CopyTo same size slice - generateTestSpanEventSlice().CopyTo(dest) - assert.EqualValues(t, generateTestSpanEventSlice(), dest) -} - -func TestSpanEventSlice_EnsureCapacity(t *testing.T) { - es := generateTestSpanEventSlice() - // Test ensure smaller capacity. - const ensureSmallLen = 4 - expectedEs := make(map[*otlptrace.Span_Event]bool) - for i := 0; i < es.Len(); i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, es.Len(), len(expectedEs)) - es.EnsureCapacity(ensureSmallLen) - assert.Less(t, ensureSmallLen, es.Len()) - foundEs := make(map[*otlptrace.Span_Event]bool, es.Len()) - for i := 0; i < es.Len(); i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) - - // Test ensure larger capacity - const ensureLargeLen = 9 - oldLen := es.Len() - expectedEs = make(map[*otlptrace.Span_Event]bool, oldLen) - for i := 0; i < oldLen; i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, oldLen, len(expectedEs)) - es.EnsureCapacity(ensureLargeLen) - assert.Equal(t, ensureLargeLen, cap(*es.orig)) - foundEs = make(map[*otlptrace.Span_Event]bool, oldLen) - for i := 0; i < oldLen; i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) -} - -func TestSpanEventSlice_MoveAndAppendTo(t *testing.T) { - // Test MoveAndAppendTo to empty - expectedSlice := generateTestSpanEventSlice() - dest := NewSpanEventSlice() - src := generateTestSpanEventSlice() - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestSpanEventSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo empty slice - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestSpanEventSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo not empty slice - generateTestSpanEventSlice().MoveAndAppendTo(dest) - assert.EqualValues(t, 2*expectedSlice.Len(), dest.Len()) - for i := 0; i < expectedSlice.Len(); i++ { - assert.EqualValues(t, expectedSlice.At(i), dest.At(i)) - assert.EqualValues(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) - } -} - -func TestSpanEventSlice_RemoveIf(t *testing.T) { - // Test RemoveIf on empty slice - emptySlice := NewSpanEventSlice() - emptySlice.RemoveIf(func(el SpanEvent) bool { - t.Fail() - return false - }) - - // Test RemoveIf - filtered := generateTestSpanEventSlice() - pos := 0 - filtered.RemoveIf(func(el SpanEvent) bool { - pos++ - return pos%3 == 0 - }) - assert.Equal(t, 5, filtered.Len()) -} - -func TestSpanEvent_MoveTo(t *testing.T) { - ms := generateTestSpanEvent() - dest := NewSpanEvent() - ms.MoveTo(dest) - assert.EqualValues(t, NewSpanEvent(), ms) - assert.EqualValues(t, generateTestSpanEvent(), dest) -} - -func TestSpanEvent_CopyTo(t *testing.T) { - ms := NewSpanEvent() - orig := NewSpanEvent() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestSpanEvent() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestSpanEvent_Timestamp(t *testing.T) { - ms := NewSpanEvent() - assert.EqualValues(t, Timestamp(0), ms.Timestamp()) - 
testValTimestamp := Timestamp(1234567890) - ms.SetTimestamp(testValTimestamp) - assert.EqualValues(t, testValTimestamp, ms.Timestamp()) -} - -func TestSpanEvent_Name(t *testing.T) { - ms := NewSpanEvent() - assert.EqualValues(t, "", ms.Name()) - testValName := "test_name" - ms.SetName(testValName) - assert.EqualValues(t, testValName, ms.Name()) -} - -func TestSpanEvent_Attributes(t *testing.T) { - ms := NewSpanEvent() - assert.EqualValues(t, NewMap(), ms.Attributes()) - fillTestMap(ms.Attributes()) - testValAttributes := generateTestMap() - assert.EqualValues(t, testValAttributes, ms.Attributes()) -} - -func TestSpanEvent_DroppedAttributesCount(t *testing.T) { - ms := NewSpanEvent() - assert.EqualValues(t, uint32(0), ms.DroppedAttributesCount()) - testValDroppedAttributesCount := uint32(17) - ms.SetDroppedAttributesCount(testValDroppedAttributesCount) - assert.EqualValues(t, testValDroppedAttributesCount, ms.DroppedAttributesCount()) -} - -func TestSpanLinkSlice(t *testing.T) { - es := NewSpanLinkSlice() - assert.EqualValues(t, 0, es.Len()) - es = newSpanLinkSlice(&[]*otlptrace.Span_Link{}) - assert.EqualValues(t, 0, es.Len()) - - es.EnsureCapacity(7) - emptyVal := newSpanLink(&otlptrace.Span_Link{}) - testVal := generateTestSpanLink() - assert.EqualValues(t, 7, cap(*es.orig)) - for i := 0; i < es.Len(); i++ { - el := es.AppendEmpty() - assert.EqualValues(t, emptyVal, el) - fillTestSpanLink(el) - assert.EqualValues(t, testVal, el) - } -} - -func TestSpanLinkSlice_CopyTo(t *testing.T) { - dest := NewSpanLinkSlice() - // Test CopyTo to empty - NewSpanLinkSlice().CopyTo(dest) - assert.EqualValues(t, NewSpanLinkSlice(), dest) - - // Test CopyTo larger slice - generateTestSpanLinkSlice().CopyTo(dest) - assert.EqualValues(t, generateTestSpanLinkSlice(), dest) - - // Test CopyTo same size slice - generateTestSpanLinkSlice().CopyTo(dest) - assert.EqualValues(t, generateTestSpanLinkSlice(), dest) -} - -func TestSpanLinkSlice_EnsureCapacity(t *testing.T) { - es := generateTestSpanLinkSlice() - // Test ensure smaller capacity. 
- const ensureSmallLen = 4 - expectedEs := make(map[*otlptrace.Span_Link]bool) - for i := 0; i < es.Len(); i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, es.Len(), len(expectedEs)) - es.EnsureCapacity(ensureSmallLen) - assert.Less(t, ensureSmallLen, es.Len()) - foundEs := make(map[*otlptrace.Span_Link]bool, es.Len()) - for i := 0; i < es.Len(); i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) - - // Test ensure larger capacity - const ensureLargeLen = 9 - oldLen := es.Len() - expectedEs = make(map[*otlptrace.Span_Link]bool, oldLen) - for i := 0; i < oldLen; i++ { - expectedEs[es.At(i).orig] = true - } - assert.Equal(t, oldLen, len(expectedEs)) - es.EnsureCapacity(ensureLargeLen) - assert.Equal(t, ensureLargeLen, cap(*es.orig)) - foundEs = make(map[*otlptrace.Span_Link]bool, oldLen) - for i := 0; i < oldLen; i++ { - foundEs[es.At(i).orig] = true - } - assert.EqualValues(t, expectedEs, foundEs) -} - -func TestSpanLinkSlice_MoveAndAppendTo(t *testing.T) { - // Test MoveAndAppendTo to empty - expectedSlice := generateTestSpanLinkSlice() - dest := NewSpanLinkSlice() - src := generateTestSpanLinkSlice() - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestSpanLinkSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo empty slice - src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestSpanLinkSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) - - // Test MoveAndAppendTo not empty slice - generateTestSpanLinkSlice().MoveAndAppendTo(dest) - assert.EqualValues(t, 2*expectedSlice.Len(), dest.Len()) - for i := 0; i < expectedSlice.Len(); i++ { - assert.EqualValues(t, expectedSlice.At(i), dest.At(i)) - assert.EqualValues(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) - } -} - -func TestSpanLinkSlice_RemoveIf(t *testing.T) { - // Test RemoveIf on empty slice - emptySlice := NewSpanLinkSlice() - emptySlice.RemoveIf(func(el SpanLink) bool { - t.Fail() - return false - }) - - // Test RemoveIf - filtered := generateTestSpanLinkSlice() - pos := 0 - filtered.RemoveIf(func(el SpanLink) bool { - pos++ - return pos%3 == 0 - }) - assert.Equal(t, 5, filtered.Len()) -} - -func TestSpanLink_MoveTo(t *testing.T) { - ms := generateTestSpanLink() - dest := NewSpanLink() - ms.MoveTo(dest) - assert.EqualValues(t, NewSpanLink(), ms) - assert.EqualValues(t, generateTestSpanLink(), dest) -} - -func TestSpanLink_CopyTo(t *testing.T) { - ms := NewSpanLink() - orig := NewSpanLink() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestSpanLink() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestSpanLink_TraceID(t *testing.T) { - ms := NewSpanLink() - assert.EqualValues(t, NewTraceID([16]byte{}), ms.TraceID()) - testValTraceID := NewTraceID([16]byte{1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1}) - ms.SetTraceID(testValTraceID) - assert.EqualValues(t, testValTraceID, ms.TraceID()) -} - -func TestSpanLink_SpanID(t *testing.T) { - ms := NewSpanLink() - assert.EqualValues(t, NewSpanID([8]byte{}), ms.SpanID()) - testValSpanID := NewSpanID([8]byte{1, 2, 3, 4, 5, 6, 7, 8}) - ms.SetSpanID(testValSpanID) - assert.EqualValues(t, testValSpanID, ms.SpanID()) -} - -func TestSpanLink_TraceState(t *testing.T) { - ms := NewSpanLink() - assert.EqualValues(t, TraceState(""), ms.TraceState()) - testValTraceState := TraceState("congo=congos") - ms.SetTraceState(testValTraceState) - 
assert.EqualValues(t, testValTraceState, ms.TraceState()) -} - -func TestSpanLink_Attributes(t *testing.T) { - ms := NewSpanLink() - assert.EqualValues(t, NewMap(), ms.Attributes()) - fillTestMap(ms.Attributes()) - testValAttributes := generateTestMap() - assert.EqualValues(t, testValAttributes, ms.Attributes()) -} - -func TestSpanLink_DroppedAttributesCount(t *testing.T) { - ms := NewSpanLink() - assert.EqualValues(t, uint32(0), ms.DroppedAttributesCount()) - testValDroppedAttributesCount := uint32(17) - ms.SetDroppedAttributesCount(testValDroppedAttributesCount) - assert.EqualValues(t, testValDroppedAttributesCount, ms.DroppedAttributesCount()) -} - -func TestSpanStatus_MoveTo(t *testing.T) { - ms := generateTestSpanStatus() - dest := NewSpanStatus() - ms.MoveTo(dest) - assert.EqualValues(t, NewSpanStatus(), ms) - assert.EqualValues(t, generateTestSpanStatus(), dest) -} - -func TestSpanStatus_CopyTo(t *testing.T) { - ms := NewSpanStatus() - orig := NewSpanStatus() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestSpanStatus() - orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) -} - -func TestSpanStatus_Code(t *testing.T) { - ms := NewSpanStatus() - assert.EqualValues(t, StatusCode(0), ms.Code()) - testValCode := StatusCode(1) - ms.SetCode(testValCode) - assert.EqualValues(t, testValCode, ms.Code()) -} - -func TestSpanStatus_Message(t *testing.T) { - ms := NewSpanStatus() - assert.EqualValues(t, "", ms.Message()) - testValMessage := "cancelled" - ms.SetMessage(testValMessage) - assert.EqualValues(t, testValMessage, ms.Message()) -} - -func generateTestResourceSpansSlice() ResourceSpansSlice { - tv := NewResourceSpansSlice() - fillTestResourceSpansSlice(tv) - return tv -} - -func fillTestResourceSpansSlice(tv ResourceSpansSlice) { - l := 7 - tv.EnsureCapacity(l) - for i := 0; i < l; i++ { - fillTestResourceSpans(tv.AppendEmpty()) - } -} - -func generateTestResourceSpans() ResourceSpans { - tv := NewResourceSpans() - fillTestResourceSpans(tv) - return tv -} - -func fillTestResourceSpans(tv ResourceSpans) { - fillTestResource(tv.Resource()) - tv.SetSchemaUrl("https://opentelemetry.io/schemas/1.5.0") - fillTestScopeSpansSlice(tv.ScopeSpans()) -} - -func generateTestScopeSpansSlice() ScopeSpansSlice { - tv := NewScopeSpansSlice() - fillTestScopeSpansSlice(tv) - return tv -} - -func fillTestScopeSpansSlice(tv ScopeSpansSlice) { - l := 7 - tv.EnsureCapacity(l) - for i := 0; i < l; i++ { - fillTestScopeSpans(tv.AppendEmpty()) - } -} - -func generateTestScopeSpans() ScopeSpans { - tv := NewScopeSpans() - fillTestScopeSpans(tv) - return tv -} - -func fillTestScopeSpans(tv ScopeSpans) { - fillTestInstrumentationScope(tv.Scope()) - tv.SetSchemaUrl("https://opentelemetry.io/schemas/1.5.0") - fillTestSpanSlice(tv.Spans()) -} - -func generateTestSpanSlice() SpanSlice { - tv := NewSpanSlice() - fillTestSpanSlice(tv) - return tv -} - -func fillTestSpanSlice(tv SpanSlice) { - l := 7 - tv.EnsureCapacity(l) - for i := 0; i < l; i++ { - fillTestSpan(tv.AppendEmpty()) - } -} - -func generateTestSpan() Span { - tv := NewSpan() - fillTestSpan(tv) - return tv -} - -func fillTestSpan(tv Span) { - tv.SetTraceID(NewTraceID([16]byte{1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1})) - tv.SetSpanID(NewSpanID([8]byte{1, 2, 3, 4, 5, 6, 7, 8})) - tv.SetTraceState(TraceState("congo=congos")) - tv.SetParentSpanID(NewSpanID([8]byte{8, 7, 6, 5, 4, 3, 2, 1})) - tv.SetName("test_name") - tv.SetKind(SpanKindServer) - tv.SetStartTimestamp(Timestamp(1234567890)) - tv.SetEndTimestamp(Timestamp(1234567890)) - 
fillTestMap(tv.Attributes()) - tv.SetDroppedAttributesCount(uint32(17)) - fillTestSpanEventSlice(tv.Events()) - tv.SetDroppedEventsCount(uint32(17)) - fillTestSpanLinkSlice(tv.Links()) - tv.SetDroppedLinksCount(uint32(17)) - fillTestSpanStatus(tv.Status()) -} - -func generateTestSpanEventSlice() SpanEventSlice { - tv := NewSpanEventSlice() - fillTestSpanEventSlice(tv) - return tv -} - -func fillTestSpanEventSlice(tv SpanEventSlice) { - l := 7 - tv.EnsureCapacity(l) - for i := 0; i < l; i++ { - fillTestSpanEvent(tv.AppendEmpty()) - } -} - -func generateTestSpanEvent() SpanEvent { - tv := NewSpanEvent() - fillTestSpanEvent(tv) - return tv -} - -func fillTestSpanEvent(tv SpanEvent) { - tv.SetTimestamp(Timestamp(1234567890)) - tv.SetName("test_name") - fillTestMap(tv.Attributes()) - tv.SetDroppedAttributesCount(uint32(17)) -} - -func generateTestSpanLinkSlice() SpanLinkSlice { - tv := NewSpanLinkSlice() - fillTestSpanLinkSlice(tv) - return tv -} - -func fillTestSpanLinkSlice(tv SpanLinkSlice) { - l := 7 - tv.EnsureCapacity(l) - for i := 0; i < l; i++ { - fillTestSpanLink(tv.AppendEmpty()) - } -} - -func generateTestSpanLink() SpanLink { - tv := NewSpanLink() - fillTestSpanLink(tv) - return tv -} - -func fillTestSpanLink(tv SpanLink) { - tv.SetTraceID(NewTraceID([16]byte{1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1})) - tv.SetSpanID(NewSpanID([8]byte{1, 2, 3, 4, 5, 6, 7, 8})) - tv.SetTraceState(TraceState("congo=congos")) - fillTestMap(tv.Attributes()) - tv.SetDroppedAttributesCount(uint32(17)) -} - -func generateTestSpanStatus() SpanStatus { - tv := NewSpanStatus() - fillTestSpanStatus(tv) - return tv -} - -func fillTestSpanStatus(tv SpanStatus) { - tv.SetCode(StatusCode(1)) - tv.SetMessage("cancelled") -} diff --git a/pdata/internal/generated_wrapper_common.go b/pdata/internal/generated_wrapper_common.go new file mode 100644 index 00000000000..28c256c871c --- /dev/null +++ b/pdata/internal/generated_wrapper_common.go @@ -0,0 +1,74 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. +// To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". 
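+// generated_wrapper_common.go holds the internal wrapper types for the shared
+// pcommon structures (InstrumentationScope and Slice). Each wrapper stores only
+// the original otlpcommon message; the exported GetOrig*/New* helpers let the
+// other pdata packages reach that message without ever exposing it to users.
+// A public accessor built on top of these helpers is expected to look roughly
+// like the following (hypothetical sketch, not the exact generated output):
+//
+//	func (ms InstrumentationScope) Name() string {
+//		return internal.GetOrigInstrumentationScope(internal.InstrumentationScope(ms)).Name
+//	}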
+ +package internal + +import ( + otlpcommon "go.opentelemetry.io/collector/pdata/internal/data/protogen/common/v1" +) + +type InstrumentationScope struct { + orig *otlpcommon.InstrumentationScope +} + +func GetOrigInstrumentationScope(ms InstrumentationScope) *otlpcommon.InstrumentationScope { + return ms.orig +} + +func NewInstrumentationScope(orig *otlpcommon.InstrumentationScope) InstrumentationScope { + return InstrumentationScope{orig: orig} +} + +type Slice struct { + orig *[]otlpcommon.AnyValue +} + +func GetOrigSlice(ms Slice) *[]otlpcommon.AnyValue { + return ms.orig +} + +func NewSlice(orig *[]otlpcommon.AnyValue) Slice { + return Slice{orig: orig} +} + +func GenerateTestInstrumentationScope() InstrumentationScope { + orig := otlpcommon.InstrumentationScope{} + tv := NewInstrumentationScope(&orig) + FillTestInstrumentationScope(tv) + return tv +} + +func FillTestInstrumentationScope(tv InstrumentationScope) { + tv.orig.Name = "test_name" + tv.orig.Version = "test_version" + FillTestMap(NewMap(&tv.orig.Attributes)) + tv.orig.DroppedAttributesCount = uint32(17) +} + +func GenerateTestSlice() Slice { + orig := []otlpcommon.AnyValue{} + tv := NewSlice(&orig) + FillTestSlice(tv) + return tv +} + +func FillTestSlice(tv Slice) { + *tv.orig = make([]otlpcommon.AnyValue, 7) + for i := 0; i < 7; i++ { + FillTestValue(NewValue(&(*tv.orig)[i])) + } +} diff --git a/pdata/internal/generated_wrapper_immutable_slice.go b/pdata/internal/generated_wrapper_immutable_slice.go new file mode 100644 index 00000000000..d898f0722f6 --- /dev/null +++ b/pdata/internal/generated_wrapper_immutable_slice.go @@ -0,0 +1,54 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. +// To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". 
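+// generated_wrapper_immutable_slice.go holds the internal wrappers for the
+// immutable primitive slices (ImmutableByteSlice, ImmutableFloat64Slice and
+// ImmutableUInt64Slice). Unlike the message wrappers, these store the slice
+// header by value and only hand it out through the GetOrig* helpers, which
+// matches the read-only contract of the public Immutable* types.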
+ +package internal + +type ImmutableByteSlice struct { + orig []byte +} + +func GetOrigImmutableByteSlice(ms ImmutableByteSlice) []byte { + return ms.orig +} + +func NewImmutableByteSlice(orig []byte) ImmutableByteSlice { + return ImmutableByteSlice{orig: orig} +} + +type ImmutableFloat64Slice struct { + orig []float64 +} + +func GetOrigImmutableFloat64Slice(ms ImmutableFloat64Slice) []float64 { + return ms.orig +} + +func NewImmutableFloat64Slice(orig []float64) ImmutableFloat64Slice { + return ImmutableFloat64Slice{orig: orig} +} + +type ImmutableUInt64Slice struct { + orig []uint64 +} + +func GetOrigImmutableUInt64Slice(ms ImmutableUInt64Slice) []uint64 { + return ms.orig +} + +func NewImmutableUInt64Slice(orig []uint64) ImmutableUInt64Slice { + return ImmutableUInt64Slice{orig: orig} +} diff --git a/pdata/internal/generated_wrapper_logs.go b/pdata/internal/generated_wrapper_logs.go new file mode 100644 index 00000000000..451b22bc34a --- /dev/null +++ b/pdata/internal/generated_wrapper_logs.go @@ -0,0 +1,186 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. +// To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". 
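+// generated_wrapper_logs.go holds the internal wrappers for the plog hierarchy
+// (ResourceLogs, ScopeLogs, LogRecord and their slices) together with exported
+// GenerateTest*/FillTest* helpers. The helpers populate the otlplogs structs
+// directly, so the generated tests in the public plog package can reuse the
+// same test data without needing access to the underlying protos.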
+ +package internal + +import ( + "go.opentelemetry.io/collector/pdata/internal/data" + otlplogs "go.opentelemetry.io/collector/pdata/internal/data/protogen/logs/v1" +) + +type ResourceLogsSlice struct { + orig *[]*otlplogs.ResourceLogs +} + +func GetOrigResourceLogsSlice(ms ResourceLogsSlice) *[]*otlplogs.ResourceLogs { + return ms.orig +} + +func NewResourceLogsSlice(orig *[]*otlplogs.ResourceLogs) ResourceLogsSlice { + return ResourceLogsSlice{orig: orig} +} + +type ResourceLogs struct { + orig *otlplogs.ResourceLogs +} + +func GetOrigResourceLogs(ms ResourceLogs) *otlplogs.ResourceLogs { + return ms.orig +} + +func NewResourceLogs(orig *otlplogs.ResourceLogs) ResourceLogs { + return ResourceLogs{orig: orig} +} + +type ScopeLogsSlice struct { + orig *[]*otlplogs.ScopeLogs +} + +func GetOrigScopeLogsSlice(ms ScopeLogsSlice) *[]*otlplogs.ScopeLogs { + return ms.orig +} + +func NewScopeLogsSlice(orig *[]*otlplogs.ScopeLogs) ScopeLogsSlice { + return ScopeLogsSlice{orig: orig} +} + +type ScopeLogs struct { + orig *otlplogs.ScopeLogs +} + +func GetOrigScopeLogs(ms ScopeLogs) *otlplogs.ScopeLogs { + return ms.orig +} + +func NewScopeLogs(orig *otlplogs.ScopeLogs) ScopeLogs { + return ScopeLogs{orig: orig} +} + +type LogRecordSlice struct { + orig *[]*otlplogs.LogRecord +} + +func GetOrigLogRecordSlice(ms LogRecordSlice) *[]*otlplogs.LogRecord { + return ms.orig +} + +func NewLogRecordSlice(orig *[]*otlplogs.LogRecord) LogRecordSlice { + return LogRecordSlice{orig: orig} +} + +type LogRecord struct { + orig *otlplogs.LogRecord +} + +func GetOrigLogRecord(ms LogRecord) *otlplogs.LogRecord { + return ms.orig +} + +func NewLogRecord(orig *otlplogs.LogRecord) LogRecord { + return LogRecord{orig: orig} +} + +func GenerateTestResourceLogsSlice() ResourceLogsSlice { + orig := []*otlplogs.ResourceLogs{} + tv := NewResourceLogsSlice(&orig) + FillTestResourceLogsSlice(tv) + return tv +} + +func FillTestResourceLogsSlice(tv ResourceLogsSlice) { + *tv.orig = make([]*otlplogs.ResourceLogs, 7) + for i := 0; i < 7; i++ { + (*tv.orig)[i] = &otlplogs.ResourceLogs{} + FillTestResourceLogs(NewResourceLogs((*tv.orig)[i])) + } +} + +func GenerateTestResourceLogs() ResourceLogs { + orig := otlplogs.ResourceLogs{} + tv := NewResourceLogs(&orig) + FillTestResourceLogs(tv) + return tv +} + +func FillTestResourceLogs(tv ResourceLogs) { + FillTestResource(NewResource(&tv.orig.Resource)) + tv.orig.SchemaUrl = "https://opentelemetry.io/schemas/1.5.0" + FillTestScopeLogsSlice(NewScopeLogsSlice(&tv.orig.ScopeLogs)) +} + +func GenerateTestScopeLogsSlice() ScopeLogsSlice { + orig := []*otlplogs.ScopeLogs{} + tv := NewScopeLogsSlice(&orig) + FillTestScopeLogsSlice(tv) + return tv +} + +func FillTestScopeLogsSlice(tv ScopeLogsSlice) { + *tv.orig = make([]*otlplogs.ScopeLogs, 7) + for i := 0; i < 7; i++ { + (*tv.orig)[i] = &otlplogs.ScopeLogs{} + FillTestScopeLogs(NewScopeLogs((*tv.orig)[i])) + } +} + +func GenerateTestScopeLogs() ScopeLogs { + orig := otlplogs.ScopeLogs{} + tv := NewScopeLogs(&orig) + FillTestScopeLogs(tv) + return tv +} + +func FillTestScopeLogs(tv ScopeLogs) { + FillTestInstrumentationScope(NewInstrumentationScope(&tv.orig.Scope)) + tv.orig.SchemaUrl = "https://opentelemetry.io/schemas/1.5.0" + FillTestLogRecordSlice(NewLogRecordSlice(&tv.orig.LogRecords)) +} + +func GenerateTestLogRecordSlice() LogRecordSlice { + orig := []*otlplogs.LogRecord{} + tv := NewLogRecordSlice(&orig) + FillTestLogRecordSlice(tv) + return tv +} + +func FillTestLogRecordSlice(tv LogRecordSlice) { + *tv.orig = 
make([]*otlplogs.LogRecord, 7) + for i := 0; i < 7; i++ { + (*tv.orig)[i] = &otlplogs.LogRecord{} + FillTestLogRecord(NewLogRecord((*tv.orig)[i])) + } +} + +func GenerateTestLogRecord() LogRecord { + orig := otlplogs.LogRecord{} + tv := NewLogRecord(&orig) + FillTestLogRecord(tv) + return tv +} + +func FillTestLogRecord(tv LogRecord) { + tv.orig.ObservedTimeUnixNano = 1234567890 + tv.orig.TimeUnixNano = 1234567890 + tv.orig.TraceId = data.NewTraceID([16]byte{1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1}) + tv.orig.SpanId = data.NewSpanID([8]byte{1, 2, 3, 4, 5, 6, 7, 8}) + FillTestLogRecordFlags(NewLogRecordFlags(&tv.orig.Flags)) + tv.orig.SeverityText = "INFO" + tv.orig.SeverityNumber = otlplogs.SeverityNumber(5) + FillTestValue(NewValue(&tv.orig.Body)) + FillTestMap(NewMap(&tv.orig.Attributes)) + tv.orig.DroppedAttributesCount = uint32(17) +} diff --git a/pdata/internal/generated_wrapper_metrics.go b/pdata/internal/generated_wrapper_metrics.go new file mode 100644 index 00000000000..494036854ee --- /dev/null +++ b/pdata/internal/generated_wrapper_metrics.go @@ -0,0 +1,661 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. +// To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". 
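+// generated_wrapper_metrics.go applies the same wrapper pattern to every pmetric
+// type, from ResourceMetricsSlice down to Exemplar. Note that the FillTest*
+// helpers set fields on the otlpmetrics structs themselves (including oneof
+// payloads such as Metric_Sum) rather than going through the public setters,
+// which keeps the internal package free of any dependency on pmetric.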
+ +package internal + +import ( + "go.opentelemetry.io/collector/pdata/internal/data" + otlpmetrics "go.opentelemetry.io/collector/pdata/internal/data/protogen/metrics/v1" +) + +type ResourceMetricsSlice struct { + orig *[]*otlpmetrics.ResourceMetrics +} + +func GetOrigResourceMetricsSlice(ms ResourceMetricsSlice) *[]*otlpmetrics.ResourceMetrics { + return ms.orig +} + +func NewResourceMetricsSlice(orig *[]*otlpmetrics.ResourceMetrics) ResourceMetricsSlice { + return ResourceMetricsSlice{orig: orig} +} + +type ResourceMetrics struct { + orig *otlpmetrics.ResourceMetrics +} + +func GetOrigResourceMetrics(ms ResourceMetrics) *otlpmetrics.ResourceMetrics { + return ms.orig +} + +func NewResourceMetrics(orig *otlpmetrics.ResourceMetrics) ResourceMetrics { + return ResourceMetrics{orig: orig} +} + +type ScopeMetricsSlice struct { + orig *[]*otlpmetrics.ScopeMetrics +} + +func GetOrigScopeMetricsSlice(ms ScopeMetricsSlice) *[]*otlpmetrics.ScopeMetrics { + return ms.orig +} + +func NewScopeMetricsSlice(orig *[]*otlpmetrics.ScopeMetrics) ScopeMetricsSlice { + return ScopeMetricsSlice{orig: orig} +} + +type ScopeMetrics struct { + orig *otlpmetrics.ScopeMetrics +} + +func GetOrigScopeMetrics(ms ScopeMetrics) *otlpmetrics.ScopeMetrics { + return ms.orig +} + +func NewScopeMetrics(orig *otlpmetrics.ScopeMetrics) ScopeMetrics { + return ScopeMetrics{orig: orig} +} + +type MetricSlice struct { + orig *[]*otlpmetrics.Metric +} + +func GetOrigMetricSlice(ms MetricSlice) *[]*otlpmetrics.Metric { + return ms.orig +} + +func NewMetricSlice(orig *[]*otlpmetrics.Metric) MetricSlice { + return MetricSlice{orig: orig} +} + +type Metric struct { + orig *otlpmetrics.Metric +} + +func GetOrigMetric(ms Metric) *otlpmetrics.Metric { + return ms.orig +} + +func NewMetric(orig *otlpmetrics.Metric) Metric { + return Metric{orig: orig} +} + +type Gauge struct { + orig *otlpmetrics.Gauge +} + +func GetOrigGauge(ms Gauge) *otlpmetrics.Gauge { + return ms.orig +} + +func NewGauge(orig *otlpmetrics.Gauge) Gauge { + return Gauge{orig: orig} +} + +type Sum struct { + orig *otlpmetrics.Sum +} + +func GetOrigSum(ms Sum) *otlpmetrics.Sum { + return ms.orig +} + +func NewSum(orig *otlpmetrics.Sum) Sum { + return Sum{orig: orig} +} + +type Histogram struct { + orig *otlpmetrics.Histogram +} + +func GetOrigHistogram(ms Histogram) *otlpmetrics.Histogram { + return ms.orig +} + +func NewHistogram(orig *otlpmetrics.Histogram) Histogram { + return Histogram{orig: orig} +} + +type ExponentialHistogram struct { + orig *otlpmetrics.ExponentialHistogram +} + +func GetOrigExponentialHistogram(ms ExponentialHistogram) *otlpmetrics.ExponentialHistogram { + return ms.orig +} + +func NewExponentialHistogram(orig *otlpmetrics.ExponentialHistogram) ExponentialHistogram { + return ExponentialHistogram{orig: orig} +} + +type Summary struct { + orig *otlpmetrics.Summary +} + +func GetOrigSummary(ms Summary) *otlpmetrics.Summary { + return ms.orig +} + +func NewSummary(orig *otlpmetrics.Summary) Summary { + return Summary{orig: orig} +} + +type NumberDataPointSlice struct { + orig *[]*otlpmetrics.NumberDataPoint +} + +func GetOrigNumberDataPointSlice(ms NumberDataPointSlice) *[]*otlpmetrics.NumberDataPoint { + return ms.orig +} + +func NewNumberDataPointSlice(orig *[]*otlpmetrics.NumberDataPoint) NumberDataPointSlice { + return NumberDataPointSlice{orig: orig} +} + +type NumberDataPoint struct { + orig *otlpmetrics.NumberDataPoint +} + +func GetOrigNumberDataPoint(ms NumberDataPoint) *otlpmetrics.NumberDataPoint { + return ms.orig +} + +func 
NewNumberDataPoint(orig *otlpmetrics.NumberDataPoint) NumberDataPoint { + return NumberDataPoint{orig: orig} +} + +type HistogramDataPointSlice struct { + orig *[]*otlpmetrics.HistogramDataPoint +} + +func GetOrigHistogramDataPointSlice(ms HistogramDataPointSlice) *[]*otlpmetrics.HistogramDataPoint { + return ms.orig +} + +func NewHistogramDataPointSlice(orig *[]*otlpmetrics.HistogramDataPoint) HistogramDataPointSlice { + return HistogramDataPointSlice{orig: orig} +} + +type HistogramDataPoint struct { + orig *otlpmetrics.HistogramDataPoint +} + +func GetOrigHistogramDataPoint(ms HistogramDataPoint) *otlpmetrics.HistogramDataPoint { + return ms.orig +} + +func NewHistogramDataPoint(orig *otlpmetrics.HistogramDataPoint) HistogramDataPoint { + return HistogramDataPoint{orig: orig} +} + +type ExponentialHistogramDataPointSlice struct { + orig *[]*otlpmetrics.ExponentialHistogramDataPoint +} + +func GetOrigExponentialHistogramDataPointSlice(ms ExponentialHistogramDataPointSlice) *[]*otlpmetrics.ExponentialHistogramDataPoint { + return ms.orig +} + +func NewExponentialHistogramDataPointSlice(orig *[]*otlpmetrics.ExponentialHistogramDataPoint) ExponentialHistogramDataPointSlice { + return ExponentialHistogramDataPointSlice{orig: orig} +} + +type ExponentialHistogramDataPoint struct { + orig *otlpmetrics.ExponentialHistogramDataPoint +} + +func GetOrigExponentialHistogramDataPoint(ms ExponentialHistogramDataPoint) *otlpmetrics.ExponentialHistogramDataPoint { + return ms.orig +} + +func NewExponentialHistogramDataPoint(orig *otlpmetrics.ExponentialHistogramDataPoint) ExponentialHistogramDataPoint { + return ExponentialHistogramDataPoint{orig: orig} +} + +type Buckets struct { + orig *otlpmetrics.ExponentialHistogramDataPoint_Buckets +} + +func GetOrigBuckets(ms Buckets) *otlpmetrics.ExponentialHistogramDataPoint_Buckets { + return ms.orig +} + +func NewBuckets(orig *otlpmetrics.ExponentialHistogramDataPoint_Buckets) Buckets { + return Buckets{orig: orig} +} + +type SummaryDataPointSlice struct { + orig *[]*otlpmetrics.SummaryDataPoint +} + +func GetOrigSummaryDataPointSlice(ms SummaryDataPointSlice) *[]*otlpmetrics.SummaryDataPoint { + return ms.orig +} + +func NewSummaryDataPointSlice(orig *[]*otlpmetrics.SummaryDataPoint) SummaryDataPointSlice { + return SummaryDataPointSlice{orig: orig} +} + +type SummaryDataPoint struct { + orig *otlpmetrics.SummaryDataPoint +} + +func GetOrigSummaryDataPoint(ms SummaryDataPoint) *otlpmetrics.SummaryDataPoint { + return ms.orig +} + +func NewSummaryDataPoint(orig *otlpmetrics.SummaryDataPoint) SummaryDataPoint { + return SummaryDataPoint{orig: orig} +} + +type ValueAtQuantileSlice struct { + orig *[]*otlpmetrics.SummaryDataPoint_ValueAtQuantile +} + +func GetOrigValueAtQuantileSlice(ms ValueAtQuantileSlice) *[]*otlpmetrics.SummaryDataPoint_ValueAtQuantile { + return ms.orig +} + +func NewValueAtQuantileSlice(orig *[]*otlpmetrics.SummaryDataPoint_ValueAtQuantile) ValueAtQuantileSlice { + return ValueAtQuantileSlice{orig: orig} +} + +type ValueAtQuantile struct { + orig *otlpmetrics.SummaryDataPoint_ValueAtQuantile +} + +func GetOrigValueAtQuantile(ms ValueAtQuantile) *otlpmetrics.SummaryDataPoint_ValueAtQuantile { + return ms.orig +} + +func NewValueAtQuantile(orig *otlpmetrics.SummaryDataPoint_ValueAtQuantile) ValueAtQuantile { + return ValueAtQuantile{orig: orig} +} + +type ExemplarSlice struct { + orig *[]otlpmetrics.Exemplar +} + +func GetOrigExemplarSlice(ms ExemplarSlice) *[]otlpmetrics.Exemplar { + return ms.orig +} + +func NewExemplarSlice(orig 
*[]otlpmetrics.Exemplar) ExemplarSlice { + return ExemplarSlice{orig: orig} +} + +type Exemplar struct { + orig *otlpmetrics.Exemplar +} + +func GetOrigExemplar(ms Exemplar) *otlpmetrics.Exemplar { + return ms.orig +} + +func NewExemplar(orig *otlpmetrics.Exemplar) Exemplar { + return Exemplar{orig: orig} +} + +func GenerateTestResourceMetricsSlice() ResourceMetricsSlice { + orig := []*otlpmetrics.ResourceMetrics{} + tv := NewResourceMetricsSlice(&orig) + FillTestResourceMetricsSlice(tv) + return tv +} + +func FillTestResourceMetricsSlice(tv ResourceMetricsSlice) { + *tv.orig = make([]*otlpmetrics.ResourceMetrics, 7) + for i := 0; i < 7; i++ { + (*tv.orig)[i] = &otlpmetrics.ResourceMetrics{} + FillTestResourceMetrics(NewResourceMetrics((*tv.orig)[i])) + } +} + +func GenerateTestResourceMetrics() ResourceMetrics { + orig := otlpmetrics.ResourceMetrics{} + tv := NewResourceMetrics(&orig) + FillTestResourceMetrics(tv) + return tv +} + +func FillTestResourceMetrics(tv ResourceMetrics) { + FillTestResource(NewResource(&tv.orig.Resource)) + tv.orig.SchemaUrl = "https://opentelemetry.io/schemas/1.5.0" + FillTestScopeMetricsSlice(NewScopeMetricsSlice(&tv.orig.ScopeMetrics)) +} + +func GenerateTestScopeMetricsSlice() ScopeMetricsSlice { + orig := []*otlpmetrics.ScopeMetrics{} + tv := NewScopeMetricsSlice(&orig) + FillTestScopeMetricsSlice(tv) + return tv +} + +func FillTestScopeMetricsSlice(tv ScopeMetricsSlice) { + *tv.orig = make([]*otlpmetrics.ScopeMetrics, 7) + for i := 0; i < 7; i++ { + (*tv.orig)[i] = &otlpmetrics.ScopeMetrics{} + FillTestScopeMetrics(NewScopeMetrics((*tv.orig)[i])) + } +} + +func GenerateTestScopeMetrics() ScopeMetrics { + orig := otlpmetrics.ScopeMetrics{} + tv := NewScopeMetrics(&orig) + FillTestScopeMetrics(tv) + return tv +} + +func FillTestScopeMetrics(tv ScopeMetrics) { + FillTestInstrumentationScope(NewInstrumentationScope(&tv.orig.Scope)) + tv.orig.SchemaUrl = "https://opentelemetry.io/schemas/1.5.0" + FillTestMetricSlice(NewMetricSlice(&tv.orig.Metrics)) +} + +func GenerateTestMetricSlice() MetricSlice { + orig := []*otlpmetrics.Metric{} + tv := NewMetricSlice(&orig) + FillTestMetricSlice(tv) + return tv +} + +func FillTestMetricSlice(tv MetricSlice) { + *tv.orig = make([]*otlpmetrics.Metric, 7) + for i := 0; i < 7; i++ { + (*tv.orig)[i] = &otlpmetrics.Metric{} + FillTestMetric(NewMetric((*tv.orig)[i])) + } +} + +func GenerateTestMetric() Metric { + orig := otlpmetrics.Metric{} + tv := NewMetric(&orig) + FillTestMetric(tv) + return tv +} + +func FillTestMetric(tv Metric) { + tv.orig.Name = "test_name" + tv.orig.Description = "test_description" + tv.orig.Unit = "1" + tv.orig.Data = &otlpmetrics.Metric_Sum{Sum: &otlpmetrics.Sum{}} + FillTestSum(NewSum(tv.orig.GetSum())) +} + +func GenerateTestGauge() Gauge { + orig := otlpmetrics.Gauge{} + tv := NewGauge(&orig) + FillTestGauge(tv) + return tv +} + +func FillTestGauge(tv Gauge) { + FillTestNumberDataPointSlice(NewNumberDataPointSlice(&tv.orig.DataPoints)) +} + +func GenerateTestSum() Sum { + orig := otlpmetrics.Sum{} + tv := NewSum(&orig) + FillTestSum(tv) + return tv +} + +func FillTestSum(tv Sum) { + tv.orig.AggregationTemporality = otlpmetrics.AggregationTemporality(1) + tv.orig.IsMonotonic = true + FillTestNumberDataPointSlice(NewNumberDataPointSlice(&tv.orig.DataPoints)) +} + +func GenerateTestHistogram() Histogram { + orig := otlpmetrics.Histogram{} + tv := NewHistogram(&orig) + FillTestHistogram(tv) + return tv +} + +func FillTestHistogram(tv Histogram) { + tv.orig.AggregationTemporality = 
otlpmetrics.AggregationTemporality(1) + FillTestHistogramDataPointSlice(NewHistogramDataPointSlice(&tv.orig.DataPoints)) +} + +func GenerateTestExponentialHistogram() ExponentialHistogram { + orig := otlpmetrics.ExponentialHistogram{} + tv := NewExponentialHistogram(&orig) + FillTestExponentialHistogram(tv) + return tv +} + +func FillTestExponentialHistogram(tv ExponentialHistogram) { + tv.orig.AggregationTemporality = otlpmetrics.AggregationTemporality(1) + FillTestExponentialHistogramDataPointSlice(NewExponentialHistogramDataPointSlice(&tv.orig.DataPoints)) +} + +func GenerateTestSummary() Summary { + orig := otlpmetrics.Summary{} + tv := NewSummary(&orig) + FillTestSummary(tv) + return tv +} + +func FillTestSummary(tv Summary) { + FillTestSummaryDataPointSlice(NewSummaryDataPointSlice(&tv.orig.DataPoints)) +} + +func GenerateTestNumberDataPointSlice() NumberDataPointSlice { + orig := []*otlpmetrics.NumberDataPoint{} + tv := NewNumberDataPointSlice(&orig) + FillTestNumberDataPointSlice(tv) + return tv +} + +func FillTestNumberDataPointSlice(tv NumberDataPointSlice) { + *tv.orig = make([]*otlpmetrics.NumberDataPoint, 7) + for i := 0; i < 7; i++ { + (*tv.orig)[i] = &otlpmetrics.NumberDataPoint{} + FillTestNumberDataPoint(NewNumberDataPoint((*tv.orig)[i])) + } +} + +func GenerateTestNumberDataPoint() NumberDataPoint { + orig := otlpmetrics.NumberDataPoint{} + tv := NewNumberDataPoint(&orig) + FillTestNumberDataPoint(tv) + return tv +} + +func FillTestNumberDataPoint(tv NumberDataPoint) { + FillTestMap(NewMap(&tv.orig.Attributes)) + tv.orig.StartTimeUnixNano = 1234567890 + tv.orig.TimeUnixNano = 1234567890 + tv.orig.Value = &otlpmetrics.NumberDataPoint_AsDouble{AsDouble: float64(17.13)} + FillTestExemplarSlice(NewExemplarSlice(&tv.orig.Exemplars)) + FillTestMetricDataPointFlags(NewMetricDataPointFlags(&tv.orig.Flags)) +} + +func GenerateTestHistogramDataPointSlice() HistogramDataPointSlice { + orig := []*otlpmetrics.HistogramDataPoint{} + tv := NewHistogramDataPointSlice(&orig) + FillTestHistogramDataPointSlice(tv) + return tv +} + +func FillTestHistogramDataPointSlice(tv HistogramDataPointSlice) { + *tv.orig = make([]*otlpmetrics.HistogramDataPoint, 7) + for i := 0; i < 7; i++ { + (*tv.orig)[i] = &otlpmetrics.HistogramDataPoint{} + FillTestHistogramDataPoint(NewHistogramDataPoint((*tv.orig)[i])) + } +} + +func GenerateTestHistogramDataPoint() HistogramDataPoint { + orig := otlpmetrics.HistogramDataPoint{} + tv := NewHistogramDataPoint(&orig) + FillTestHistogramDataPoint(tv) + return tv +} + +func FillTestHistogramDataPoint(tv HistogramDataPoint) { + FillTestMap(NewMap(&tv.orig.Attributes)) + tv.orig.StartTimeUnixNano = 1234567890 + tv.orig.TimeUnixNano = 1234567890 + tv.orig.Count = uint64(17) + tv.orig.Sum_ = &otlpmetrics.HistogramDataPoint_Sum{Sum: float64(17.13)} + tv.orig.BucketCounts = []uint64{1, 2, 3} + tv.orig.ExplicitBounds = []float64{1, 2, 3} + FillTestExemplarSlice(NewExemplarSlice(&tv.orig.Exemplars)) + FillTestMetricDataPointFlags(NewMetricDataPointFlags(&tv.orig.Flags)) + tv.orig.Min_ = &otlpmetrics.HistogramDataPoint_Min{Min: float64(9.23)} + tv.orig.Max_ = &otlpmetrics.HistogramDataPoint_Max{Max: float64(182.55)} +} + +func GenerateTestExponentialHistogramDataPointSlice() ExponentialHistogramDataPointSlice { + orig := []*otlpmetrics.ExponentialHistogramDataPoint{} + tv := NewExponentialHistogramDataPointSlice(&orig) + FillTestExponentialHistogramDataPointSlice(tv) + return tv +} + +func FillTestExponentialHistogramDataPointSlice(tv ExponentialHistogramDataPointSlice) { + 
*tv.orig = make([]*otlpmetrics.ExponentialHistogramDataPoint, 7) + for i := 0; i < 7; i++ { + (*tv.orig)[i] = &otlpmetrics.ExponentialHistogramDataPoint{} + FillTestExponentialHistogramDataPoint(NewExponentialHistogramDataPoint((*tv.orig)[i])) + } +} + +func GenerateTestExponentialHistogramDataPoint() ExponentialHistogramDataPoint { + orig := otlpmetrics.ExponentialHistogramDataPoint{} + tv := NewExponentialHistogramDataPoint(&orig) + FillTestExponentialHistogramDataPoint(tv) + return tv +} + +func FillTestExponentialHistogramDataPoint(tv ExponentialHistogramDataPoint) { + FillTestMap(NewMap(&tv.orig.Attributes)) + tv.orig.StartTimeUnixNano = 1234567890 + tv.orig.TimeUnixNano = 1234567890 + tv.orig.Count = uint64(17) + tv.orig.Sum_ = &otlpmetrics.ExponentialHistogramDataPoint_Sum{Sum: float64(17.13)} + tv.orig.Scale = int32(4) + tv.orig.ZeroCount = uint64(201) + FillTestBuckets(NewBuckets(&tv.orig.Positive)) + FillTestBuckets(NewBuckets(&tv.orig.Negative)) + FillTestExemplarSlice(NewExemplarSlice(&tv.orig.Exemplars)) + FillTestMetricDataPointFlags(NewMetricDataPointFlags(&tv.orig.Flags)) + tv.orig.Min_ = &otlpmetrics.ExponentialHistogramDataPoint_Min{Min: float64(9.23)} + tv.orig.Max_ = &otlpmetrics.ExponentialHistogramDataPoint_Max{Max: float64(182.55)} +} + +func GenerateTestBuckets() Buckets { + orig := otlpmetrics.ExponentialHistogramDataPoint_Buckets{} + tv := NewBuckets(&orig) + FillTestBuckets(tv) + return tv +} + +func FillTestBuckets(tv Buckets) { + tv.orig.Offset = int32(909) + tv.orig.BucketCounts = []uint64{1, 2, 3} +} + +func GenerateTestSummaryDataPointSlice() SummaryDataPointSlice { + orig := []*otlpmetrics.SummaryDataPoint{} + tv := NewSummaryDataPointSlice(&orig) + FillTestSummaryDataPointSlice(tv) + return tv +} + +func FillTestSummaryDataPointSlice(tv SummaryDataPointSlice) { + *tv.orig = make([]*otlpmetrics.SummaryDataPoint, 7) + for i := 0; i < 7; i++ { + (*tv.orig)[i] = &otlpmetrics.SummaryDataPoint{} + FillTestSummaryDataPoint(NewSummaryDataPoint((*tv.orig)[i])) + } +} + +func GenerateTestSummaryDataPoint() SummaryDataPoint { + orig := otlpmetrics.SummaryDataPoint{} + tv := NewSummaryDataPoint(&orig) + FillTestSummaryDataPoint(tv) + return tv +} + +func FillTestSummaryDataPoint(tv SummaryDataPoint) { + FillTestMap(NewMap(&tv.orig.Attributes)) + tv.orig.StartTimeUnixNano = 1234567890 + tv.orig.TimeUnixNano = 1234567890 + tv.orig.Count = uint64(17) + tv.orig.Sum = float64(17.13) + FillTestValueAtQuantileSlice(NewValueAtQuantileSlice(&tv.orig.QuantileValues)) + FillTestMetricDataPointFlags(NewMetricDataPointFlags(&tv.orig.Flags)) +} + +func GenerateTestValueAtQuantileSlice() ValueAtQuantileSlice { + orig := []*otlpmetrics.SummaryDataPoint_ValueAtQuantile{} + tv := NewValueAtQuantileSlice(&orig) + FillTestValueAtQuantileSlice(tv) + return tv +} + +func FillTestValueAtQuantileSlice(tv ValueAtQuantileSlice) { + *tv.orig = make([]*otlpmetrics.SummaryDataPoint_ValueAtQuantile, 7) + for i := 0; i < 7; i++ { + (*tv.orig)[i] = &otlpmetrics.SummaryDataPoint_ValueAtQuantile{} + FillTestValueAtQuantile(NewValueAtQuantile((*tv.orig)[i])) + } +} + +func GenerateTestValueAtQuantile() ValueAtQuantile { + orig := otlpmetrics.SummaryDataPoint_ValueAtQuantile{} + tv := NewValueAtQuantile(&orig) + FillTestValueAtQuantile(tv) + return tv +} + +func FillTestValueAtQuantile(tv ValueAtQuantile) { + tv.orig.Quantile = float64(17.13) + tv.orig.Value = float64(17.13) +} + +func GenerateTestExemplarSlice() ExemplarSlice { + orig := []otlpmetrics.Exemplar{} + tv := NewExemplarSlice(&orig) + 
FillTestExemplarSlice(tv) + return tv +} + +func FillTestExemplarSlice(tv ExemplarSlice) { + *tv.orig = make([]otlpmetrics.Exemplar, 7) + for i := 0; i < 7; i++ { + FillTestExemplar(NewExemplar(&(*tv.orig)[i])) + } +} + +func GenerateTestExemplar() Exemplar { + orig := otlpmetrics.Exemplar{} + tv := NewExemplar(&orig) + FillTestExemplar(tv) + return tv +} + +func FillTestExemplar(tv Exemplar) { + tv.orig.TimeUnixNano = 1234567890 + tv.orig.Value = &otlpmetrics.Exemplar_AsInt{AsInt: int64(17)} + FillTestMap(NewMap(&tv.orig.FilteredAttributes)) + tv.orig.TraceId = data.NewTraceID([16]byte{1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1}) + tv.orig.SpanId = data.NewSpanID([8]byte{1, 2, 3, 4, 5, 6, 7, 8}) +} diff --git a/pdata/pcommon/generated_resource_alias.go b/pdata/internal/generated_wrapper_resource.go similarity index 55% rename from pdata/pcommon/generated_resource_alias.go rename to pdata/internal/generated_wrapper_resource.go index 3c10716d16e..96119dc6a60 100644 --- a/pdata/pcommon/generated_resource_alias.go +++ b/pdata/internal/generated_wrapper_resource.go @@ -15,18 +15,32 @@ // Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. // To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". -package pcommon +package internal -import "go.opentelemetry.io/collector/pdata/internal" +import ( + otlpresource "go.opentelemetry.io/collector/pdata/internal/data/protogen/resource/v1" +) -// Resource is a message representing the resource information. -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewResource function to create new instances. -// Important: zero-initialized instance is not valid for use. -type Resource = internal.Resource +type Resource struct { + orig *otlpresource.Resource +} + +func GetOrigResource(ms Resource) *otlpresource.Resource { + return ms.orig +} + +func NewResource(orig *otlpresource.Resource) Resource { + return Resource{orig: orig} +} + +func GenerateTestResource() Resource { + orig := otlpresource.Resource{} + tv := NewResource(&orig) + FillTestResource(tv) + return tv +} -// NewResource is an alias for a function to create a new empty Resource. -var NewResource = internal.NewResource +func FillTestResource(tv Resource) { + FillTestMap(NewMap(&tv.orig.Attributes)) + tv.orig.DroppedAttributesCount = uint32(17) +} diff --git a/pdata/internal/generated_wrapper_traces.go b/pdata/internal/generated_wrapper_traces.go new file mode 100644 index 00000000000..9cec81f741a --- /dev/null +++ b/pdata/internal/generated_wrapper_traces.go @@ -0,0 +1,322 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. +// To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". 
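// Editorial sketch, not part of the generated patch content: the GetOrigXxx/NewXxx pairs
// above give the public pmetric package (updated later in this patch) access to the
// protobuf structs without re-exporting them. The in-package test below is hypothetical
// and only checks the deterministic fixture produced by FillTestResourceMetricsSlice.
package internal

import "testing"

func TestGenerateTestResourceMetricsSlice(t *testing.T) {
	tv := GenerateTestResourceMetricsSlice()
	// FillTestResourceMetricsSlice allocates exactly seven ResourceMetrics elements.
	if got := len(*GetOrigResourceMetricsSlice(tv)); got != 7 {
		t.Fatalf("expected 7 resource metrics, got %d", got)
	}
}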
+ +package internal + +import ( + "go.opentelemetry.io/collector/pdata/internal/data" + otlptrace "go.opentelemetry.io/collector/pdata/internal/data/protogen/trace/v1" +) + +type ResourceSpansSlice struct { + orig *[]*otlptrace.ResourceSpans +} + +func GetOrigResourceSpansSlice(ms ResourceSpansSlice) *[]*otlptrace.ResourceSpans { + return ms.orig +} + +func NewResourceSpansSlice(orig *[]*otlptrace.ResourceSpans) ResourceSpansSlice { + return ResourceSpansSlice{orig: orig} +} + +type ResourceSpans struct { + orig *otlptrace.ResourceSpans +} + +func GetOrigResourceSpans(ms ResourceSpans) *otlptrace.ResourceSpans { + return ms.orig +} + +func NewResourceSpans(orig *otlptrace.ResourceSpans) ResourceSpans { + return ResourceSpans{orig: orig} +} + +type ScopeSpansSlice struct { + orig *[]*otlptrace.ScopeSpans +} + +func GetOrigScopeSpansSlice(ms ScopeSpansSlice) *[]*otlptrace.ScopeSpans { + return ms.orig +} + +func NewScopeSpansSlice(orig *[]*otlptrace.ScopeSpans) ScopeSpansSlice { + return ScopeSpansSlice{orig: orig} +} + +type ScopeSpans struct { + orig *otlptrace.ScopeSpans +} + +func GetOrigScopeSpans(ms ScopeSpans) *otlptrace.ScopeSpans { + return ms.orig +} + +func NewScopeSpans(orig *otlptrace.ScopeSpans) ScopeSpans { + return ScopeSpans{orig: orig} +} + +type SpanSlice struct { + orig *[]*otlptrace.Span +} + +func GetOrigSpanSlice(ms SpanSlice) *[]*otlptrace.Span { + return ms.orig +} + +func NewSpanSlice(orig *[]*otlptrace.Span) SpanSlice { + return SpanSlice{orig: orig} +} + +type Span struct { + orig *otlptrace.Span +} + +func GetOrigSpan(ms Span) *otlptrace.Span { + return ms.orig +} + +func NewSpan(orig *otlptrace.Span) Span { + return Span{orig: orig} +} + +type SpanEventSlice struct { + orig *[]*otlptrace.Span_Event +} + +func GetOrigSpanEventSlice(ms SpanEventSlice) *[]*otlptrace.Span_Event { + return ms.orig +} + +func NewSpanEventSlice(orig *[]*otlptrace.Span_Event) SpanEventSlice { + return SpanEventSlice{orig: orig} +} + +type SpanEvent struct { + orig *otlptrace.Span_Event +} + +func GetOrigSpanEvent(ms SpanEvent) *otlptrace.Span_Event { + return ms.orig +} + +func NewSpanEvent(orig *otlptrace.Span_Event) SpanEvent { + return SpanEvent{orig: orig} +} + +type SpanLinkSlice struct { + orig *[]*otlptrace.Span_Link +} + +func GetOrigSpanLinkSlice(ms SpanLinkSlice) *[]*otlptrace.Span_Link { + return ms.orig +} + +func NewSpanLinkSlice(orig *[]*otlptrace.Span_Link) SpanLinkSlice { + return SpanLinkSlice{orig: orig} +} + +type SpanLink struct { + orig *otlptrace.Span_Link +} + +func GetOrigSpanLink(ms SpanLink) *otlptrace.Span_Link { + return ms.orig +} + +func NewSpanLink(orig *otlptrace.Span_Link) SpanLink { + return SpanLink{orig: orig} +} + +type SpanStatus struct { + orig *otlptrace.Status +} + +func GetOrigSpanStatus(ms SpanStatus) *otlptrace.Status { + return ms.orig +} + +func NewSpanStatus(orig *otlptrace.Status) SpanStatus { + return SpanStatus{orig: orig} +} + +func GenerateTestResourceSpansSlice() ResourceSpansSlice { + orig := []*otlptrace.ResourceSpans{} + tv := NewResourceSpansSlice(&orig) + FillTestResourceSpansSlice(tv) + return tv +} + +func FillTestResourceSpansSlice(tv ResourceSpansSlice) { + *tv.orig = make([]*otlptrace.ResourceSpans, 7) + for i := 0; i < 7; i++ { + (*tv.orig)[i] = &otlptrace.ResourceSpans{} + FillTestResourceSpans(NewResourceSpans((*tv.orig)[i])) + } +} + +func GenerateTestResourceSpans() ResourceSpans { + orig := otlptrace.ResourceSpans{} + tv := NewResourceSpans(&orig) + FillTestResourceSpans(tv) + return tv +} + +func 
FillTestResourceSpans(tv ResourceSpans) { + FillTestResource(NewResource(&tv.orig.Resource)) + tv.orig.SchemaUrl = "https://opentelemetry.io/schemas/1.5.0" + FillTestScopeSpansSlice(NewScopeSpansSlice(&tv.orig.ScopeSpans)) +} + +func GenerateTestScopeSpansSlice() ScopeSpansSlice { + orig := []*otlptrace.ScopeSpans{} + tv := NewScopeSpansSlice(&orig) + FillTestScopeSpansSlice(tv) + return tv +} + +func FillTestScopeSpansSlice(tv ScopeSpansSlice) { + *tv.orig = make([]*otlptrace.ScopeSpans, 7) + for i := 0; i < 7; i++ { + (*tv.orig)[i] = &otlptrace.ScopeSpans{} + FillTestScopeSpans(NewScopeSpans((*tv.orig)[i])) + } +} + +func GenerateTestScopeSpans() ScopeSpans { + orig := otlptrace.ScopeSpans{} + tv := NewScopeSpans(&orig) + FillTestScopeSpans(tv) + return tv +} + +func FillTestScopeSpans(tv ScopeSpans) { + FillTestInstrumentationScope(NewInstrumentationScope(&tv.orig.Scope)) + tv.orig.SchemaUrl = "https://opentelemetry.io/schemas/1.5.0" + FillTestSpanSlice(NewSpanSlice(&tv.orig.Spans)) +} + +func GenerateTestSpanSlice() SpanSlice { + orig := []*otlptrace.Span{} + tv := NewSpanSlice(&orig) + FillTestSpanSlice(tv) + return tv +} + +func FillTestSpanSlice(tv SpanSlice) { + *tv.orig = make([]*otlptrace.Span, 7) + for i := 0; i < 7; i++ { + (*tv.orig)[i] = &otlptrace.Span{} + FillTestSpan(NewSpan((*tv.orig)[i])) + } +} + +func GenerateTestSpan() Span { + orig := otlptrace.Span{} + tv := NewSpan(&orig) + FillTestSpan(tv) + return tv +} + +func FillTestSpan(tv Span) { + tv.orig.TraceId = data.NewTraceID([16]byte{1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1}) + tv.orig.SpanId = data.NewSpanID([8]byte{1, 2, 3, 4, 5, 6, 7, 8}) + tv.orig.TraceState = "congo=congos" + tv.orig.ParentSpanId = data.NewSpanID([8]byte{8, 7, 6, 5, 4, 3, 2, 1}) + tv.orig.Name = "test_name" + tv.orig.Kind = otlptrace.Span_SpanKind(3) + tv.orig.StartTimeUnixNano = 1234567890 + tv.orig.EndTimeUnixNano = 1234567890 + FillTestMap(NewMap(&tv.orig.Attributes)) + tv.orig.DroppedAttributesCount = uint32(17) + FillTestSpanEventSlice(NewSpanEventSlice(&tv.orig.Events)) + tv.orig.DroppedEventsCount = uint32(17) + FillTestSpanLinkSlice(NewSpanLinkSlice(&tv.orig.Links)) + tv.orig.DroppedLinksCount = uint32(17) + FillTestSpanStatus(NewSpanStatus(&tv.orig.Status)) +} + +func GenerateTestSpanEventSlice() SpanEventSlice { + orig := []*otlptrace.Span_Event{} + tv := NewSpanEventSlice(&orig) + FillTestSpanEventSlice(tv) + return tv +} + +func FillTestSpanEventSlice(tv SpanEventSlice) { + *tv.orig = make([]*otlptrace.Span_Event, 7) + for i := 0; i < 7; i++ { + (*tv.orig)[i] = &otlptrace.Span_Event{} + FillTestSpanEvent(NewSpanEvent((*tv.orig)[i])) + } +} + +func GenerateTestSpanEvent() SpanEvent { + orig := otlptrace.Span_Event{} + tv := NewSpanEvent(&orig) + FillTestSpanEvent(tv) + return tv +} + +func FillTestSpanEvent(tv SpanEvent) { + tv.orig.TimeUnixNano = 1234567890 + tv.orig.Name = "test_name" + FillTestMap(NewMap(&tv.orig.Attributes)) + tv.orig.DroppedAttributesCount = uint32(17) +} + +func GenerateTestSpanLinkSlice() SpanLinkSlice { + orig := []*otlptrace.Span_Link{} + tv := NewSpanLinkSlice(&orig) + FillTestSpanLinkSlice(tv) + return tv +} + +func FillTestSpanLinkSlice(tv SpanLinkSlice) { + *tv.orig = make([]*otlptrace.Span_Link, 7) + for i := 0; i < 7; i++ { + (*tv.orig)[i] = &otlptrace.Span_Link{} + FillTestSpanLink(NewSpanLink((*tv.orig)[i])) + } +} + +func GenerateTestSpanLink() SpanLink { + orig := otlptrace.Span_Link{} + tv := NewSpanLink(&orig) + FillTestSpanLink(tv) + return tv +} + +func FillTestSpanLink(tv SpanLink) { + 
tv.orig.TraceId = data.NewTraceID([16]byte{1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1}) + tv.orig.SpanId = data.NewSpanID([8]byte{1, 2, 3, 4, 5, 6, 7, 8}) + tv.orig.TraceState = "congo=congos" + FillTestMap(NewMap(&tv.orig.Attributes)) + tv.orig.DroppedAttributesCount = uint32(17) +} + +func GenerateTestSpanStatus() SpanStatus { + orig := otlptrace.Status{} + tv := NewSpanStatus(&orig) + FillTestSpanStatus(tv) + return tv +} + +func FillTestSpanStatus(tv SpanStatus) { + tv.orig.Code = 1 + tv.orig.Message = "cancelled" +} diff --git a/pdata/internal/wrapper_common.go b/pdata/internal/wrapper_common.go new file mode 100644 index 00000000000..66933ab707f --- /dev/null +++ b/pdata/internal/wrapper_common.go @@ -0,0 +1,66 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package internal // import "go.opentelemetry.io/collector/pdata/internal" + +import ( + otlpcommon "go.opentelemetry.io/collector/pdata/internal/data/protogen/common/v1" +) + +type Value struct { + orig *otlpcommon.AnyValue +} + +func GetOrigValue(ms Value) *otlpcommon.AnyValue { + return ms.orig +} + +func NewValue(orig *otlpcommon.AnyValue) Value { + return Value{orig: orig} +} + +type Map struct { + orig *[]otlpcommon.KeyValue +} + +func GetOrigMap(ms Map) *[]otlpcommon.KeyValue { + return ms.orig +} + +func NewMap(orig *[]otlpcommon.KeyValue) Map { + return Map{orig: orig} +} + +func FillTestValue(dest Value) { + dest.orig.Value = &otlpcommon.AnyValue_StringValue{StringValue: "v"} +} + +func GenerateTestValue() Value { + var orig otlpcommon.AnyValue + av := NewValue(&orig) + FillTestValue(av) + return av +} + +func GenerateTestMap() Map { + var orig []otlpcommon.KeyValue + am := NewMap(&orig) + FillTestMap(am) + return am +} + +func FillTestMap(dest Map) { + *dest.orig = nil + *dest.orig = append(*dest.orig, otlpcommon.KeyValue{Key: "k", Value: otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_StringValue{StringValue: "v"}}}) +} diff --git a/pdata/internal/wrapper_logs.go b/pdata/internal/wrapper_logs.go new file mode 100644 index 00000000000..58311f451d0 --- /dev/null +++ b/pdata/internal/wrapper_logs.go @@ -0,0 +1,69 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
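// Editorial sketch, not part of the generated patch content: wrapper_common.go above lets
// sibling pdata packages hold typed references to otlpcommon values without exposing the
// protobuf types in their public APIs. The tests below are hypothetical; GetStringValue is
// the gogo-generated getter on otlpcommon.AnyValue that the pcommon changes later in this
// patch also call.
package internal

import "testing"

func TestGenerateTestValue(t *testing.T) {
	v := GenerateTestValue()
	// FillTestValue stores the string "v" in the wrapped AnyValue.
	if got := GetOrigValue(v).GetStringValue(); got != "v" {
		t.Fatalf("expected %q, got %q", "v", got)
	}
}

func TestGenerateTestMap(t *testing.T) {
	m := GenerateTestMap()
	// FillTestMap appends a single "k" -> "v" entry.
	kvs := *GetOrigMap(m)
	if len(kvs) != 1 || kvs[0].Key != "k" {
		t.Fatalf("unexpected map contents: %+v", kvs)
	}
}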
+ +package internal // import "go.opentelemetry.io/collector/pdata/internal" + +import ( + otlpcollectorlog "go.opentelemetry.io/collector/pdata/internal/data/protogen/collector/logs/v1" + otlplogs "go.opentelemetry.io/collector/pdata/internal/data/protogen/logs/v1" +) + +type Logs struct { + orig *otlpcollectorlog.ExportLogsServiceRequest +} + +func GetOrigLogs(ms Logs) *otlpcollectorlog.ExportLogsServiceRequest { + return ms.orig +} + +func NewLogs(orig *otlpcollectorlog.ExportLogsServiceRequest) Logs { + return Logs{orig: orig} +} + +// LogsToProto internal helper to convert Logs to protobuf representation. +func LogsToProto(l Logs) otlplogs.LogsData { + return otlplogs.LogsData{ + ResourceLogs: l.orig.ResourceLogs, + } +} + +// LogsFromProto internal helper to convert protobuf representation to Logs. +func LogsFromProto(orig otlplogs.LogsData) Logs { + return Logs{orig: &otlpcollectorlog.ExportLogsServiceRequest{ + ResourceLogs: orig.ResourceLogs, + }} +} + +type LogRecordFlags struct { + orig *uint32 +} + +func GetOrigLogRecordFlags(ms LogRecordFlags) *uint32 { + return ms.orig +} + +func NewLogRecordFlags(orig *uint32) LogRecordFlags { + return LogRecordFlags{orig: orig} +} + +func FillTestLogRecordFlags(tv LogRecordFlags) { + *tv.orig = uint32(1) +} + +func GenerateTestLogRecordFlags() LogRecordFlags { + var orig uint32 + tv := NewLogRecordFlags(&orig) + FillTestLogRecordFlags(tv) + return tv +} diff --git a/pdata/internal/wrapper_metrics.go b/pdata/internal/wrapper_metrics.go new file mode 100644 index 00000000000..ecdf1c0aef6 --- /dev/null +++ b/pdata/internal/wrapper_metrics.go @@ -0,0 +1,69 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package internal // import "go.opentelemetry.io/collector/pdata/internal" + +import ( + otlpcollectormetrics "go.opentelemetry.io/collector/pdata/internal/data/protogen/collector/metrics/v1" + otlpmetrics "go.opentelemetry.io/collector/pdata/internal/data/protogen/metrics/v1" +) + +type Metrics struct { + orig *otlpcollectormetrics.ExportMetricsServiceRequest +} + +func GetOrigMetrics(ms Metrics) *otlpcollectormetrics.ExportMetricsServiceRequest { + return ms.orig +} + +func NewMetrics(orig *otlpcollectormetrics.ExportMetricsServiceRequest) Metrics { + return Metrics{orig: orig} +} + +// MetricsToProto internal helper to convert Metrics to protobuf representation. +func MetricsToProto(l Metrics) otlpmetrics.MetricsData { + return otlpmetrics.MetricsData{ + ResourceMetrics: l.orig.ResourceMetrics, + } +} + +// MetricsFromProto internal helper to convert protobuf representation to Metrics. 
+func MetricsFromProto(orig otlpmetrics.MetricsData) Metrics { + return Metrics{orig: &otlpcollectormetrics.ExportMetricsServiceRequest{ + ResourceMetrics: orig.ResourceMetrics, + }} +} + +type MetricDataPointFlags struct { + orig *uint32 +} + +func GetOrigMetricDataPointFlags(ms MetricDataPointFlags) *uint32 { + return ms.orig +} + +func NewMetricDataPointFlags(orig *uint32) MetricDataPointFlags { + return MetricDataPointFlags{orig: orig} +} + +func FillTestMetricDataPointFlags(tv MetricDataPointFlags) { + *tv.orig = uint32(0) +} + +func GenerateTestMetricDataPointFlags() MetricDataPointFlags { + var orig uint32 + tv := NewMetricDataPointFlags(&orig) + FillTestMetricDataPointFlags(tv) + return tv +} diff --git a/pdata/pcommon/spanid_alias.go b/pdata/internal/wrapper_span_id.go similarity index 55% rename from pdata/pcommon/spanid_alias.go rename to pdata/internal/wrapper_span_id.go index a37a7fd603a..c8aaf2ae8b3 100644 --- a/pdata/pcommon/spanid_alias.go +++ b/pdata/internal/wrapper_span_id.go @@ -4,7 +4,7 @@ // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, @@ -12,15 +12,20 @@ // See the License for the specific language governing permissions and // limitations under the License. -package pcommon // import "go.opentelemetry.io/collector/pdata/pcommon" +package internal // import "go.opentelemetry.io/collector/pdata/internal" -import "go.opentelemetry.io/collector/pdata/internal" +import ( + "go.opentelemetry.io/collector/pdata/internal/data" +) -// SpanID is span identifier. -type SpanID = internal.SpanID +type SpanID struct { + orig data.SpanID +} -// InvalidSpanID returns an empty (all zero bytes) SpanID. -var InvalidSpanID = internal.InvalidSpanID +func GetOrigSpanID(ms SpanID) data.SpanID { + return ms.orig +} -// NewSpanID returns a new SpanID from the given byte array. -var NewSpanID = internal.NewSpanID +func NewSpanID(orig data.SpanID) SpanID { + return SpanID{orig: orig} +} diff --git a/pdata/pcommon/traceid_alias.go b/pdata/internal/wrapper_trace_id.go similarity index 55% rename from pdata/pcommon/traceid_alias.go rename to pdata/internal/wrapper_trace_id.go index 99ffa62d251..5de30f675d7 100644 --- a/pdata/pcommon/traceid_alias.go +++ b/pdata/internal/wrapper_trace_id.go @@ -4,7 +4,7 @@ // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // -// http://www.apache.org/licenses/LICENSE-2.0 +// http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, @@ -12,15 +12,20 @@ // See the License for the specific language governing permissions and // limitations under the License. -package pcommon // import "go.opentelemetry.io/collector/pdata/pcommon" +package internal // import "go.opentelemetry.io/collector/pdata/internal" -import "go.opentelemetry.io/collector/pdata/internal" +import ( + "go.opentelemetry.io/collector/pdata/internal/data" +) -// TraceID is a trace identifier. -type TraceID = internal.TraceID +type TraceID struct { + orig data.TraceID +} -// InvalidTraceID returns an empty (all zero bytes) TraceID. 
-var InvalidTraceID = internal.InvalidTraceID +func GetOrigTraceID(ms TraceID) data.TraceID { + return ms.orig +} -// NewTraceID returns a new TraceID from the given byte array. -var NewTraceID = internal.NewTraceID +func NewTraceID(orig data.TraceID) TraceID { + return TraceID{orig: orig} +} diff --git a/pdata/internal/wrapper_traces.go b/pdata/internal/wrapper_traces.go new file mode 100644 index 00000000000..63589cd79e4 --- /dev/null +++ b/pdata/internal/wrapper_traces.go @@ -0,0 +1,46 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package internal // import "go.opentelemetry.io/collector/pdata/internal" + +import ( + otlpcollectortrace "go.opentelemetry.io/collector/pdata/internal/data/protogen/collector/trace/v1" + otlptrace "go.opentelemetry.io/collector/pdata/internal/data/protogen/trace/v1" +) + +type Traces struct { + orig *otlpcollectortrace.ExportTraceServiceRequest +} + +func GetOrigTraces(ms Traces) *otlpcollectortrace.ExportTraceServiceRequest { + return ms.orig +} + +func NewTraces(orig *otlpcollectortrace.ExportTraceServiceRequest) Traces { + return Traces{orig: orig} +} + +// TracesToProto internal helper to convert Traces to protobuf representation. +func TracesToProto(l Traces) otlptrace.TracesData { + return otlptrace.TracesData{ + ResourceSpans: l.orig.ResourceSpans, + } +} + +// TracesFromProto internal helper to convert protobuf representation to Traces. +func TracesFromProto(orig otlptrace.TracesData) Traces { + return Traces{orig: &otlpcollectortrace.ExportTraceServiceRequest{ + ResourceSpans: orig.ResourceSpans, + }} +} diff --git a/pdata/pcommon/alias.go b/pdata/pcommon/alias.go deleted file mode 100644 index dd03689145b..00000000000 --- a/pdata/pcommon/alias.go +++ /dev/null @@ -1,92 +0,0 @@ -// Copyright The OpenTelemetry Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package pcommon // import "go.opentelemetry.io/collector/pdata/pcommon" - -// This file contains aliases to data structures that are common for all -// signal types, such as timestamps, attributes, etc. - -import "go.opentelemetry.io/collector/pdata/internal" - -// ValueType specifies the type of Value. 
-type ValueType = internal.ValueType - -const ( - ValueTypeEmpty = internal.ValueTypeEmpty - ValueTypeString = internal.ValueTypeString - ValueTypeInt = internal.ValueTypeInt - ValueTypeDouble = internal.ValueTypeDouble - ValueTypeBool = internal.ValueTypeBool - ValueTypeMap = internal.ValueTypeMap - ValueTypeSlice = internal.ValueTypeSlice - ValueTypeBytes = internal.ValueTypeBytes -) - -// Value is a mutable cell containing any value. Typically used as an element of Map or Slice. -// Must use one of NewValue+ functions below to create new instances. -// -// Intended to be passed by value since internally it is just a pointer to actual -// value representation. For the same reason passing by value and calling setters -// will modify the original, e.g.: -// -// func f1(val Value) { val.SetIntVal(234) } -// func f2() { -// v := NewValueString("a string") -// f1(v) -// _ := v.Type() // this will return ValueTypeInt -// } -// -// Important: zero-initialized instance is not valid for use. All Value functions below must -// be called only on instances that are created via NewValue+ functions. -type Value = internal.Value - -var ( - // NewValueEmpty creates a new Value with an empty value. - NewValueEmpty = internal.NewValueEmpty - - // NewValueString creates a new Value with the given string value. - NewValueString = internal.NewValueString - - // NewValueInt creates a new Value with the given int64 value. - NewValueInt = internal.NewValueInt - - // NewValueDouble creates a new Value with the given float64 value. - NewValueDouble = internal.NewValueDouble - - // NewValueBool creates a new Value with the given bool value. - NewValueBool = internal.NewValueBool - - // NewValueMap creates a new Value of map type. - NewValueMap = internal.NewValueMap - - // NewValueSlice creates a new Value of array type. - NewValueSlice = internal.NewValueSlice - - // NewValueBytes creates a new Value with the given ImmutableByteSlice value. - NewValueBytes = internal.NewValueBytes -) - -// Map stores a map of string keys to elements of Value type. -type Map = internal.Map - -var ( - // NewMap creates a Map with 0 elements. - NewMap = internal.NewMap - - // NewMapFromRaw creates a Map with values from the given map[string]interface{}. - NewMapFromRaw = internal.NewMapFromRaw -) - -// NewSliceFromRaw creates a Slice with values from the given []interface{}. -var NewSliceFromRaw = internal.NewSliceFromRaw diff --git a/pdata/internal/common.go b/pdata/pcommon/common.go similarity index 81% rename from pdata/internal/common.go rename to pdata/pcommon/common.go index 6f08113eb9c..63083ca52fb 100644 --- a/pdata/internal/common.go +++ b/pdata/pcommon/common.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package internal // import "go.opentelemetry.io/collector/pdata/internal" +package pcommon // import "go.opentelemetry.io/collector/pdata/pcommon" // This file contains data structures that are common for all telemetry types, // such as timestamps, attributes, etc. @@ -26,6 +26,7 @@ import ( "sort" "strconv" + "go.opentelemetry.io/collector/pdata/internal" otlpcommon "go.opentelemetry.io/collector/pdata/internal/data/protogen/common/v1" ) @@ -82,52 +83,54 @@ func (avt ValueType) String() string { // // Important: zero-initialized instance is not valid for use. All Value functions below must // be called only on instances that are created via NewValue+ functions. 
-type Value struct { - orig *otlpcommon.AnyValue -} - -func newValue(orig *otlpcommon.AnyValue) Value { - return Value{orig} -} +type Value internal.Value // NewValueEmpty creates a new Value with an empty value. func NewValueEmpty() Value { - return Value{orig: &otlpcommon.AnyValue{}} + return newValue(&otlpcommon.AnyValue{}) } // NewValueString creates a new Value with the given string value. func NewValueString(v string) Value { - return Value{orig: &otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_StringValue{StringValue: v}}} + return newValue(&otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_StringValue{StringValue: v}}) } // NewValueInt creates a new Value with the given int64 value. func NewValueInt(v int64) Value { - return Value{orig: &otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_IntValue{IntValue: v}}} + return newValue(&otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_IntValue{IntValue: v}}) } // NewValueDouble creates a new Value with the given float64 value. func NewValueDouble(v float64) Value { - return Value{orig: &otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_DoubleValue{DoubleValue: v}}} + return newValue(&otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_DoubleValue{DoubleValue: v}}) } // NewValueBool creates a new Value with the given bool value. func NewValueBool(v bool) Value { - return Value{orig: &otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_BoolValue{BoolValue: v}}} + return newValue(&otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_BoolValue{BoolValue: v}}) } // NewValueMap creates a new Value of map type. func NewValueMap() Value { - return Value{orig: &otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_KvlistValue{KvlistValue: &otlpcommon.KeyValueList{}}}} + return newValue(&otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_KvlistValue{KvlistValue: &otlpcommon.KeyValueList{}}}) } // NewValueSlice creates a new Value of array type. func NewValueSlice() Value { - return Value{orig: &otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_ArrayValue{ArrayValue: &otlpcommon.ArrayValue{}}}} + return newValue(&otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_ArrayValue{ArrayValue: &otlpcommon.ArrayValue{}}}) } // NewValueBytes creates a new Value with the given ImmutableByteSlice value. func NewValueBytes(v ImmutableByteSlice) Value { - return Value{orig: &otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_BytesValue{BytesValue: v.value}}} + return newValue(&otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_BytesValue{BytesValue: v.getOrig()}}) +} + +func newValue(orig *otlpcommon.AnyValue) Value { + return Value(internal.NewValue(orig)) +} + +func (v Value) getOrig() *otlpcommon.AnyValue { + return internal.GetOrigValue(internal.Value(v)) } func newValueFromRaw(iv interface{}) Value { @@ -180,7 +183,7 @@ func newValueFromRaw(iv interface{}) Value { // Type returns the type of the value for this Value. // Calling this function on zero-initialized Value will cause a panic. func (v Value) Type() ValueType { - switch v.orig.Value.(type) { + switch v.getOrig().Value.(type) { case *otlpcommon.AnyValue_StringValue: return ValueTypeString case *otlpcommon.AnyValue_BoolValue: @@ -203,28 +206,28 @@ func (v Value) Type() ValueType { // If the Type() is not ValueTypeString then returns empty string. // Calling this function on zero-initialized Value will cause a panic. func (v Value) StringVal() string { - return v.orig.GetStringValue() + return v.getOrig().GetStringValue() } // IntVal returns the int64 value associated with this Value. 
// If the Type() is not ValueTypeInt then returns int64(0). // Calling this function on zero-initialized Value will cause a panic. func (v Value) IntVal() int64 { - return v.orig.GetIntValue() + return v.getOrig().GetIntValue() } // DoubleVal returns the float64 value associated with this Value. // If the Type() is not ValueTypeDouble then returns float64(0). // Calling this function on zero-initialized Value will cause a panic. func (v Value) DoubleVal() float64 { - return v.orig.GetDoubleValue() + return v.getOrig().GetDoubleValue() } // BoolVal returns the bool value associated with this Value. // If the Type() is not ValueTypeBool then returns false. // Calling this function on zero-initialized Value will cause a panic. func (v Value) BoolVal() bool { - return v.orig.GetBoolValue() + return v.getOrig().GetBoolValue() } // MapVal returns the map value associated with this Value. @@ -233,7 +236,7 @@ func (v Value) BoolVal() bool { // // Calling this function on zero-initialized Value will cause a panic. func (v Value) MapVal() Map { - kvlist := v.orig.GetKvlistValue() + kvlist := v.getOrig().GetKvlistValue() if kvlist == nil { return Map{} } @@ -246,7 +249,7 @@ func (v Value) MapVal() Map { // // Calling this function on zero-initialized Value will cause a panic. func (v Value) SliceVal() Slice { - arr := v.orig.GetArrayValue() + arr := v.getOrig().GetArrayValue() if arr == nil { return Slice{} } @@ -257,47 +260,47 @@ func (v Value) SliceVal() Slice { // If the Type() is not ValueTypeBytes then returns an empty slice. // Calling this function on zero-initialized Value will cause a panic. func (v Value) BytesVal() ImmutableByteSlice { - return ImmutableByteSlice{value: v.orig.GetBytesValue()} + return ImmutableByteSlice(internal.NewImmutableByteSlice(v.getOrig().GetBytesValue())) } // SetStringVal replaces the string value associated with this Value, // it also changes the type to be ValueTypeString. // Calling this function on zero-initialized Value will cause a panic. func (v Value) SetStringVal(sv string) { - v.orig.Value = &otlpcommon.AnyValue_StringValue{StringValue: sv} + v.getOrig().Value = &otlpcommon.AnyValue_StringValue{StringValue: sv} } // SetIntVal replaces the int64 value associated with this Value, // it also changes the type to be ValueTypeInt. // Calling this function on zero-initialized Value will cause a panic. func (v Value) SetIntVal(iv int64) { - v.orig.Value = &otlpcommon.AnyValue_IntValue{IntValue: iv} + v.getOrig().Value = &otlpcommon.AnyValue_IntValue{IntValue: iv} } // SetDoubleVal replaces the float64 value associated with this Value, // it also changes the type to be ValueTypeDouble. // Calling this function on zero-initialized Value will cause a panic. func (v Value) SetDoubleVal(dv float64) { - v.orig.Value = &otlpcommon.AnyValue_DoubleValue{DoubleValue: dv} + v.getOrig().Value = &otlpcommon.AnyValue_DoubleValue{DoubleValue: dv} } // SetBoolVal replaces the bool value associated with this Value, // it also changes the type to be ValueTypeBool. // Calling this function on zero-initialized Value will cause a panic. func (v Value) SetBoolVal(bv bool) { - v.orig.Value = &otlpcommon.AnyValue_BoolValue{BoolValue: bv} + v.getOrig().Value = &otlpcommon.AnyValue_BoolValue{BoolValue: bv} } // SetBytesVal replaces the ImmutableByteSlice value associated with this Value, // it also changes the type to be ValueTypeBytes. // Calling this function on zero-initialized Value will cause a panic. 
func (v Value) SetBytesVal(bv ImmutableByteSlice) { - v.orig.Value = &otlpcommon.AnyValue_BytesValue{BytesValue: bv.value} + v.getOrig().Value = &otlpcommon.AnyValue_BytesValue{BytesValue: bv.getOrig()} } // copyTo copies the value to Value. Will panic if dest is nil. func (v Value) copyTo(dest *otlpcommon.AnyValue) { - switch ov := v.orig.Value.(type) { + switch ov := v.getOrig().Value.(type) { case *otlpcommon.AnyValue_KvlistValue: kv, ok := dest.Value.(*otlpcommon.AnyValue_KvlistValue) if !ok { @@ -332,41 +335,41 @@ func (v Value) copyTo(dest *otlpcommon.AnyValue) { copy(bv.BytesValue, ov.BytesValue) default: // Primitive immutable type, no need for deep copy. - dest.Value = v.orig.Value + dest.Value = v.getOrig().Value } } // CopyTo copies the attribute to a destination. func (v Value) CopyTo(dest Value) { - v.copyTo(dest.orig) + v.copyTo(dest.getOrig()) } // Equal checks for equality, it returns true if the objects are equal otherwise false. func (v Value) Equal(av Value) bool { - if v.orig == av.orig { + if v.getOrig() == av.getOrig() { return true } - if v.orig.Value == nil || av.orig.Value == nil { - return v.orig.Value == av.orig.Value + if v.getOrig().Value == nil || av.getOrig().Value == nil { + return v.getOrig().Value == av.getOrig().Value } if v.Type() != av.Type() { return false } - switch v := v.orig.Value.(type) { + switch v := v.getOrig().Value.(type) { case *otlpcommon.AnyValue_StringValue: - return v.StringValue == av.orig.GetStringValue() + return v.StringValue == av.getOrig().GetStringValue() case *otlpcommon.AnyValue_BoolValue: - return v.BoolValue == av.orig.GetBoolValue() + return v.BoolValue == av.getOrig().GetBoolValue() case *otlpcommon.AnyValue_IntValue: - return v.IntValue == av.orig.GetIntValue() + return v.IntValue == av.getOrig().GetIntValue() case *otlpcommon.AnyValue_DoubleValue: - return v.DoubleValue == av.orig.GetDoubleValue() + return v.DoubleValue == av.getOrig().GetDoubleValue() case *otlpcommon.AnyValue_ArrayValue: vv := v.ArrayValue.GetValues() - avv := av.orig.GetArrayValue().GetValues() + avv := av.getOrig().GetArrayValue().GetValues() if len(vv) != len(avv) { return false } @@ -379,7 +382,7 @@ func (v Value) Equal(av Value) bool { return true case *otlpcommon.AnyValue_KvlistValue: cc := v.KvlistValue.GetValues() - avv := av.orig.GetKvlistValue().GetValues() + avv := av.getOrig().GetKvlistValue().GetValues() if len(cc) != len(avv) { return false } @@ -398,7 +401,7 @@ func (v Value) Equal(av Value) bool { } return true case *otlpcommon.AnyValue_BytesValue: - return bytes.Equal(v.BytesValue, av.orig.GetBytesValue()) + return bytes.Equal(v.BytesValue, av.getOrig().GetBytesValue()) } return false @@ -429,7 +432,7 @@ func (v Value) AsString() string { return string(jsonStr) case ValueTypeBytes: - return base64.StdEncoding.EncodeToString(v.BytesVal().value) + return base64.StdEncoding.EncodeToString(v.BytesVal().getOrig()) case ValueTypeSlice: jsonStr, _ := json.Marshal(v.SliceVal().AsRaw()) @@ -495,28 +498,28 @@ func (v Value) asRaw() interface{} { func newAttributeKeyValueString(k string, v string) otlpcommon.KeyValue { orig := otlpcommon.KeyValue{Key: k} - akv := Value{&orig.Value} + akv := newValue(&orig.Value) akv.SetStringVal(v) return orig } func newAttributeKeyValueInt(k string, v int64) otlpcommon.KeyValue { orig := otlpcommon.KeyValue{Key: k} - akv := Value{&orig.Value} + akv := newValue(&orig.Value) akv.SetIntVal(v) return orig } func newAttributeKeyValueDouble(k string, v float64) otlpcommon.KeyValue { orig := otlpcommon.KeyValue{Key: k} - 
akv := Value{&orig.Value} + akv := newValue(&orig.Value) akv.SetDoubleVal(v) return orig } func newAttributeKeyValueBool(k string, v bool) otlpcommon.KeyValue { orig := otlpcommon.KeyValue{Key: k} - akv := Value{&orig.Value} + akv := newValue(&orig.Value) akv.SetBoolVal(v) return orig } @@ -534,27 +537,29 @@ func newAttributeKeyValue(k string, av Value) otlpcommon.KeyValue { func newAttributeKeyValueBytes(k string, v ImmutableByteSlice) otlpcommon.KeyValue { orig := otlpcommon.KeyValue{Key: k} - akv := Value{&orig.Value} + akv := newValue(&orig.Value) akv.SetBytesVal(v) return orig } // Map stores a map of string keys to elements of Value type. -type Map struct { - orig *[]otlpcommon.KeyValue -} +type Map internal.Map // NewMap creates a Map with 0 elements. func NewMap() Map { orig := []otlpcommon.KeyValue(nil) - return Map{&orig} + return Map(internal.NewMap(&orig)) +} + +func (m Map) getOrig() *[]otlpcommon.KeyValue { + return internal.GetOrigMap(internal.Map(m)) } // NewMapFromRaw creates a Map with values from the given map[string]interface{}. func NewMapFromRaw(rawMap map[string]interface{}) Map { if len(rawMap) == 0 { kv := []otlpcommon.KeyValue(nil) - return Map{&kv} + return newMap(&kv) } origs := make([]otlpcommon.KeyValue, len(rawMap)) ix := 0 @@ -563,27 +568,27 @@ func NewMapFromRaw(rawMap map[string]interface{}) Map { newValueFromRaw(iv).copyTo(&origs[ix].Value) ix++ } - return Map{&origs} + return Map(internal.NewMap(&origs)) } func newMap(orig *[]otlpcommon.KeyValue) Map { - return Map{orig} + return Map(internal.NewMap(orig)) } // Clear erases any existing entries in this Map instance. func (m Map) Clear() { - *m.orig = nil + *m.getOrig() = nil } // EnsureCapacity increases the capacity of this Map instance, if necessary, // to ensure that it can hold at least the number of elements specified by the capacity argument. func (m Map) EnsureCapacity(capacity int) { - if capacity <= cap(*m.orig) { + if capacity <= cap(*m.getOrig()) { return } - oldOrig := *m.orig - *m.orig = make([]otlpcommon.KeyValue, 0, capacity) - copy(*m.orig, oldOrig) + oldOrig := *m.getOrig() + *m.getOrig() = make([]otlpcommon.KeyValue, 0, capacity) + copy(*m.getOrig(), oldOrig) } // Get returns the Value associated with the key and true. Returned @@ -594,23 +599,23 @@ func (m Map) EnsureCapacity(capacity int) { // If the key does not exist returns an invalid instance of the KeyValue and false. // Calling any functions on the returned invalid instance will cause a panic. func (m Map) Get(key string) (Value, bool) { - for i := range *m.orig { - akv := &(*m.orig)[i] + for i := range *m.getOrig() { + akv := &(*m.getOrig())[i] if akv.Key == key { - return Value{&akv.Value}, true + return newValue(&akv.Value), true } } - return Value{nil}, false + return newValue(nil), false } // Remove removes the entry associated with the key and returns true if the key // was present in the map, otherwise returns false. 
func (m Map) Remove(key string) bool { - for i := range *m.orig { - akv := &(*m.orig)[i] + for i := range *m.getOrig() { + akv := &(*m.getOrig())[i] if akv.Key == key { - *akv = (*m.orig)[len(*m.orig)-1] - *m.orig = (*m.orig)[:len(*m.orig)-1] + *akv = (*m.getOrig())[len(*m.getOrig())-1] + *m.getOrig() = (*m.getOrig())[:len(*m.getOrig())-1] return true } } @@ -620,9 +625,9 @@ func (m Map) Remove(key string) bool { // RemoveIf removes the entries for which the function in question returns true func (m Map) RemoveIf(f func(string, Value) bool) { newLen := 0 - for i := 0; i < len(*m.orig); i++ { - akv := &(*m.orig)[i] - if f(akv.Key, Value{&akv.Value}) { + for i := 0; i < len(*m.getOrig()); i++ { + akv := &(*m.getOrig())[i] + if f(akv.Key, newValue(&akv.Value)) { continue } if newLen == i { @@ -630,10 +635,10 @@ func (m Map) RemoveIf(f func(string, Value) bool) { newLen++ continue } - (*m.orig)[newLen] = (*m.orig)[i] + (*m.getOrig())[newLen] = (*m.getOrig())[i] newLen++ } - *m.orig = (*m.orig)[:newLen] + *m.getOrig() = (*m.getOrig())[:newLen] } // Insert adds the Value to the map when the key does not exist. @@ -645,7 +650,7 @@ func (m Map) RemoveIf(f func(string, Value) bool) { // the raw value to avoid an extra allocation. func (m Map) Insert(k string, v Value) { if _, existing := m.Get(k); !existing { - *m.orig = append(*m.orig, newAttributeKeyValue(k, v)) + *m.getOrig() = append(*m.getOrig(), newAttributeKeyValue(k, v)) } } @@ -653,7 +658,7 @@ func (m Map) Insert(k string, v Value) { // No action is applied to the map where the key already exists. func (m Map) InsertNull(k string) { if _, existing := m.Get(k); !existing { - *m.orig = append(*m.orig, newAttributeKeyValueNull(k)) + *m.getOrig() = append(*m.getOrig(), newAttributeKeyValueNull(k)) } } @@ -661,7 +666,7 @@ func (m Map) InsertNull(k string) { // No action is applied to the map where the key already exists. func (m Map) InsertString(k string, v string) { if _, existing := m.Get(k); !existing { - *m.orig = append(*m.orig, newAttributeKeyValueString(k, v)) + *m.getOrig() = append(*m.getOrig(), newAttributeKeyValueString(k, v)) } } @@ -669,7 +674,7 @@ func (m Map) InsertString(k string, v string) { // No action is applied to the map where the key already exists. func (m Map) InsertInt(k string, v int64) { if _, existing := m.Get(k); !existing { - *m.orig = append(*m.orig, newAttributeKeyValueInt(k, v)) + *m.getOrig() = append(*m.getOrig(), newAttributeKeyValueInt(k, v)) } } @@ -677,7 +682,7 @@ func (m Map) InsertInt(k string, v int64) { // No action is applied to the map where the key already exists. func (m Map) InsertDouble(k string, v float64) { if _, existing := m.Get(k); !existing { - *m.orig = append(*m.orig, newAttributeKeyValueDouble(k, v)) + *m.getOrig() = append(*m.getOrig(), newAttributeKeyValueDouble(k, v)) } } @@ -685,7 +690,7 @@ func (m Map) InsertDouble(k string, v float64) { // No action is applied to the map where the key already exists. func (m Map) InsertBool(k string, v bool) { if _, existing := m.Get(k); !existing { - *m.orig = append(*m.orig, newAttributeKeyValueBool(k, v)) + *m.getOrig() = append(*m.getOrig(), newAttributeKeyValueBool(k, v)) } } @@ -693,7 +698,7 @@ func (m Map) InsertBool(k string, v bool) { // No action is applied to the map where the key already exists. 
func (m Map) InsertBytes(k string, v ImmutableByteSlice) { if _, existing := m.Get(k); !existing { - *m.orig = append(*m.orig, newAttributeKeyValueBytes(k, v)) + *m.getOrig() = append(*m.getOrig(), newAttributeKeyValueBytes(k, v)) } } @@ -706,7 +711,7 @@ func (m Map) InsertBytes(k string, v ImmutableByteSlice) { // the raw value to avoid an extra allocation. func (m Map) Update(k string, v Value) { if av, existing := m.Get(k); existing { - v.copyTo(av.orig) + v.copyTo(av.getOrig()) } } @@ -760,9 +765,9 @@ func (m Map) UpdateBytes(k string, v ImmutableByteSlice) { // the raw value to avoid an extra allocation. func (m Map) Upsert(k string, v Value) { if av, existing := m.Get(k); existing { - v.copyTo(av.orig) + v.copyTo(av.getOrig()) } else { - *m.orig = append(*m.orig, newAttributeKeyValue(k, v)) + *m.getOrig() = append(*m.getOrig(), newAttributeKeyValue(k, v)) } } @@ -773,7 +778,7 @@ func (m Map) UpsertString(k string, v string) { if av, existing := m.Get(k); existing { av.SetStringVal(v) } else { - *m.orig = append(*m.orig, newAttributeKeyValueString(k, v)) + *m.getOrig() = append(*m.getOrig(), newAttributeKeyValueString(k, v)) } } @@ -784,7 +789,7 @@ func (m Map) UpsertInt(k string, v int64) { if av, existing := m.Get(k); existing { av.SetIntVal(v) } else { - *m.orig = append(*m.orig, newAttributeKeyValueInt(k, v)) + *m.getOrig() = append(*m.getOrig(), newAttributeKeyValueInt(k, v)) } } @@ -795,7 +800,7 @@ func (m Map) UpsertDouble(k string, v float64) { if av, existing := m.Get(k); existing { av.SetDoubleVal(v) } else { - *m.orig = append(*m.orig, newAttributeKeyValueDouble(k, v)) + *m.getOrig() = append(*m.getOrig(), newAttributeKeyValueDouble(k, v)) } } @@ -806,7 +811,7 @@ func (m Map) UpsertBool(k string, v bool) { if av, existing := m.Get(k); existing { av.SetBoolVal(v) } else { - *m.orig = append(*m.orig, newAttributeKeyValueBool(k, v)) + *m.getOrig() = append(*m.getOrig(), newAttributeKeyValueBool(k, v)) } } @@ -817,7 +822,7 @@ func (m Map) UpsertBytes(k string, v ImmutableByteSlice) { if av, existing := m.Get(k); existing { av.SetBytesVal(v) } else { - *m.orig = append(*m.orig, newAttributeKeyValueBytes(k, v)) + *m.getOrig() = append(*m.getOrig(), newAttributeKeyValueBytes(k, v)) } } @@ -827,8 +832,8 @@ func (m Map) UpsertBytes(k string, v ImmutableByteSlice) { // assert.EqualValues(t, expected.Sort(), actual.Sort()) func (m Map) Sort() Map { // Intention is to move the nil values at the end. - sort.SliceStable(*m.orig, func(i, j int) bool { - return (*m.orig)[i].Key < (*m.orig)[j].Key + sort.SliceStable(*m.getOrig(), func(i, j int) bool { + return (*m.getOrig())[i].Key < (*m.getOrig())[j].Key }) return m } @@ -838,7 +843,7 @@ func (m Map) Sort() Map { // Because the Map is represented internally by a slice of pointers, and the data are comping from the wire, // it is possible that when iterating using "Range" to get access to fewer elements because nil elements are skipped. func (m Map) Len() int { - return len(*m.orig) + return len(*m.getOrig()) } // Range calls f sequentially for each key and value present in the map. If f returns false, range stops the iteration. @@ -849,9 +854,9 @@ func (m Map) Len() int { // ... 
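These hunks only change how `Map` reaches its backing `KeyValue` slice (through `getOrig()` instead of a struct field); the exported Insert/Update/Upsert surface is untouched. For context, a minimal usage sketch of that public `pcommon.Map` API — the keys and values below are illustrative only, not taken from the patch:

```golang
package main

import (
	"fmt"

	"go.opentelemetry.io/collector/pdata/pcommon"
)

func main() {
	// Build a Map from a raw Go map and add a couple of entries.
	attrs := pcommon.NewMapFromRaw(map[string]interface{}{"service.name": "checkout"})
	attrs.UpsertString("host.name", "node-1") // inserts, since the key is absent
	attrs.UpsertInt("retry.count", 3)

	// Get returns the stored Value and whether the key was present.
	if v, ok := attrs.Get("service.name"); ok {
		fmt.Println(v.StringVal()) // checkout
	}

	// Range iterates until the callback returns false.
	attrs.Range(func(k string, v pcommon.Value) bool {
		fmt.Println(k, v.Type())
		return true
	})
}
```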
// }) func (m Map) Range(f func(k string, v Value) bool) { - for i := range *m.orig { - kv := &(*m.orig)[i] - if !f(kv.Key, Value{&kv.Value}) { + for i := range *m.getOrig() { + kv := &(*m.getOrig())[i] + if !f(kv.Key, Value(internal.NewValue(&kv.Value))) { break } } @@ -859,28 +864,28 @@ func (m Map) Range(f func(k string, v Value) bool) { // CopyTo copies all elements from the current map to the dest. func (m Map) CopyTo(dest Map) { - newLen := len(*m.orig) - oldCap := cap(*dest.orig) + newLen := len(*m.getOrig()) + oldCap := cap(*dest.getOrig()) if newLen <= oldCap { // New slice fits in existing slice, no need to reallocate. - *dest.orig = (*dest.orig)[:newLen:oldCap] - for i := range *m.orig { - akv := &(*m.orig)[i] - destAkv := &(*dest.orig)[i] + *dest.getOrig() = (*dest.getOrig())[:newLen:oldCap] + for i := range *m.getOrig() { + akv := &(*m.getOrig())[i] + destAkv := &(*dest.getOrig())[i] destAkv.Key = akv.Key - Value{&akv.Value}.copyTo(&destAkv.Value) + newValue(&akv.Value).copyTo(&destAkv.Value) } return } // New slice is bigger than exist slice. Allocate new space. - origs := make([]otlpcommon.KeyValue, len(*m.orig)) - for i := range *m.orig { - akv := &(*m.orig)[i] + origs := make([]otlpcommon.KeyValue, len(*m.getOrig())) + for i := range *m.getOrig() { + akv := &(*m.getOrig())[i] origs[i].Key = akv.Key - Value{&akv.Value}.copyTo(&origs[i].Value) + newValue(&akv.Value).copyTo(&origs[i].Value) } - *dest.orig = origs + *dest.getOrig() = origs } // AsRaw converts an OTLP Map to a standard go map @@ -897,13 +902,13 @@ func (m Map) AsRaw() map[string]interface{} { func NewSliceFromRaw(rawSlice []interface{}) Slice { if len(rawSlice) == 0 { v := []otlpcommon.AnyValue(nil) - return Slice{&v} + return newSlice(&v) } origs := make([]otlpcommon.AnyValue, len(rawSlice)) for ix, iv := range rawSlice { newValueFromRaw(iv).copyTo(&origs[ix]) } - return Slice{&origs} + return newSlice(&origs) } // AsRaw converts the Slice to a standard go slice. diff --git a/pdata/internal/common_test.go b/pdata/pcommon/common_test.go similarity index 94% rename from pdata/internal/common_test.go rename to pdata/pcommon/common_test.go index 3f2ad80dca1..e46e56a70f0 100644 --- a/pdata/internal/common_test.go +++ b/pdata/pcommon/common_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package internal +package pcommon import ( "encoding/base64" @@ -24,6 +24,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "go.opentelemetry.io/collector/pdata/internal" otlpcommon "go.opentelemetry.io/collector/pdata/internal/data/protogen/common/v1" ) @@ -154,7 +155,7 @@ func TestAttributeValueMap(t *testing.T) { // Test nil KvlistValue case for MapVal() func. 
orig := &otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_KvlistValue{KvlistValue: nil}} - m1 = Value{orig: orig} + m1 = newValue(orig) assert.EqualValues(t, Map{}, m1.MapVal()) } @@ -273,15 +274,15 @@ func TestNilMap(t *testing.T) { val, exist := NewMap().Get("test_key") assert.False(t, exist) - assert.EqualValues(t, Value{nil}, val) + assert.EqualValues(t, newValue(nil), val) insertMap := NewMap() insertMap.Insert("k", NewValueString("v")) - assert.EqualValues(t, generateTestMap(), insertMap) + assert.EqualValues(t, Map(internal.GenerateTestMap()), insertMap) insertMapString := NewMap() insertMapString.InsertString("k", "v") - assert.EqualValues(t, generateTestMap(), insertMapString) + assert.EqualValues(t, Map(internal.GenerateTestMap()), insertMapString) insertMapNull := NewMap() insertMapNull.InsertNull("k") @@ -329,11 +330,11 @@ func TestNilMap(t *testing.T) { upsertMap := NewMap() upsertMap.Upsert("k", NewValueString("v")) - assert.EqualValues(t, generateTestMap(), upsertMap) + assert.EqualValues(t, Map(internal.GenerateTestMap()), upsertMap) upsertMapString := NewMap() upsertMapString.UpsertString("k", "v") - assert.EqualValues(t, generateTestMap(), upsertMapString) + assert.EqualValues(t, Map(internal.GenerateTestMap()), upsertMapString) upsertMapInt := NewMap() upsertMapInt.UpsertInt("k", 123) @@ -371,9 +372,7 @@ func TestMapWithEmpty(t *testing.T) { Value: otlpcommon.AnyValue{Value: nil}, }, } - sm := Map{ - orig: &origWithNil, - } + sm := newMap(&origWithNil) val, exist := sm.Get("test_key") assert.True(t, exist) assert.EqualValues(t, ValueTypeString, val.Type()) @@ -558,7 +557,7 @@ func TestMapWithEmpty(t *testing.T) { assert.False(t, exist) // Test Sort - assert.EqualValues(t, Map{orig: &origWithNil}, sm.Sort()) + assert.EqualValues(t, newMap(&origWithNil), sm.Sort()) } func TestMapIterationNil(t *testing.T) { @@ -616,25 +615,26 @@ func TestMap_InitFromRaw(t *testing.T) { newAttributeKeyValueBytes("k_bytes", NewImmutableByteSlice([]byte{1, 2, 3})), } am = NewMapFromRaw(rawMap) - assert.EqualValues(t, Map{orig: &rawOrig}.Sort(), am.Sort()) + assert.EqualValues(t, newMap(&rawOrig).Sort(), am.Sort()) } func TestAttributeValue_CopyTo(t *testing.T) { // Test nil KvlistValue case for MapVal() func. dest := NewValueEmpty() orig := &otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_KvlistValue{KvlistValue: nil}} - Value{orig: orig}.CopyTo(dest) - assert.Nil(t, dest.orig.Value.(*otlpcommon.AnyValue_KvlistValue).KvlistValue) + newValue(orig).CopyTo(dest) + assert.Nil(t, dest.getOrig().Value.(*otlpcommon.AnyValue_KvlistValue).KvlistValue) // Test nil ArrayValue case for SliceVal() func. dest = NewValueEmpty() orig = &otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_ArrayValue{ArrayValue: nil}} - Value{orig: orig}.CopyTo(dest) - assert.Nil(t, dest.orig.Value.(*otlpcommon.AnyValue_ArrayValue).ArrayValue) + newValue(orig).CopyTo(dest) + assert.Nil(t, dest.getOrig().Value.(*otlpcommon.AnyValue_ArrayValue).ArrayValue) // Test copy empty value. 
- Value{orig: &otlpcommon.AnyValue{}}.CopyTo(dest) - assert.Nil(t, dest.orig.Value) + orig = &otlpcommon.AnyValue{} + newValue(orig).CopyTo(dest) + assert.Nil(t, dest.getOrig().Value) } func TestMap_CopyTo(t *testing.T) { @@ -644,17 +644,17 @@ func TestMap_CopyTo(t *testing.T) { assert.EqualValues(t, 0, dest.Len()) // Test CopyTo larger slice - generateTestMap().CopyTo(dest) - assert.EqualValues(t, generateTestMap(), dest) + Map(internal.GenerateTestMap()).CopyTo(dest) + assert.EqualValues(t, Map(internal.GenerateTestMap()), dest) // Test CopyTo same size slice - generateTestMap().CopyTo(dest) - assert.EqualValues(t, generateTestMap(), dest) + Map(internal.GenerateTestMap()).CopyTo(dest) + assert.EqualValues(t, Map(internal.GenerateTestMap()), dest) // Test CopyTo with an empty Value in the destination - (*dest.orig)[0].Value = otlpcommon.AnyValue{} - generateTestMap().CopyTo(dest) - assert.EqualValues(t, generateTestMap(), dest) + (*dest.getOrig())[0].Value = otlpcommon.AnyValue{} + Map(internal.GenerateTestMap()).CopyTo(dest) + assert.EqualValues(t, Map(internal.GenerateTestMap()), dest) } func TestAttributeValue_copyTo(t *testing.T) { @@ -675,9 +675,7 @@ func TestMap_Update(t *testing.T) { Value: otlpcommon.AnyValue{Value: nil}, }, } - sm := Map{ - orig: &origWithNil, - } + sm := newMap(&origWithNil) av, exists := sm.Get("test_key") assert.True(t, exists) @@ -706,31 +704,31 @@ func TestMap_EnsureCapacity_Zero(t *testing.T) { am := NewMap() am.EnsureCapacity(0) assert.Equal(t, 0, am.Len()) - assert.Equal(t, 0, cap(*am.orig)) + assert.Equal(t, 0, cap(*am.getOrig())) } func TestMap_EnsureCapacity(t *testing.T) { am := NewMap() am.EnsureCapacity(5) assert.Equal(t, 0, am.Len()) - assert.Equal(t, 5, cap(*am.orig)) + assert.Equal(t, 5, cap(*am.getOrig())) am.EnsureCapacity(3) assert.Equal(t, 0, am.Len()) - assert.Equal(t, 5, cap(*am.orig)) + assert.Equal(t, 5, cap(*am.getOrig())) am.EnsureCapacity(8) assert.Equal(t, 0, am.Len()) - assert.Equal(t, 8, cap(*am.orig)) + assert.Equal(t, 8, cap(*am.getOrig())) } func TestMap_Clear(t *testing.T) { am := NewMap() - assert.Nil(t, *am.orig) + assert.Nil(t, *am.getOrig()) am.Clear() - assert.Nil(t, *am.orig) + assert.Nil(t, *am.getOrig()) am.EnsureCapacity(5) - assert.NotNil(t, *am.orig) + assert.NotNil(t, *am.getOrig()) am.Clear() - assert.Nil(t, *am.orig) + assert.Nil(t, *am.getOrig()) } func TestMap_RemoveIf(t *testing.T) { @@ -765,7 +763,7 @@ func BenchmarkAttributeValue_CopyTo(b *testing.B) { b.ResetTimer() for n := 0; n < b.N; n++ { - c.copyTo(av.orig) + c.copyTo(av.getOrig()) } if av.IntVal() != 123 { b.Fail() @@ -802,9 +800,7 @@ func BenchmarkMap_Range(b *testing.B) { Value: otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_StringValue{StringValue: "v" + strconv.Itoa(i)}}, } } - am := Map{ - orig: &rawOrig, - } + am := newMap(&rawOrig) b.ResetTimer() for n := 0; n < b.N; n++ { numEls := 0 @@ -829,7 +825,7 @@ func BenchmarkMap_RangeOverMap(b *testing.B) { for n := 0; n < b.N; n++ { numEls := 0 for _, v := range rawOrig { - if v.orig == nil { + if v.getOrig() == nil { continue } numEls++ @@ -904,28 +900,6 @@ func BenchmarkStringMap_RangeOverMap(b *testing.B) { } } -func fillTestValue(dest Value) { - dest.SetStringVal("v") -} - -func generateTestValue() Value { - av := NewValueEmpty() - fillTestValue(av) - return av -} - -func generateTestMap() Map { - am := NewMap() - fillTestMap(am) - return am -} - -func fillTestMap(dest Map) { - NewMapFromRaw(map[string]interface{}{ - "k": "v", - }).CopyTo(dest) -} - func generateTestEmptyMap() Map { return 
NewMapFromRaw(map[string]interface{}{ "k": nil, @@ -988,8 +962,8 @@ func TestAttributeValueSlice(t *testing.T) { assert.EqualValues(t, "somestr", v.StringVal()) // Test nil values case for SliceVal() func. - a1 = Value{orig: &otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_ArrayValue{ArrayValue: nil}}} - assert.EqualValues(t, Slice{}, a1.SliceVal()) + a1 = newValue(&otlpcommon.AnyValue{Value: &otlpcommon.AnyValue_ArrayValue{ArrayValue: nil}}) + assert.EqualValues(t, newSlice(nil), a1.SliceVal()) } func TestAttributeSliceWithNilValues(t *testing.T) { @@ -997,9 +971,7 @@ func TestAttributeSliceWithNilValues(t *testing.T) { {}, {Value: &otlpcommon.AnyValue_StringValue{StringValue: "test_value"}}, } - sm := Slice{ - orig: &origWithNil, - } + sm := newSlice(&origWithNil) val := sm.At(0) assert.EqualValues(t, ValueTypeEmpty, val.Type()) @@ -1323,7 +1295,7 @@ func TestNewValueFromRaw(t *testing.T) { name: "invalid value", input: ValueTypeDouble, expected: (func() Value { - return NewValueString("") + return NewValueString("") })(), }, } diff --git a/pdata/internal/generated_common.go b/pdata/pcommon/generated_common.go similarity index 76% rename from pdata/internal/generated_common.go rename to pdata/pcommon/generated_common.go index 08df2d6c412..7c5ddeaab0c 100644 --- a/pdata/internal/generated_common.go +++ b/pdata/pcommon/generated_common.go @@ -15,9 +15,10 @@ // Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. // To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". -package internal +package pcommon import ( + "go.opentelemetry.io/collector/pdata/internal" otlpcommon "go.opentelemetry.io/collector/pdata/internal/data/protogen/common/v1" ) @@ -28,12 +29,15 @@ import ( // // Must use NewInstrumentationScope function to create new instances. // Important: zero-initialized instance is not valid for use. -type InstrumentationScope struct { - orig *otlpcommon.InstrumentationScope -} + +type InstrumentationScope internal.InstrumentationScope func newInstrumentationScope(orig *otlpcommon.InstrumentationScope) InstrumentationScope { - return InstrumentationScope{orig: orig} + return InstrumentationScope(internal.NewInstrumentationScope(orig)) +} + +func (ms InstrumentationScope) getOrig() *otlpcommon.InstrumentationScope { + return internal.GetOrigInstrumentationScope(internal.InstrumentationScope(ms)) } // NewInstrumentationScope creates a new empty InstrumentationScope. @@ -47,43 +51,43 @@ func NewInstrumentationScope() InstrumentationScope { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms InstrumentationScope) MoveTo(dest InstrumentationScope) { - *dest.orig = *ms.orig - *ms.orig = otlpcommon.InstrumentationScope{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlpcommon.InstrumentationScope{} } // Name returns the name associated with this InstrumentationScope. func (ms InstrumentationScope) Name() string { - return ms.orig.Name + return ms.getOrig().Name } // SetName replaces the name associated with this InstrumentationScope. func (ms InstrumentationScope) SetName(v string) { - ms.orig.Name = v + ms.getOrig().Name = v } // Version returns the version associated with this InstrumentationScope. func (ms InstrumentationScope) Version() string { - return ms.orig.Version + return ms.getOrig().Version } // SetVersion replaces the version associated with this InstrumentationScope. 
func (ms InstrumentationScope) SetVersion(v string) { - ms.orig.Version = v + ms.getOrig().Version = v } // Attributes returns the Attributes associated with this InstrumentationScope. func (ms InstrumentationScope) Attributes() Map { - return newMap(&ms.orig.Attributes) + return Map(internal.NewMap(&ms.getOrig().Attributes)) } // DroppedAttributesCount returns the droppedattributescount associated with this InstrumentationScope. func (ms InstrumentationScope) DroppedAttributesCount() uint32 { - return ms.orig.DroppedAttributesCount + return ms.getOrig().DroppedAttributesCount } // SetDroppedAttributesCount replaces the droppedattributescount associated with this InstrumentationScope. func (ms InstrumentationScope) SetDroppedAttributesCount(v uint32) { - ms.orig.DroppedAttributesCount = v + ms.getOrig().DroppedAttributesCount = v } // CopyTo copies all properties from the current struct to the dest. @@ -101,28 +105,28 @@ func (ms InstrumentationScope) CopyTo(dest InstrumentationScope) { // // Must use NewSlice function to create new instances. // Important: zero-initialized instance is not valid for use. -type Slice struct { - // orig points to the slice otlpcommon.AnyValue field contained somewhere else. - // We use pointer-to-slice to be able to modify it in functions like EnsureCapacity. - orig *[]otlpcommon.AnyValue -} +type Slice internal.Slice func newSlice(orig *[]otlpcommon.AnyValue) Slice { - return Slice{orig} + return Slice(internal.NewSlice(orig)) +} + +func (ms Slice) getOrig() *[]otlpcommon.AnyValue { + return internal.GetOrigSlice(internal.Slice(ms)) } // NewSlice creates a Slice with 0 elements. // Can use "EnsureCapacity" to initialize with a given capacity. func NewSlice() Slice { orig := []otlpcommon.AnyValue(nil) - return Slice{&orig} + return Slice(internal.NewSlice(&orig)) } // Len returns the number of elements in the slice. // // Returns "0" for a newly instance created with "NewSlice()". func (es Slice) Len() int { - return len(*es.orig) + return len(*es.getOrig()) } // At returns the element at the given index. @@ -134,21 +138,21 @@ func (es Slice) Len() int { // ... // Do something with the element // } func (es Slice) At(ix int) Value { - return newValue(&(*es.orig)[ix]) + return newValue(&(*es.getOrig())[ix]) } // CopyTo copies all elements from the current slice to the dest. func (es Slice) CopyTo(dest Slice) { srcLen := es.Len() - destCap := cap(*dest.orig) + destCap := cap(*dest.getOrig()) if srcLen <= destCap { - (*dest.orig) = (*dest.orig)[:srcLen:destCap] + (*dest.getOrig()) = (*dest.getOrig())[:srcLen:destCap] } else { - (*dest.orig) = make([]otlpcommon.AnyValue, srcLen) + (*dest.getOrig()) = make([]otlpcommon.AnyValue, srcLen) } - for i := range *es.orig { - newValue(&(*es.orig)[i]).CopyTo(newValue(&(*dest.orig)[i])) + for i := range *es.getOrig() { + newValue(&(*es.getOrig())[i]).CopyTo(newValue(&(*dest.getOrig())[i])) } } @@ -165,40 +169,40 @@ func (es Slice) CopyTo(dest Slice) { // // Here should set all the values for e. // } func (es Slice) EnsureCapacity(newCap int) { - oldCap := cap(*es.orig) + oldCap := cap(*es.getOrig()) if newCap <= oldCap { return } - newOrig := make([]otlpcommon.AnyValue, len(*es.orig), newCap) - copy(newOrig, *es.orig) - *es.orig = newOrig + newOrig := make([]otlpcommon.AnyValue, len(*es.getOrig()), newCap) + copy(newOrig, *es.getOrig()) + *es.getOrig() = newOrig } // AppendEmpty will append to the end of the slice an empty Value. // It returns the newly added Value. 
func (es Slice) AppendEmpty() Value { - *es.orig = append(*es.orig, otlpcommon.AnyValue{}) + *es.getOrig() = append(*es.getOrig(), otlpcommon.AnyValue{}) return es.At(es.Len() - 1) } // MoveAndAppendTo moves all elements from the current slice and appends them to the dest. // The current slice will be cleared. func (es Slice) MoveAndAppendTo(dest Slice) { - if *dest.orig == nil { + if *dest.getOrig() == nil { // We can simply move the entire vector and avoid any allocations. - *dest.orig = *es.orig + *dest.getOrig() = *es.getOrig() } else { - *dest.orig = append(*dest.orig, *es.orig...) + *dest.getOrig() = append(*dest.getOrig(), *es.getOrig()...) } - *es.orig = nil + *es.getOrig() = nil } // RemoveIf calls f sequentially for each element present in the slice. // If f returns true, the element is removed from the slice. func (es Slice) RemoveIf(f func(Value) bool) { newLen := 0 - for i := 0; i < len(*es.orig); i++ { + for i := 0; i < len(*es.getOrig()); i++ { if f(es.At(i)) { continue } @@ -207,9 +211,9 @@ func (es Slice) RemoveIf(f func(Value) bool) { newLen++ continue } - (*es.orig)[newLen] = (*es.orig)[i] + (*es.getOrig())[newLen] = (*es.getOrig())[i] newLen++ } // TODO: Prevent memory leak by erasing truncated values. - *es.orig = (*es.orig)[:newLen] + *es.getOrig() = (*es.getOrig())[:newLen] } diff --git a/pdata/pcommon/generated_common_alias.go b/pdata/pcommon/generated_common_alias.go deleted file mode 100644 index fd1425dfd23..00000000000 --- a/pdata/pcommon/generated_common_alias.go +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright The OpenTelemetry Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. -// To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". - -package pcommon - -import "go.opentelemetry.io/collector/pdata/internal" - -// InstrumentationScope is a message representing the instrumentation scope information. -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewInstrumentationScope function to create new instances. -// Important: zero-initialized instance is not valid for use. -type InstrumentationScope = internal.InstrumentationScope - -// NewInstrumentationScope is an alias for a function to create a new empty InstrumentationScope. -var NewInstrumentationScope = internal.NewInstrumentationScope - -// Slice logically represents a slice of Value. -// -// This is a reference type. If passed by value and callee modifies it, the -// caller will see the modification. -// -// Must use NewSlice function to create new instances. -// Important: zero-initialized instance is not valid for use. -type Slice = internal.Slice - -// NewSlice creates a Slice with 0 elements. -// Can use "EnsureCapacity" to initialize with a given capacity. 
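As with `Map`, the generated `Slice` now delegates to the internal wrapper through `getOrig()`, while `NewSlice`, `EnsureCapacity`, `AppendEmpty` and `At` behave as before. A minimal sketch of that unchanged public surface, with made-up values:

```golang
package main

import (
	"fmt"

	"go.opentelemetry.io/collector/pdata/pcommon"
)

func main() {
	s := pcommon.NewSlice()
	s.EnsureCapacity(3) // pre-allocate the backing slice

	// AppendEmpty returns the newly added element so it can be filled in place.
	s.AppendEmpty().SetStringVal("first")
	s.AppendEmpty().SetIntVal(2)

	for i := 0; i < s.Len(); i++ {
		fmt.Println(s.At(i).Type())
	}
}
```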
-var NewSlice = internal.NewSlice diff --git a/pdata/internal/generated_common_test.go b/pdata/pcommon/generated_common_test.go similarity index 52% rename from pdata/internal/generated_common_test.go rename to pdata/pcommon/generated_common_test.go index a7836fac435..ab221ac43e3 100644 --- a/pdata/internal/generated_common_test.go +++ b/pdata/pcommon/generated_common_test.go @@ -15,81 +15,78 @@ // Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. // To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". -package internal +package pcommon import ( "testing" "github.com/stretchr/testify/assert" + "go.opentelemetry.io/collector/pdata/internal" otlpcommon "go.opentelemetry.io/collector/pdata/internal/data/protogen/common/v1" ) func TestInstrumentationScope_MoveTo(t *testing.T) { - ms := generateTestInstrumentationScope() + ms := InstrumentationScope(internal.GenerateTestInstrumentationScope()) dest := NewInstrumentationScope() ms.MoveTo(dest) - assert.EqualValues(t, NewInstrumentationScope(), ms) - assert.EqualValues(t, generateTestInstrumentationScope(), dest) + assert.Equal(t, NewInstrumentationScope(), ms) + assert.Equal(t, InstrumentationScope(internal.GenerateTestInstrumentationScope()), dest) } func TestInstrumentationScope_CopyTo(t *testing.T) { ms := NewInstrumentationScope() orig := NewInstrumentationScope() orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestInstrumentationScope() + assert.Equal(t, orig, ms) + orig = InstrumentationScope(internal.GenerateTestInstrumentationScope()) orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) + assert.Equal(t, orig, ms) } func TestInstrumentationScope_Name(t *testing.T) { ms := NewInstrumentationScope() - assert.EqualValues(t, "", ms.Name()) - testValName := "test_name" - ms.SetName(testValName) - assert.EqualValues(t, testValName, ms.Name()) + assert.Equal(t, "", ms.Name()) + ms.SetName("test_name") + assert.Equal(t, "test_name", ms.Name()) } func TestInstrumentationScope_Version(t *testing.T) { ms := NewInstrumentationScope() - assert.EqualValues(t, "", ms.Version()) - testValVersion := "test_version" - ms.SetVersion(testValVersion) - assert.EqualValues(t, testValVersion, ms.Version()) + assert.Equal(t, "", ms.Version()) + ms.SetVersion("test_version") + assert.Equal(t, "test_version", ms.Version()) } func TestInstrumentationScope_Attributes(t *testing.T) { ms := NewInstrumentationScope() - assert.EqualValues(t, NewMap(), ms.Attributes()) - fillTestMap(ms.Attributes()) - testValAttributes := generateTestMap() - assert.EqualValues(t, testValAttributes, ms.Attributes()) + assert.Equal(t, NewMap(), ms.Attributes()) + internal.FillTestMap(internal.Map(ms.Attributes())) + assert.Equal(t, Map(internal.GenerateTestMap()), ms.Attributes()) } func TestInstrumentationScope_DroppedAttributesCount(t *testing.T) { ms := NewInstrumentationScope() - assert.EqualValues(t, uint32(0), ms.DroppedAttributesCount()) - testValDroppedAttributesCount := uint32(17) - ms.SetDroppedAttributesCount(testValDroppedAttributesCount) - assert.EqualValues(t, testValDroppedAttributesCount, ms.DroppedAttributesCount()) + assert.Equal(t, uint32(0), ms.DroppedAttributesCount()) + ms.SetDroppedAttributesCount(uint32(17)) + assert.Equal(t, uint32(17), ms.DroppedAttributesCount()) } func TestSlice(t *testing.T) { es := NewSlice() - assert.EqualValues(t, 0, es.Len()) + assert.Equal(t, 0, es.Len()) es = newSlice(&[]otlpcommon.AnyValue{}) - assert.EqualValues(t, 0, es.Len()) + assert.Equal(t, 0, es.Len()) 
es.EnsureCapacity(7) emptyVal := newValue(&otlpcommon.AnyValue{}) - testVal := generateTestValue() - assert.EqualValues(t, 7, cap(*es.orig)) + testVal := Value(internal.GenerateTestValue()) + assert.Equal(t, 7, cap(*es.getOrig())) for i := 0; i < es.Len(); i++ { el := es.AppendEmpty() - assert.EqualValues(t, emptyVal, el) - fillTestValue(el) - assert.EqualValues(t, testVal, el) + assert.Equal(t, emptyVal, el) + internal.FillTestValue(internal.Value(el)) + assert.Equal(t, testVal, el) } } @@ -97,64 +94,64 @@ func TestSlice_CopyTo(t *testing.T) { dest := NewSlice() // Test CopyTo to empty NewSlice().CopyTo(dest) - assert.EqualValues(t, NewSlice(), dest) + assert.Equal(t, NewSlice(), dest) // Test CopyTo larger slice - generateTestSlice().CopyTo(dest) - assert.EqualValues(t, generateTestSlice(), dest) + Slice(internal.GenerateTestSlice()).CopyTo(dest) + assert.Equal(t, Slice(internal.GenerateTestSlice()), dest) // Test CopyTo same size slice - generateTestSlice().CopyTo(dest) - assert.EqualValues(t, generateTestSlice(), dest) + Slice(internal.GenerateTestSlice()).CopyTo(dest) + assert.Equal(t, Slice(internal.GenerateTestSlice()), dest) } func TestSlice_EnsureCapacity(t *testing.T) { - es := generateTestSlice() + es := Slice(internal.GenerateTestSlice()) // Test ensure smaller capacity. const ensureSmallLen = 4 expectedEs := make(map[*otlpcommon.AnyValue]bool) for i := 0; i < es.Len(); i++ { - expectedEs[es.At(i).orig] = true + expectedEs[es.At(i).getOrig()] = true } assert.Equal(t, es.Len(), len(expectedEs)) es.EnsureCapacity(ensureSmallLen) assert.Less(t, ensureSmallLen, es.Len()) foundEs := make(map[*otlpcommon.AnyValue]bool, es.Len()) for i := 0; i < es.Len(); i++ { - foundEs[es.At(i).orig] = true + foundEs[es.At(i).getOrig()] = true } - assert.EqualValues(t, expectedEs, foundEs) + assert.Equal(t, expectedEs, foundEs) // Test ensure larger capacity const ensureLargeLen = 9 oldLen := es.Len() assert.Equal(t, oldLen, len(expectedEs)) es.EnsureCapacity(ensureLargeLen) - assert.Equal(t, ensureLargeLen, cap(*es.orig)) + assert.Equal(t, ensureLargeLen, cap(*es.getOrig())) } func TestSlice_MoveAndAppendTo(t *testing.T) { // Test MoveAndAppendTo to empty - expectedSlice := generateTestSlice() + expectedSlice := Slice(internal.GenerateTestSlice()) dest := NewSlice() - src := generateTestSlice() + src := Slice(internal.GenerateTestSlice()) src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) + assert.Equal(t, Slice(internal.GenerateTestSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) // Test MoveAndAppendTo empty slice src.MoveAndAppendTo(dest) - assert.EqualValues(t, generateTestSlice(), dest) - assert.EqualValues(t, 0, src.Len()) - assert.EqualValues(t, expectedSlice.Len(), dest.Len()) + assert.Equal(t, Slice(internal.GenerateTestSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) // Test MoveAndAppendTo not empty slice - generateTestSlice().MoveAndAppendTo(dest) - assert.EqualValues(t, 2*expectedSlice.Len(), dest.Len()) + Slice(internal.GenerateTestSlice()).MoveAndAppendTo(dest) + assert.Equal(t, 2*expectedSlice.Len(), dest.Len()) for i := 0; i < expectedSlice.Len(); i++ { - assert.EqualValues(t, expectedSlice.At(i), dest.At(i)) - assert.EqualValues(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) + assert.Equal(t, expectedSlice.At(i), dest.At(i)) + assert.Equal(t, 
expectedSlice.At(i), dest.At(i+expectedSlice.Len())) } } @@ -167,7 +164,7 @@ func TestSlice_RemoveIf(t *testing.T) { }) // Test RemoveIf - filtered := generateTestSlice() + filtered := Slice(internal.GenerateTestSlice()) pos := 0 filtered.RemoveIf(func(el Value) bool { pos++ @@ -175,30 +172,3 @@ func TestSlice_RemoveIf(t *testing.T) { }) assert.Equal(t, 5, filtered.Len()) } - -func generateTestInstrumentationScope() InstrumentationScope { - tv := NewInstrumentationScope() - fillTestInstrumentationScope(tv) - return tv -} - -func fillTestInstrumentationScope(tv InstrumentationScope) { - tv.SetName("test_name") - tv.SetVersion("test_version") - fillTestMap(tv.Attributes()) - tv.SetDroppedAttributesCount(uint32(17)) -} - -func generateTestSlice() Slice { - tv := NewSlice() - fillTestSlice(tv) - return tv -} - -func fillTestSlice(tv Slice) { - l := 7 - tv.EnsureCapacity(l) - for i := 0; i < l; i++ { - fillTestValue(tv.AppendEmpty()) - } -} diff --git a/pdata/pcommon/generated_immutable_slice.go b/pdata/pcommon/generated_immutable_slice.go new file mode 100644 index 00000000000..5c141c8dea5 --- /dev/null +++ b/pdata/pcommon/generated_immutable_slice.go @@ -0,0 +1,137 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. +// To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". + +package pcommon + +import "go.opentelemetry.io/collector/pdata/internal" + +// ImmutableByteSlice represents a []byte slice that cannot be mutated. +// The instance of ImmutableByteSlice can be assigned to multiple objects since it's immutable. +type ImmutableByteSlice internal.ImmutableByteSlice + +func (ms ImmutableByteSlice) getOrig() []byte { + return internal.GetOrigImmutableByteSlice(internal.ImmutableByteSlice(ms)) +} + +// NewImmutableByteSlice creates a new ImmutableByteSlice by copying the provided []byte slice. +func NewImmutableByteSlice(orig []byte) ImmutableByteSlice { + if len(orig) == 0 { + return ImmutableByteSlice(internal.NewImmutableByteSlice(nil)) + } + copyOrig := make([]byte, len(orig)) + copy(copyOrig, orig) + return ImmutableByteSlice(internal.NewImmutableByteSlice(copyOrig)) +} + +// AsRaw returns a copy of the []byte slice. +func (ms ImmutableByteSlice) AsRaw() []byte { + orig := ms.getOrig() + if len(orig) == 0 { + return nil + } + copyOrig := make([]byte, len(orig)) + copy(copyOrig, orig) + return copyOrig +} + +// Len returns length of the []byte slice value. +func (ms ImmutableByteSlice) Len() int { + return len(ms.getOrig()) +} + +// At returns an item from particular index. +func (ms ImmutableByteSlice) At(i int) byte { + return ms.getOrig()[i] +} + +// ImmutableFloat64Slice represents a []float64 slice that cannot be mutated. +// The instance of ImmutableFloat64Slice can be assigned to multiple objects since it's immutable. 
+type ImmutableFloat64Slice internal.ImmutableFloat64Slice + +func (ms ImmutableFloat64Slice) getOrig() []float64 { + return internal.GetOrigImmutableFloat64Slice(internal.ImmutableFloat64Slice(ms)) +} + +// NewImmutableFloat64Slice creates a new ImmutableFloat64Slice by copying the provided []float64 slice. +func NewImmutableFloat64Slice(orig []float64) ImmutableFloat64Slice { + if len(orig) == 0 { + return ImmutableFloat64Slice(internal.NewImmutableFloat64Slice(nil)) + } + copyOrig := make([]float64, len(orig)) + copy(copyOrig, orig) + return ImmutableFloat64Slice(internal.NewImmutableFloat64Slice(copyOrig)) +} + +// AsRaw returns a copy of the []float64 slice. +func (ms ImmutableFloat64Slice) AsRaw() []float64 { + orig := ms.getOrig() + if len(orig) == 0 { + return nil + } + copyOrig := make([]float64, len(orig)) + copy(copyOrig, orig) + return copyOrig +} + +// Len returns length of the []float64 slice value. +func (ms ImmutableFloat64Slice) Len() int { + return len(ms.getOrig()) +} + +// At returns an item from particular index. +func (ms ImmutableFloat64Slice) At(i int) float64 { + return ms.getOrig()[i] +} + +// ImmutableUInt64Slice represents a []uint64 slice that cannot be mutated. +// The instance of ImmutableUInt64Slice can be assigned to multiple objects since it's immutable. +type ImmutableUInt64Slice internal.ImmutableUInt64Slice + +func (ms ImmutableUInt64Slice) getOrig() []uint64 { + return internal.GetOrigImmutableUInt64Slice(internal.ImmutableUInt64Slice(ms)) +} + +// NewImmutableUInt64Slice creates a new ImmutableUInt64Slice by copying the provided []uint64 slice. +func NewImmutableUInt64Slice(orig []uint64) ImmutableUInt64Slice { + if len(orig) == 0 { + return ImmutableUInt64Slice(internal.NewImmutableUInt64Slice(nil)) + } + copyOrig := make([]uint64, len(orig)) + copy(copyOrig, orig) + return ImmutableUInt64Slice(internal.NewImmutableUInt64Slice(copyOrig)) +} + +// AsRaw returns a copy of the []uint64 slice. +func (ms ImmutableUInt64Slice) AsRaw() []uint64 { + orig := ms.getOrig() + if len(orig) == 0 { + return nil + } + copyOrig := make([]uint64, len(orig)) + copy(copyOrig, orig) + return copyOrig +} + +// Len returns length of the []uint64 slice value. +func (ms ImmutableUInt64Slice) Len() int { + return len(ms.getOrig()) +} + +// At returns an item from particular index. +func (ms ImmutableUInt64Slice) At(i int) uint64 { + return ms.getOrig()[i] +} diff --git a/pdata/pcommon/generated_immutable_slice_alias.go b/pdata/pcommon/generated_immutable_slice_alias.go deleted file mode 100644 index 19e6975735f..00000000000 --- a/pdata/pcommon/generated_immutable_slice_alias.go +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright The OpenTelemetry Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. -// To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". 
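The immutable slice wrappers copy the input on construction and again in `AsRaw`, so neither the caller's original slice nor the returned copy can mutate the stored data. A short sketch demonstrating that copy-on-construction behaviour (the byte values are illustrative):

```golang
package main

import (
	"fmt"

	"go.opentelemetry.io/collector/pdata/pcommon"
)

func main() {
	raw := []byte{1, 2, 3}
	ibs := pcommon.NewImmutableByteSlice(raw)

	// The constructor copied the input, so later mutations of raw are not visible.
	raw[0] = 42
	fmt.Println(ibs.At(0), ibs.Len()) // 1 3

	// AsRaw returns a fresh copy as well.
	cp := ibs.AsRaw()
	cp[0] = 99
	fmt.Println(ibs.At(0)) // still 1
}
```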
- -package pcommon - -import "go.opentelemetry.io/collector/pdata/internal" - -// ImmutableByteSlice represents a []byte slice that cannot be mutated. -type ImmutableByteSlice = internal.ImmutableByteSlice - -// NewImmutableByteSlice creates a new ImmutableByteSlice by copying the provided []byte slice. -var NewImmutableByteSlice = internal.NewImmutableByteSlice - -// ImmutableFloat64Slice represents a []float64 slice that cannot be mutated. -type ImmutableFloat64Slice = internal.ImmutableFloat64Slice - -// NewImmutableFloat64Slice creates a new ImmutableFloat64Slice by copying the provided []float64 slice. -var NewImmutableFloat64Slice = internal.NewImmutableFloat64Slice - -// ImmutableUInt64Slice represents a []uint64 slice that cannot be mutated. -type ImmutableUInt64Slice = internal.ImmutableUInt64Slice - -// NewImmutableUInt64Slice creates a new ImmutableUInt64Slice by copying the provided []uint64 slice. -var NewImmutableUInt64Slice = internal.NewImmutableUInt64Slice diff --git a/pdata/internal/generated_immutable_slice_test.go b/pdata/pcommon/generated_immutable_slice_test.go similarity index 94% rename from pdata/internal/generated_immutable_slice_test.go rename to pdata/pcommon/generated_immutable_slice_test.go index 4479d57e25e..c42f8ae0443 100644 --- a/pdata/internal/generated_immutable_slice_test.go +++ b/pdata/pcommon/generated_immutable_slice_test.go @@ -15,7 +15,7 @@ // Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. // To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". -package internal +package pcommon import ( "testing" @@ -50,7 +50,7 @@ func TestNewImmutableByteSlice(t *testing.T) { t.Run(tt.name, func(t *testing.T) { s := NewImmutableByteSlice(tt.orig) assert.Equal(t, tt.want, s.AsRaw()) - assert.Equal(t, len(s.value), s.Len()) + assert.Equal(t, len(s.getOrig()), s.Len()) if len(tt.orig) > 0 { // verify that orig mutation doesn't have any effect tt.orig[0] = byte(0) @@ -87,7 +87,7 @@ func TestNewImmutableFloat64Slice(t *testing.T) { t.Run(tt.name, func(t *testing.T) { s := NewImmutableFloat64Slice(tt.orig) assert.Equal(t, tt.want, s.AsRaw()) - assert.Equal(t, len(s.value), s.Len()) + assert.Equal(t, len(s.getOrig()), s.Len()) if len(tt.orig) > 0 { // verify that orig mutation doesn't have any effect tt.orig[0] = float64(0) @@ -124,7 +124,7 @@ func TestNewImmutableUInt64Slice(t *testing.T) { t.Run(tt.name, func(t *testing.T) { s := NewImmutableUInt64Slice(tt.orig) assert.Equal(t, tt.want, s.AsRaw()) - assert.Equal(t, len(s.value), s.Len()) + assert.Equal(t, len(s.getOrig()), s.Len()) if len(tt.orig) > 0 { // verify that orig mutation doesn't have any effect tt.orig[0] = uint64(0) diff --git a/pdata/internal/generated_resource.go b/pdata/pcommon/generated_resource.go similarity index 83% rename from pdata/internal/generated_resource.go rename to pdata/pcommon/generated_resource.go index 56b372e5214..806934ed206 100644 --- a/pdata/internal/generated_resource.go +++ b/pdata/pcommon/generated_resource.go @@ -15,9 +15,10 @@ // Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. // To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". -package internal +package pcommon import ( + "go.opentelemetry.io/collector/pdata/internal" otlpresource "go.opentelemetry.io/collector/pdata/internal/data/protogen/resource/v1" ) @@ -28,12 +29,15 @@ import ( // // Must use NewResource function to create new instances. // Important: zero-initialized instance is not valid for use. 
-type Resource struct { - orig *otlpresource.Resource -} + +type Resource internal.Resource func newResource(orig *otlpresource.Resource) Resource { - return Resource{orig: orig} + return Resource(internal.NewResource(orig)) +} + +func (ms Resource) getOrig() *otlpresource.Resource { + return internal.GetOrigResource(internal.Resource(ms)) } // NewResource creates a new empty Resource. @@ -47,23 +51,23 @@ func NewResource() Resource { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms Resource) MoveTo(dest Resource) { - *dest.orig = *ms.orig - *ms.orig = otlpresource.Resource{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlpresource.Resource{} } // Attributes returns the Attributes associated with this Resource. func (ms Resource) Attributes() Map { - return newMap(&ms.orig.Attributes) + return Map(internal.NewMap(&ms.getOrig().Attributes)) } // DroppedAttributesCount returns the droppedattributescount associated with this Resource. func (ms Resource) DroppedAttributesCount() uint32 { - return ms.orig.DroppedAttributesCount + return ms.getOrig().DroppedAttributesCount } // SetDroppedAttributesCount replaces the droppedattributescount associated with this Resource. func (ms Resource) SetDroppedAttributesCount(v uint32) { - ms.orig.DroppedAttributesCount = v + ms.getOrig().DroppedAttributesCount = v } // CopyTo copies all properties from the current struct to the dest. diff --git a/pdata/internal/generated_resource_test.go b/pdata/pcommon/generated_resource_test.go similarity index 57% rename from pdata/internal/generated_resource_test.go rename to pdata/pcommon/generated_resource_test.go index d1c606cdf0d..30710081bd6 100644 --- a/pdata/internal/generated_resource_test.go +++ b/pdata/pcommon/generated_resource_test.go @@ -15,55 +15,44 @@ // Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. // To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". 
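`Resource` follows the same pattern: a type definition over `internal.Resource` with an unchanged getter/setter surface. A minimal, illustrative usage sketch (attribute names and counts are made up):

```golang
package main

import (
	"fmt"

	"go.opentelemetry.io/collector/pdata/pcommon"
)

func main() {
	res := pcommon.NewResource()
	res.Attributes().UpsertString("service.name", "cart")
	res.SetDroppedAttributesCount(2)

	fmt.Println(res.Attributes().Len(), res.DroppedAttributesCount()) // 1 2
}
```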
-package internal +package pcommon import ( "testing" "github.com/stretchr/testify/assert" + + "go.opentelemetry.io/collector/pdata/internal" ) func TestResource_MoveTo(t *testing.T) { - ms := generateTestResource() + ms := Resource(internal.GenerateTestResource()) dest := NewResource() ms.MoveTo(dest) - assert.EqualValues(t, NewResource(), ms) - assert.EqualValues(t, generateTestResource(), dest) + assert.Equal(t, NewResource(), ms) + assert.Equal(t, Resource(internal.GenerateTestResource()), dest) } func TestResource_CopyTo(t *testing.T) { ms := NewResource() orig := NewResource() orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) - orig = generateTestResource() + assert.Equal(t, orig, ms) + orig = Resource(internal.GenerateTestResource()) orig.CopyTo(ms) - assert.EqualValues(t, orig, ms) + assert.Equal(t, orig, ms) } func TestResource_Attributes(t *testing.T) { ms := NewResource() - assert.EqualValues(t, NewMap(), ms.Attributes()) - fillTestMap(ms.Attributes()) - testValAttributes := generateTestMap() - assert.EqualValues(t, testValAttributes, ms.Attributes()) + assert.Equal(t, NewMap(), ms.Attributes()) + internal.FillTestMap(internal.Map(ms.Attributes())) + assert.Equal(t, Map(internal.GenerateTestMap()), ms.Attributes()) } func TestResource_DroppedAttributesCount(t *testing.T) { ms := NewResource() - assert.EqualValues(t, uint32(0), ms.DroppedAttributesCount()) - testValDroppedAttributesCount := uint32(17) - ms.SetDroppedAttributesCount(testValDroppedAttributesCount) - assert.EqualValues(t, testValDroppedAttributesCount, ms.DroppedAttributesCount()) -} - -func generateTestResource() Resource { - tv := NewResource() - fillTestResource(tv) - return tv -} - -func fillTestResource(tv Resource) { - fillTestMap(tv.Attributes()) - tv.SetDroppedAttributesCount(uint32(17)) + assert.Equal(t, uint32(0), ms.DroppedAttributesCount()) + ms.SetDroppedAttributesCount(uint32(17)) + assert.Equal(t, uint32(17), ms.DroppedAttributesCount()) } diff --git a/pdata/internal/spanid.go b/pdata/pcommon/spanid.go similarity index 67% rename from pdata/internal/spanid.go rename to pdata/pcommon/spanid.go index 2ac539e1129..a7497268300 100644 --- a/pdata/internal/spanid.go +++ b/pdata/pcommon/spanid.go @@ -12,38 +12,41 @@ // See the License for the specific language governing permissions and // limitations under the License. -package internal // import "go.opentelemetry.io/collector/pdata/internal" +package pcommon // import "go.opentelemetry.io/collector/pdata/pcommon" import ( + "go.opentelemetry.io/collector/pdata/internal" "go.opentelemetry.io/collector/pdata/internal/data" ) // SpanID is span identifier. -type SpanID struct { - orig data.SpanID +type SpanID internal.SpanID + +func (ms SpanID) getOrig() data.SpanID { + return internal.GetOrigSpanID(internal.SpanID(ms)) } // InvalidSpanID returns an empty (all zero bytes) SpanID. func InvalidSpanID() SpanID { - return SpanID{orig: data.NewSpanID([8]byte{})} + return NewSpanID([8]byte{}) } // NewSpanID returns a new SpanID from the given byte array. func NewSpanID(bytes [8]byte) SpanID { - return SpanID{orig: data.NewSpanID(bytes)} + return SpanID(internal.NewSpanID(data.NewSpanID(bytes))) } // Bytes returns the byte array representation of the SpanID. -func (t SpanID) Bytes() [8]byte { - return t.orig.Bytes() +func (ms SpanID) Bytes() [8]byte { + return ms.getOrig().Bytes() } // HexString returns hex representation of the SpanID. 
-func (t SpanID) HexString() string { - return t.orig.HexString() +func (ms SpanID) HexString() string { + return ms.getOrig().HexString() } // IsEmpty returns true if id doesn't contain at least one non-zero byte. -func (t SpanID) IsEmpty() bool { - return t.orig.IsEmpty() +func (ms SpanID) IsEmpty() bool { + return ms.getOrig().IsEmpty() } diff --git a/pdata/internal/spanid_test.go b/pdata/pcommon/spanid_test.go similarity index 98% rename from pdata/internal/spanid_test.go rename to pdata/pcommon/spanid_test.go index 2ad00db3658..d4c4c931658 100644 --- a/pdata/internal/spanid_test.go +++ b/pdata/pcommon/spanid_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package internal +package pcommon import ( "testing" diff --git a/pdata/internal/timestamp.go b/pdata/pcommon/timestamp.go similarity index 94% rename from pdata/internal/timestamp.go rename to pdata/pcommon/timestamp.go index 01597c3649c..78eb54eca71 100644 --- a/pdata/internal/timestamp.go +++ b/pdata/pcommon/timestamp.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package internal // import "go.opentelemetry.io/collector/pdata/internal" +package pcommon // import "go.opentelemetry.io/collector/pdata/pcommon" import ( "time" diff --git a/pdata/pcommon/timestamp_alias.go b/pdata/pcommon/timestamp_alias.go deleted file mode 100644 index 1d5e81ce89e..00000000000 --- a/pdata/pcommon/timestamp_alias.go +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright The OpenTelemetry Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package pcommon // import "go.opentelemetry.io/collector/pdata/pcommon" - -import "go.opentelemetry.io/collector/pdata/internal" - -// Timestamp is a time specified as UNIX Epoch time in nanoseconds since -// 1970-01-01 00:00:00 +0000 UTC. -type Timestamp = internal.Timestamp - -// NewTimestampFromTime constructs a new Timestamp from the provided time.Time. -var NewTimestampFromTime = internal.NewTimestampFromTime diff --git a/pdata/internal/timestamp_test.go b/pdata/pcommon/timestamp_test.go similarity index 98% rename from pdata/internal/timestamp_test.go rename to pdata/pcommon/timestamp_test.go index 9dd762b20f4..123ab425617 100644 --- a/pdata/internal/timestamp_test.go +++ b/pdata/pcommon/timestamp_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package internal +package pcommon import ( "testing" diff --git a/pdata/internal/traceid.go b/pdata/pcommon/traceid.go similarity index 67% rename from pdata/internal/traceid.go rename to pdata/pcommon/traceid.go index 2d6365f2755..62ec4e25c62 100644 --- a/pdata/internal/traceid.go +++ b/pdata/pcommon/traceid.go @@ -12,38 +12,41 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package internal // import "go.opentelemetry.io/collector/pdata/internal" +package pcommon // import "go.opentelemetry.io/collector/pdata/pcommon" import ( + "go.opentelemetry.io/collector/pdata/internal" "go.opentelemetry.io/collector/pdata/internal/data" ) // TraceID is a trace identifier. -type TraceID struct { - orig data.TraceID +type TraceID internal.TraceID + +func (ms TraceID) getOrig() data.TraceID { + return internal.GetOrigTraceID(internal.TraceID(ms)) } // InvalidTraceID returns an empty (all zero bytes) TraceID. func InvalidTraceID() TraceID { - return TraceID{orig: data.NewTraceID([16]byte{})} + return NewTraceID([16]byte{}) } // NewTraceID returns a new TraceID from the given byte array. func NewTraceID(bytes [16]byte) TraceID { - return TraceID{orig: data.NewTraceID(bytes)} + return TraceID(internal.NewTraceID(data.NewTraceID(bytes))) } // Bytes returns the byte array representation of the TraceID. -func (t TraceID) Bytes() [16]byte { - return t.orig.Bytes() +func (ms TraceID) Bytes() [16]byte { + return ms.getOrig().Bytes() } // HexString returns hex representation of the TraceID. -func (t TraceID) HexString() string { - return t.orig.HexString() +func (ms TraceID) HexString() string { + return ms.getOrig().HexString() } // IsEmpty returns true if id doesn't contain at least one non-zero byte. -func (t TraceID) IsEmpty() bool { - return t.orig.IsEmpty() +func (ms TraceID) IsEmpty() bool { + return ms.getOrig().IsEmpty() } diff --git a/pdata/internal/traceid_test.go b/pdata/pcommon/traceid_test.go similarity index 98% rename from pdata/internal/traceid_test.go rename to pdata/pcommon/traceid_test.go index 617a419099f..bd0d606488e 100644 --- a/pdata/internal/traceid_test.go +++ b/pdata/pcommon/traceid_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package internal +package pcommon import ( "testing" diff --git a/pdata/plog/alias.go b/pdata/plog/alias.go deleted file mode 100644 index 90aed13bc09..00000000000 --- a/pdata/plog/alias.go +++ /dev/null @@ -1,134 +0,0 @@ -// Copyright The OpenTelemetry Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package plog // import "go.opentelemetry.io/collector/pdata/plog" - -// This file contains aliases for logs data structures. - -import "go.opentelemetry.io/collector/pdata/internal" - -// Logs is the top-level struct that is propagated through the logs pipeline. -// Use NewLogs to create new instance, zero-initialized instance is not valid for use. -type Logs = internal.Logs - -// NewLogs creates a new Logs struct. -var NewLogs = internal.NewLogs - -// SeverityNumber represents severity number of a log record. 
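`SpanID` and `TraceID` likewise become type definitions over their internal wrappers while keeping the same constructors and accessors. A small sketch of that public API, using arbitrary byte values:

```golang
package main

import (
	"fmt"

	"go.opentelemetry.io/collector/pdata/pcommon"
)

func main() {
	sid := pcommon.NewSpanID([8]byte{0, 0, 0, 0, 0, 0, 0, 1})
	tid := pcommon.NewTraceID([16]byte{1})

	fmt.Println(sid.HexString(), sid.IsEmpty()) // 0000000000000001 false
	fmt.Println(tid.HexString(), tid.IsEmpty()) // 01000000000000000000000000000000 false

	// InvalidSpanID is simply the all-zero ID.
	fmt.Println(pcommon.InvalidSpanID().IsEmpty()) // true
}
```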
-type SeverityNumber = internal.SeverityNumber - -const ( - SeverityNumberUndefined = internal.SeverityNumberUndefined - SeverityNumberTrace = internal.SeverityNumberTrace - SeverityNumberTrace2 = internal.SeverityNumberTrace2 - SeverityNumberTrace3 = internal.SeverityNumberTrace3 - SeverityNumberTrace4 = internal.SeverityNumberTrace4 - SeverityNumberDebug = internal.SeverityNumberDebug - SeverityNumberDebug2 = internal.SeverityNumberDebug2 - SeverityNumberDebug3 = internal.SeverityNumberDebug3 - SeverityNumberDebug4 = internal.SeverityNumberDebug4 - SeverityNumberInfo = internal.SeverityNumberInfo - SeverityNumberInfo2 = internal.SeverityNumberInfo2 - SeverityNumberInfo3 = internal.SeverityNumberInfo3 - SeverityNumberInfo4 = internal.SeverityNumberInfo4 - SeverityNumberWarn = internal.SeverityNumberWarn - SeverityNumberWarn2 = internal.SeverityNumberWarn2 - SeverityNumberWarn3 = internal.SeverityNumberWarn3 - SeverityNumberWarn4 = internal.SeverityNumberWarn4 - SeverityNumberError = internal.SeverityNumberError - SeverityNumberError2 = internal.SeverityNumberError2 - SeverityNumberError3 = internal.SeverityNumberError3 - SeverityNumberError4 = internal.SeverityNumberError4 - SeverityNumberFatal = internal.SeverityNumberFatal - SeverityNumberFatal2 = internal.SeverityNumberFatal2 - SeverityNumberFatal3 = internal.SeverityNumberFatal3 - SeverityNumberFatal4 = internal.SeverityNumberFatal4 -) - -const ( - // Deprecated: [0.59.0] Use SeverityNumberUndefined instead - SeverityNumberUNDEFINED = SeverityNumberUndefined - - // Deprecated: [0.59.0] Use SeverityNumberTrace instead - SeverityNumberTRACE = SeverityNumberTrace - - // Deprecated: [0.59.0] Use SeverityNumberTrace2 instead - SeverityNumberTRACE2 = SeverityNumberTrace2 - - // Deprecated: [0.59.0] Use SeverityNumberTrace3 instead - SeverityNumberTRACE3 = SeverityNumberTrace3 - - // Deprecated: [0.59.0] Use SeverityNumberTrace4 instead - SeverityNumberTRACE4 = SeverityNumberTrace4 - - // Deprecated: [0.59.0] Use SeverityNumberDebug instead - SeverityNumberDEBUG = SeverityNumberDebug - - // Deprecated: [0.59.0] Use SeverityNumberDebug2 instead - SeverityNumberDEBUG2 = SeverityNumberDebug2 - - // Deprecated: [0.59.0] Use SeverityNumberDebug3 instead - SeverityNumberDEBUG3 = SeverityNumberDebug3 - - // Deprecated: [0.59.0] Use SeverityNumberDebug4 instead - SeverityNumberDEBUG4 = SeverityNumberDebug4 - - // Deprecated: [0.59.0] Use SeverityNumberInfo instead - SeverityNumberINFO = SeverityNumberInfo - - // Deprecated: [0.59.0] Use SeverityNumberInfo2 instead - SeverityNumberINFO2 = SeverityNumberInfo2 - - // Deprecated: [0.59.0] Use SeverityNumberInfo3 instead - SeverityNumberINFO3 = SeverityNumberInfo3 - - // Deprecated: [0.59.0] Use SeverityNumberInfo4 instead - SeverityNumberINFO4 = SeverityNumberInfo4 - - // Deprecated: [0.59.0] Use SeverityNumberWarn instead - SeverityNumberWARN = SeverityNumberWarn - - // Deprecated: [0.59.0] Use SeverityNumberWarn2 instead - SeverityNumberWARN2 = SeverityNumberWarn2 - - // Deprecated: [0.59.0] Use SeverityNumberWarn3 instead - SeverityNumberWARN3 = SeverityNumberWarn3 - - // Deprecated: [0.59.0] Use SeverityNumberWarn4 instead - SeverityNumberWARN4 = SeverityNumberWarn4 - - // Deprecated: [0.59.0] Use SeverityNumberError instead - SeverityNumberERROR = SeverityNumberError - - // Deprecated: [0.59.0] Use SeverityNumberError2 instead - SeverityNumberERROR2 = SeverityNumberError2 - - // Deprecated: [0.59.0] Use SeverityNumberError3 instead - SeverityNumberERROR3 = SeverityNumberError3 - - // Deprecated: 
[0.59.0] Use SeverityNumberError4 instead - SeverityNumberERROR4 = SeverityNumberError4 - - // Deprecated: [0.59.0] Use SeverityNumberFatal instead - SeverityNumberFATAL = SeverityNumberFatal - - // Deprecated: [0.59.0] Use SeverityNumberFatal2 instead - SeverityNumberFATAL2 = SeverityNumberFatal2 - - // Deprecated: [0.59.0] Use SeverityNumberFatal3 instead - SeverityNumberFATAL3 = SeverityNumberFatal3 - - // Deprecated: [0.59.0] Use SeverityNumberFatal4 instead - SeverityNumberFATAL4 = SeverityNumberFatal4 -) diff --git a/pdata/plog/generated_alias.go b/pdata/plog/generated_alias.go deleted file mode 100644 index e9d9fdb8ba3..00000000000 --- a/pdata/plog/generated_alias.go +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright The OpenTelemetry Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. -// To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". - -package plog - -import "go.opentelemetry.io/collector/pdata/internal" - -// ResourceLogsSlice logically represents a slice of ResourceLogs. -// -// This is a reference type. If passed by value and callee modifies it, the -// caller will see the modification. -// -// Must use NewResourceLogsSlice function to create new instances. -// Important: zero-initialized instance is not valid for use. -type ResourceLogsSlice = internal.ResourceLogsSlice - -// NewResourceLogsSlice creates a ResourceLogsSlice with 0 elements. -// Can use "EnsureCapacity" to initialize with a given capacity. -var NewResourceLogsSlice = internal.NewResourceLogsSlice - -// ResourceLogs is a collection of logs from a Resource. -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewResourceLogs function to create new instances. -// Important: zero-initialized instance is not valid for use. -type ResourceLogs = internal.ResourceLogs - -// NewResourceLogs is an alias for a function to create a new empty ResourceLogs. -var NewResourceLogs = internal.NewResourceLogs - -// ScopeLogsSlice logically represents a slice of ScopeLogs. -// -// This is a reference type. If passed by value and callee modifies it, the -// caller will see the modification. -// -// Must use NewScopeLogsSlice function to create new instances. -// Important: zero-initialized instance is not valid for use. -type ScopeLogsSlice = internal.ScopeLogsSlice - -// NewScopeLogsSlice creates a ScopeLogsSlice with 0 elements. -// Can use "EnsureCapacity" to initialize with a given capacity. -var NewScopeLogsSlice = internal.NewScopeLogsSlice - -// ScopeLogs is a collection of logs from a LibraryInstrumentation. -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewScopeLogs function to create new instances. -// Important: zero-initialized instance is not valid for use. 
-type ScopeLogs = internal.ScopeLogs - -// NewScopeLogs is an alias for a function to create a new empty ScopeLogs. -var NewScopeLogs = internal.NewScopeLogs - -// LogRecordSlice logically represents a slice of LogRecord. -// -// This is a reference type. If passed by value and callee modifies it, the -// caller will see the modification. -// -// Must use NewLogRecordSlice function to create new instances. -// Important: zero-initialized instance is not valid for use. -type LogRecordSlice = internal.LogRecordSlice - -// NewLogRecordSlice creates a LogRecordSlice with 0 elements. -// Can use "EnsureCapacity" to initialize with a given capacity. -var NewLogRecordSlice = internal.NewLogRecordSlice - -// LogRecord are experimental implementation of OpenTelemetry Log Data Model. - -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewLogRecord function to create new instances. -// Important: zero-initialized instance is not valid for use. -type LogRecord = internal.LogRecord - -// NewLogRecord is an alias for a function to create a new empty LogRecord. -var NewLogRecord = internal.NewLogRecord diff --git a/pdata/internal/generated_plog.go b/pdata/plog/generated_logs.go similarity index 70% rename from pdata/internal/generated_plog.go rename to pdata/plog/generated_logs.go index 7969e4b5f39..1f6d930fedb 100644 --- a/pdata/internal/generated_plog.go +++ b/pdata/plog/generated_logs.go @@ -15,12 +15,14 @@ // Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. // To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". -package internal +package plog import ( "sort" + "go.opentelemetry.io/collector/pdata/internal" otlplogs "go.opentelemetry.io/collector/pdata/internal/data/protogen/logs/v1" + "go.opentelemetry.io/collector/pdata/pcommon" ) // ResourceLogsSlice logically represents a slice of ResourceLogs. @@ -30,28 +32,28 @@ import ( // // Must use NewResourceLogsSlice function to create new instances. // Important: zero-initialized instance is not valid for use. -type ResourceLogsSlice struct { - // orig points to the slice otlplogs.ResourceLogs field contained somewhere else. - // We use pointer-to-slice to be able to modify it in functions like EnsureCapacity. - orig *[]*otlplogs.ResourceLogs -} +type ResourceLogsSlice internal.ResourceLogsSlice func newResourceLogsSlice(orig *[]*otlplogs.ResourceLogs) ResourceLogsSlice { - return ResourceLogsSlice{orig} + return ResourceLogsSlice(internal.NewResourceLogsSlice(orig)) +} + +func (ms ResourceLogsSlice) getOrig() *[]*otlplogs.ResourceLogs { + return internal.GetOrigResourceLogsSlice(internal.ResourceLogsSlice(ms)) } // NewResourceLogsSlice creates a ResourceLogsSlice with 0 elements. // Can use "EnsureCapacity" to initialize with a given capacity. func NewResourceLogsSlice() ResourceLogsSlice { orig := []*otlplogs.ResourceLogs(nil) - return ResourceLogsSlice{&orig} + return newResourceLogsSlice(&orig) } // Len returns the number of elements in the slice. // // Returns "0" for a newly instance created with "NewResourceLogsSlice()". func (es ResourceLogsSlice) Len() int { - return len(*es.orig) + return len(*es.getOrig()) } // At returns the element at the given index. @@ -63,27 +65,27 @@ func (es ResourceLogsSlice) Len() int { // ... 
// Do something with the element // } func (es ResourceLogsSlice) At(ix int) ResourceLogs { - return newResourceLogs((*es.orig)[ix]) + return newResourceLogs((*es.getOrig())[ix]) } // CopyTo copies all elements from the current slice to the dest. func (es ResourceLogsSlice) CopyTo(dest ResourceLogsSlice) { srcLen := es.Len() - destCap := cap(*dest.orig) + destCap := cap(*dest.getOrig()) if srcLen <= destCap { - (*dest.orig) = (*dest.orig)[:srcLen:destCap] - for i := range *es.orig { - newResourceLogs((*es.orig)[i]).CopyTo(newResourceLogs((*dest.orig)[i])) + (*dest.getOrig()) = (*dest.getOrig())[:srcLen:destCap] + for i := range *es.getOrig() { + newResourceLogs((*es.getOrig())[i]).CopyTo(newResourceLogs((*dest.getOrig())[i])) } return } origs := make([]otlplogs.ResourceLogs, srcLen) wrappers := make([]*otlplogs.ResourceLogs, srcLen) - for i := range *es.orig { + for i := range *es.getOrig() { wrappers[i] = &origs[i] - newResourceLogs((*es.orig)[i]).CopyTo(newResourceLogs(wrappers[i])) + newResourceLogs((*es.getOrig())[i]).CopyTo(newResourceLogs(wrappers[i])) } - *dest.orig = wrappers + *dest.getOrig() = wrappers } // EnsureCapacity is an operation that ensures the slice has at least the specified capacity. @@ -99,20 +101,20 @@ func (es ResourceLogsSlice) CopyTo(dest ResourceLogsSlice) { // // Here should set all the values for e. // } func (es ResourceLogsSlice) EnsureCapacity(newCap int) { - oldCap := cap(*es.orig) + oldCap := cap(*es.getOrig()) if newCap <= oldCap { return } - newOrig := make([]*otlplogs.ResourceLogs, len(*es.orig), newCap) - copy(newOrig, *es.orig) - *es.orig = newOrig + newOrig := make([]*otlplogs.ResourceLogs, len(*es.getOrig()), newCap) + copy(newOrig, *es.getOrig()) + *es.getOrig() = newOrig } // AppendEmpty will append to the end of the slice an empty ResourceLogs. // It returns the newly added ResourceLogs. func (es ResourceLogsSlice) AppendEmpty() ResourceLogs { - *es.orig = append(*es.orig, &otlplogs.ResourceLogs{}) + *es.getOrig() = append(*es.getOrig(), &otlplogs.ResourceLogs{}) return es.At(es.Len() - 1) } @@ -125,29 +127,29 @@ func (es ResourceLogsSlice) AppendEmpty() ResourceLogs { // lessFunc := func(a, b ResourceLogs) bool { // return a.Name() < b.Name() // choose any comparison here // } -// assert.EqualValues(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) +// assert.Equal(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) func (es ResourceLogsSlice) Sort(less func(a, b ResourceLogs) bool) ResourceLogsSlice { - sort.SliceStable(*es.orig, func(i, j int) bool { return less(es.At(i), es.At(j)) }) + sort.SliceStable(*es.getOrig(), func(i, j int) bool { return less(es.At(i), es.At(j)) }) return es } // MoveAndAppendTo moves all elements from the current slice and appends them to the dest. // The current slice will be cleared. func (es ResourceLogsSlice) MoveAndAppendTo(dest ResourceLogsSlice) { - if *dest.orig == nil { + if *dest.getOrig() == nil { // We can simply move the entire vector and avoid any allocations. - *dest.orig = *es.orig + *dest.getOrig() = *es.getOrig() } else { - *dest.orig = append(*dest.orig, *es.orig...) + *dest.getOrig() = append(*dest.getOrig(), *es.getOrig()...) } - *es.orig = nil + *es.getOrig() = nil } // RemoveIf calls f sequentially for each element present in the slice. // If f returns true, the element is removed from the slice. 
func (es ResourceLogsSlice) RemoveIf(f func(ResourceLogs) bool) { newLen := 0 - for i := 0; i < len(*es.orig); i++ { + for i := 0; i < len(*es.getOrig()); i++ { if f(es.At(i)) { continue } @@ -156,11 +158,11 @@ func (es ResourceLogsSlice) RemoveIf(f func(ResourceLogs) bool) { newLen++ continue } - (*es.orig)[newLen] = (*es.orig)[i] + (*es.getOrig())[newLen] = (*es.getOrig())[i] newLen++ } // TODO: Prevent memory leak by erasing truncated values. - *es.orig = (*es.orig)[:newLen] + *es.getOrig() = (*es.getOrig())[:newLen] } // ResourceLogs is a collection of logs from a Resource. @@ -170,12 +172,15 @@ func (es ResourceLogsSlice) RemoveIf(f func(ResourceLogs) bool) { // // Must use NewResourceLogs function to create new instances. // Important: zero-initialized instance is not valid for use. -type ResourceLogs struct { - orig *otlplogs.ResourceLogs -} + +type ResourceLogs internal.ResourceLogs func newResourceLogs(orig *otlplogs.ResourceLogs) ResourceLogs { - return ResourceLogs{orig: orig} + return ResourceLogs(internal.NewResourceLogs(orig)) +} + +func (ms ResourceLogs) getOrig() *otlplogs.ResourceLogs { + return internal.GetOrigResourceLogs(internal.ResourceLogs(ms)) } // NewResourceLogs creates a new empty ResourceLogs. @@ -189,28 +194,28 @@ func NewResourceLogs() ResourceLogs { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms ResourceLogs) MoveTo(dest ResourceLogs) { - *dest.orig = *ms.orig - *ms.orig = otlplogs.ResourceLogs{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlplogs.ResourceLogs{} } // Resource returns the resource associated with this ResourceLogs. -func (ms ResourceLogs) Resource() Resource { - return newResource(&ms.orig.Resource) +func (ms ResourceLogs) Resource() pcommon.Resource { + return pcommon.Resource(internal.NewResource(&ms.getOrig().Resource)) } // SchemaUrl returns the schemaurl associated with this ResourceLogs. func (ms ResourceLogs) SchemaUrl() string { - return ms.orig.SchemaUrl + return ms.getOrig().SchemaUrl } // SetSchemaUrl replaces the schemaurl associated with this ResourceLogs. func (ms ResourceLogs) SetSchemaUrl(v string) { - ms.orig.SchemaUrl = v + ms.getOrig().SchemaUrl = v } // ScopeLogs returns the ScopeLogs associated with this ResourceLogs. func (ms ResourceLogs) ScopeLogs() ScopeLogsSlice { - return newScopeLogsSlice(&ms.orig.ScopeLogs) + return ScopeLogsSlice(internal.NewScopeLogsSlice(&ms.getOrig().ScopeLogs)) } // CopyTo copies all properties from the current struct to the dest. @@ -227,28 +232,28 @@ func (ms ResourceLogs) CopyTo(dest ResourceLogs) { // // Must use NewScopeLogsSlice function to create new instances. // Important: zero-initialized instance is not valid for use. -type ScopeLogsSlice struct { - // orig points to the slice otlplogs.ScopeLogs field contained somewhere else. - // We use pointer-to-slice to be able to modify it in functions like EnsureCapacity. - orig *[]*otlplogs.ScopeLogs -} +type ScopeLogsSlice internal.ScopeLogsSlice func newScopeLogsSlice(orig *[]*otlplogs.ScopeLogs) ScopeLogsSlice { - return ScopeLogsSlice{orig} + return ScopeLogsSlice(internal.NewScopeLogsSlice(orig)) +} + +func (ms ScopeLogsSlice) getOrig() *[]*otlplogs.ScopeLogs { + return internal.GetOrigScopeLogsSlice(internal.ScopeLogsSlice(ms)) } // NewScopeLogsSlice creates a ScopeLogsSlice with 0 elements. // Can use "EnsureCapacity" to initialize with a given capacity. 
func NewScopeLogsSlice() ScopeLogsSlice { orig := []*otlplogs.ScopeLogs(nil) - return ScopeLogsSlice{&orig} + return newScopeLogsSlice(&orig) } // Len returns the number of elements in the slice. // // Returns "0" for a newly instance created with "NewScopeLogsSlice()". func (es ScopeLogsSlice) Len() int { - return len(*es.orig) + return len(*es.getOrig()) } // At returns the element at the given index. @@ -260,27 +265,27 @@ func (es ScopeLogsSlice) Len() int { // ... // Do something with the element // } func (es ScopeLogsSlice) At(ix int) ScopeLogs { - return newScopeLogs((*es.orig)[ix]) + return newScopeLogs((*es.getOrig())[ix]) } // CopyTo copies all elements from the current slice to the dest. func (es ScopeLogsSlice) CopyTo(dest ScopeLogsSlice) { srcLen := es.Len() - destCap := cap(*dest.orig) + destCap := cap(*dest.getOrig()) if srcLen <= destCap { - (*dest.orig) = (*dest.orig)[:srcLen:destCap] - for i := range *es.orig { - newScopeLogs((*es.orig)[i]).CopyTo(newScopeLogs((*dest.orig)[i])) + (*dest.getOrig()) = (*dest.getOrig())[:srcLen:destCap] + for i := range *es.getOrig() { + newScopeLogs((*es.getOrig())[i]).CopyTo(newScopeLogs((*dest.getOrig())[i])) } return } origs := make([]otlplogs.ScopeLogs, srcLen) wrappers := make([]*otlplogs.ScopeLogs, srcLen) - for i := range *es.orig { + for i := range *es.getOrig() { wrappers[i] = &origs[i] - newScopeLogs((*es.orig)[i]).CopyTo(newScopeLogs(wrappers[i])) + newScopeLogs((*es.getOrig())[i]).CopyTo(newScopeLogs(wrappers[i])) } - *dest.orig = wrappers + *dest.getOrig() = wrappers } // EnsureCapacity is an operation that ensures the slice has at least the specified capacity. @@ -296,20 +301,20 @@ func (es ScopeLogsSlice) CopyTo(dest ScopeLogsSlice) { // // Here should set all the values for e. // } func (es ScopeLogsSlice) EnsureCapacity(newCap int) { - oldCap := cap(*es.orig) + oldCap := cap(*es.getOrig()) if newCap <= oldCap { return } - newOrig := make([]*otlplogs.ScopeLogs, len(*es.orig), newCap) - copy(newOrig, *es.orig) - *es.orig = newOrig + newOrig := make([]*otlplogs.ScopeLogs, len(*es.getOrig()), newCap) + copy(newOrig, *es.getOrig()) + *es.getOrig() = newOrig } // AppendEmpty will append to the end of the slice an empty ScopeLogs. // It returns the newly added ScopeLogs. func (es ScopeLogsSlice) AppendEmpty() ScopeLogs { - *es.orig = append(*es.orig, &otlplogs.ScopeLogs{}) + *es.getOrig() = append(*es.getOrig(), &otlplogs.ScopeLogs{}) return es.At(es.Len() - 1) } @@ -322,29 +327,29 @@ func (es ScopeLogsSlice) AppendEmpty() ScopeLogs { // lessFunc := func(a, b ScopeLogs) bool { // return a.Name() < b.Name() // choose any comparison here // } -// assert.EqualValues(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) +// assert.Equal(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) func (es ScopeLogsSlice) Sort(less func(a, b ScopeLogs) bool) ScopeLogsSlice { - sort.SliceStable(*es.orig, func(i, j int) bool { return less(es.At(i), es.At(j)) }) + sort.SliceStable(*es.getOrig(), func(i, j int) bool { return less(es.At(i), es.At(j)) }) return es } // MoveAndAppendTo moves all elements from the current slice and appends them to the dest. // The current slice will be cleared. func (es ScopeLogsSlice) MoveAndAppendTo(dest ScopeLogsSlice) { - if *dest.orig == nil { + if *dest.getOrig() == nil { // We can simply move the entire vector and avoid any allocations. - *dest.orig = *es.orig + *dest.getOrig() = *es.getOrig() } else { - *dest.orig = append(*dest.orig, *es.orig...) + *dest.getOrig() = append(*dest.getOrig(), *es.getOrig()...) 
} - *es.orig = nil + *es.getOrig() = nil } // RemoveIf calls f sequentially for each element present in the slice. // If f returns true, the element is removed from the slice. func (es ScopeLogsSlice) RemoveIf(f func(ScopeLogs) bool) { newLen := 0 - for i := 0; i < len(*es.orig); i++ { + for i := 0; i < len(*es.getOrig()); i++ { if f(es.At(i)) { continue } @@ -353,11 +358,11 @@ func (es ScopeLogsSlice) RemoveIf(f func(ScopeLogs) bool) { newLen++ continue } - (*es.orig)[newLen] = (*es.orig)[i] + (*es.getOrig())[newLen] = (*es.getOrig())[i] newLen++ } // TODO: Prevent memory leak by erasing truncated values. - *es.orig = (*es.orig)[:newLen] + *es.getOrig() = (*es.getOrig())[:newLen] } // ScopeLogs is a collection of logs from a LibraryInstrumentation. @@ -367,12 +372,15 @@ func (es ScopeLogsSlice) RemoveIf(f func(ScopeLogs) bool) { // // Must use NewScopeLogs function to create new instances. // Important: zero-initialized instance is not valid for use. -type ScopeLogs struct { - orig *otlplogs.ScopeLogs -} + +type ScopeLogs internal.ScopeLogs func newScopeLogs(orig *otlplogs.ScopeLogs) ScopeLogs { - return ScopeLogs{orig: orig} + return ScopeLogs(internal.NewScopeLogs(orig)) +} + +func (ms ScopeLogs) getOrig() *otlplogs.ScopeLogs { + return internal.GetOrigScopeLogs(internal.ScopeLogs(ms)) } // NewScopeLogs creates a new empty ScopeLogs. @@ -386,28 +394,28 @@ func NewScopeLogs() ScopeLogs { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms ScopeLogs) MoveTo(dest ScopeLogs) { - *dest.orig = *ms.orig - *ms.orig = otlplogs.ScopeLogs{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlplogs.ScopeLogs{} } // Scope returns the scope associated with this ScopeLogs. -func (ms ScopeLogs) Scope() InstrumentationScope { - return newInstrumentationScope(&ms.orig.Scope) +func (ms ScopeLogs) Scope() pcommon.InstrumentationScope { + return pcommon.InstrumentationScope(internal.NewInstrumentationScope(&ms.getOrig().Scope)) } // SchemaUrl returns the schemaurl associated with this ScopeLogs. func (ms ScopeLogs) SchemaUrl() string { - return ms.orig.SchemaUrl + return ms.getOrig().SchemaUrl } // SetSchemaUrl replaces the schemaurl associated with this ScopeLogs. func (ms ScopeLogs) SetSchemaUrl(v string) { - ms.orig.SchemaUrl = v + ms.getOrig().SchemaUrl = v } // LogRecords returns the LogRecords associated with this ScopeLogs. func (ms ScopeLogs) LogRecords() LogRecordSlice { - return newLogRecordSlice(&ms.orig.LogRecords) + return LogRecordSlice(internal.NewLogRecordSlice(&ms.getOrig().LogRecords)) } // CopyTo copies all properties from the current struct to the dest. @@ -424,28 +432,28 @@ func (ms ScopeLogs) CopyTo(dest ScopeLogs) { // // Must use NewLogRecordSlice function to create new instances. // Important: zero-initialized instance is not valid for use. -type LogRecordSlice struct { - // orig points to the slice otlplogs.LogRecord field contained somewhere else. - // We use pointer-to-slice to be able to modify it in functions like EnsureCapacity. - orig *[]*otlplogs.LogRecord -} +type LogRecordSlice internal.LogRecordSlice func newLogRecordSlice(orig *[]*otlplogs.LogRecord) LogRecordSlice { - return LogRecordSlice{orig} + return LogRecordSlice(internal.NewLogRecordSlice(orig)) +} + +func (ms LogRecordSlice) getOrig() *[]*otlplogs.LogRecord { + return internal.GetOrigLogRecordSlice(internal.LogRecordSlice(ms)) } // NewLogRecordSlice creates a LogRecordSlice with 0 elements. 
// Can use "EnsureCapacity" to initialize with a given capacity. func NewLogRecordSlice() LogRecordSlice { orig := []*otlplogs.LogRecord(nil) - return LogRecordSlice{&orig} + return newLogRecordSlice(&orig) } // Len returns the number of elements in the slice. // // Returns "0" for a newly instance created with "NewLogRecordSlice()". func (es LogRecordSlice) Len() int { - return len(*es.orig) + return len(*es.getOrig()) } // At returns the element at the given index. @@ -457,27 +465,27 @@ func (es LogRecordSlice) Len() int { // ... // Do something with the element // } func (es LogRecordSlice) At(ix int) LogRecord { - return newLogRecord((*es.orig)[ix]) + return newLogRecord((*es.getOrig())[ix]) } // CopyTo copies all elements from the current slice to the dest. func (es LogRecordSlice) CopyTo(dest LogRecordSlice) { srcLen := es.Len() - destCap := cap(*dest.orig) + destCap := cap(*dest.getOrig()) if srcLen <= destCap { - (*dest.orig) = (*dest.orig)[:srcLen:destCap] - for i := range *es.orig { - newLogRecord((*es.orig)[i]).CopyTo(newLogRecord((*dest.orig)[i])) + (*dest.getOrig()) = (*dest.getOrig())[:srcLen:destCap] + for i := range *es.getOrig() { + newLogRecord((*es.getOrig())[i]).CopyTo(newLogRecord((*dest.getOrig())[i])) } return } origs := make([]otlplogs.LogRecord, srcLen) wrappers := make([]*otlplogs.LogRecord, srcLen) - for i := range *es.orig { + for i := range *es.getOrig() { wrappers[i] = &origs[i] - newLogRecord((*es.orig)[i]).CopyTo(newLogRecord(wrappers[i])) + newLogRecord((*es.getOrig())[i]).CopyTo(newLogRecord(wrappers[i])) } - *dest.orig = wrappers + *dest.getOrig() = wrappers } // EnsureCapacity is an operation that ensures the slice has at least the specified capacity. @@ -493,20 +501,20 @@ func (es LogRecordSlice) CopyTo(dest LogRecordSlice) { // // Here should set all the values for e. // } func (es LogRecordSlice) EnsureCapacity(newCap int) { - oldCap := cap(*es.orig) + oldCap := cap(*es.getOrig()) if newCap <= oldCap { return } - newOrig := make([]*otlplogs.LogRecord, len(*es.orig), newCap) - copy(newOrig, *es.orig) - *es.orig = newOrig + newOrig := make([]*otlplogs.LogRecord, len(*es.getOrig()), newCap) + copy(newOrig, *es.getOrig()) + *es.getOrig() = newOrig } // AppendEmpty will append to the end of the slice an empty LogRecord. // It returns the newly added LogRecord. func (es LogRecordSlice) AppendEmpty() LogRecord { - *es.orig = append(*es.orig, &otlplogs.LogRecord{}) + *es.getOrig() = append(*es.getOrig(), &otlplogs.LogRecord{}) return es.At(es.Len() - 1) } @@ -519,29 +527,29 @@ func (es LogRecordSlice) AppendEmpty() LogRecord { // lessFunc := func(a, b LogRecord) bool { // return a.Name() < b.Name() // choose any comparison here // } -// assert.EqualValues(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) +// assert.Equal(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) func (es LogRecordSlice) Sort(less func(a, b LogRecord) bool) LogRecordSlice { - sort.SliceStable(*es.orig, func(i, j int) bool { return less(es.At(i), es.At(j)) }) + sort.SliceStable(*es.getOrig(), func(i, j int) bool { return less(es.At(i), es.At(j)) }) return es } // MoveAndAppendTo moves all elements from the current slice and appends them to the dest. // The current slice will be cleared. func (es LogRecordSlice) MoveAndAppendTo(dest LogRecordSlice) { - if *dest.orig == nil { + if *dest.getOrig() == nil { // We can simply move the entire vector and avoid any allocations. - *dest.orig = *es.orig + *dest.getOrig() = *es.getOrig() } else { - *dest.orig = append(*dest.orig, *es.orig...) 
+ *dest.getOrig() = append(*dest.getOrig(), *es.getOrig()...) } - *es.orig = nil + *es.getOrig() = nil } // RemoveIf calls f sequentially for each element present in the slice. // If f returns true, the element is removed from the slice. func (es LogRecordSlice) RemoveIf(f func(LogRecord) bool) { newLen := 0 - for i := 0; i < len(*es.orig); i++ { + for i := 0; i < len(*es.getOrig()); i++ { if f(es.At(i)) { continue } @@ -550,26 +558,30 @@ func (es LogRecordSlice) RemoveIf(f func(LogRecord) bool) { newLen++ continue } - (*es.orig)[newLen] = (*es.orig)[i] + (*es.getOrig())[newLen] = (*es.getOrig())[i] newLen++ } // TODO: Prevent memory leak by erasing truncated values. - *es.orig = (*es.orig)[:newLen] + *es.getOrig() = (*es.getOrig())[:newLen] } // LogRecord are experimental implementation of OpenTelemetry Log Data Model. +// // This is a reference type, if passed by value and callee modifies it the // caller will see the modification. // // Must use NewLogRecord function to create new instances. // Important: zero-initialized instance is not valid for use. -type LogRecord struct { - orig *otlplogs.LogRecord -} + +type LogRecord internal.LogRecord func newLogRecord(orig *otlplogs.LogRecord) LogRecord { - return LogRecord{orig: orig} + return LogRecord(internal.NewLogRecord(orig)) +} + +func (ms LogRecord) getOrig() *otlplogs.LogRecord { + return internal.GetOrigLogRecord(internal.LogRecord(ms)) } // NewLogRecord creates a new empty LogRecord. @@ -583,93 +595,93 @@ func NewLogRecord() LogRecord { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms LogRecord) MoveTo(dest LogRecord) { - *dest.orig = *ms.orig - *ms.orig = otlplogs.LogRecord{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlplogs.LogRecord{} } // ObservedTimestamp returns the observedtimestamp associated with this LogRecord. -func (ms LogRecord) ObservedTimestamp() Timestamp { - return Timestamp(ms.orig.ObservedTimeUnixNano) +func (ms LogRecord) ObservedTimestamp() pcommon.Timestamp { + return pcommon.Timestamp(ms.getOrig().ObservedTimeUnixNano) } // SetObservedTimestamp replaces the observedtimestamp associated with this LogRecord. -func (ms LogRecord) SetObservedTimestamp(v Timestamp) { - ms.orig.ObservedTimeUnixNano = uint64(v) +func (ms LogRecord) SetObservedTimestamp(v pcommon.Timestamp) { + ms.getOrig().ObservedTimeUnixNano = uint64(v) } // Timestamp returns the timestamp associated with this LogRecord. -func (ms LogRecord) Timestamp() Timestamp { - return Timestamp(ms.orig.TimeUnixNano) +func (ms LogRecord) Timestamp() pcommon.Timestamp { + return pcommon.Timestamp(ms.getOrig().TimeUnixNano) } // SetTimestamp replaces the timestamp associated with this LogRecord. -func (ms LogRecord) SetTimestamp(v Timestamp) { - ms.orig.TimeUnixNano = uint64(v) +func (ms LogRecord) SetTimestamp(v pcommon.Timestamp) { + ms.getOrig().TimeUnixNano = uint64(v) } // TraceID returns the traceid associated with this LogRecord. -func (ms LogRecord) TraceID() TraceID { - return TraceID{orig: (ms.orig.TraceId)} +func (ms LogRecord) TraceID() pcommon.TraceID { + return pcommon.TraceID(internal.NewTraceID(ms.getOrig().TraceId)) } // SetTraceID replaces the traceid associated with this LogRecord. -func (ms LogRecord) SetTraceID(v TraceID) { - ms.orig.TraceId = v.orig +func (ms LogRecord) SetTraceID(v pcommon.TraceID) { + ms.getOrig().TraceId = internal.GetOrigTraceID(internal.TraceID(v)) } // SpanID returns the spanid associated with this LogRecord. 
-func (ms LogRecord) SpanID() SpanID { - return SpanID{orig: (ms.orig.SpanId)} +func (ms LogRecord) SpanID() pcommon.SpanID { + return pcommon.SpanID(internal.NewSpanID(ms.getOrig().SpanId)) } // SetSpanID replaces the spanid associated with this LogRecord. -func (ms LogRecord) SetSpanID(v SpanID) { - ms.orig.SpanId = v.orig +func (ms LogRecord) SetSpanID(v pcommon.SpanID) { + ms.getOrig().SpanId = internal.GetOrigSpanID(internal.SpanID(v)) } // FlagsStruct returns the flagsstruct associated with this LogRecord. func (ms LogRecord) FlagsStruct() LogRecordFlags { - return newLogRecordFlags(&ms.orig.Flags) + return LogRecordFlags(internal.NewLogRecordFlags(&ms.getOrig().Flags)) } // SeverityText returns the severitytext associated with this LogRecord. func (ms LogRecord) SeverityText() string { - return ms.orig.SeverityText + return ms.getOrig().SeverityText } // SetSeverityText replaces the severitytext associated with this LogRecord. func (ms LogRecord) SetSeverityText(v string) { - ms.orig.SeverityText = v + ms.getOrig().SeverityText = v } // SeverityNumber returns the severitynumber associated with this LogRecord. func (ms LogRecord) SeverityNumber() SeverityNumber { - return SeverityNumber(ms.orig.SeverityNumber) + return SeverityNumber(ms.getOrig().SeverityNumber) } // SetSeverityNumber replaces the severitynumber associated with this LogRecord. func (ms LogRecord) SetSeverityNumber(v SeverityNumber) { - ms.orig.SeverityNumber = otlplogs.SeverityNumber(v) + ms.getOrig().SeverityNumber = otlplogs.SeverityNumber(v) } // Body returns the body associated with this LogRecord. -func (ms LogRecord) Body() Value { - return newValue(&ms.orig.Body) +func (ms LogRecord) Body() pcommon.Value { + return pcommon.Value(internal.NewValue(&ms.getOrig().Body)) } // Attributes returns the Attributes associated with this LogRecord. -func (ms LogRecord) Attributes() Map { - return newMap(&ms.orig.Attributes) +func (ms LogRecord) Attributes() pcommon.Map { + return pcommon.Map(internal.NewMap(&ms.getOrig().Attributes)) } // DroppedAttributesCount returns the droppedattributescount associated with this LogRecord. func (ms LogRecord) DroppedAttributesCount() uint32 { - return ms.orig.DroppedAttributesCount + return ms.getOrig().DroppedAttributesCount } // SetDroppedAttributesCount replaces the droppedattributescount associated with this LogRecord. func (ms LogRecord) SetDroppedAttributesCount(v uint32) { - ms.orig.DroppedAttributesCount = v + ms.getOrig().DroppedAttributesCount = v } // CopyTo copies all properties from the current struct to the dest. diff --git a/pdata/plog/generated_logs_test.go b/pdata/plog/generated_logs_test.go new file mode 100644 index 00000000000..9d0e449411c --- /dev/null +++ b/pdata/plog/generated_logs_test.go @@ -0,0 +1,526 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. +// To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". 
+ +package plog + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "go.opentelemetry.io/collector/pdata/internal" + "go.opentelemetry.io/collector/pdata/internal/data" + otlplogs "go.opentelemetry.io/collector/pdata/internal/data/protogen/logs/v1" + "go.opentelemetry.io/collector/pdata/pcommon" +) + +func TestResourceLogsSlice(t *testing.T) { + es := NewResourceLogsSlice() + assert.Equal(t, 0, es.Len()) + es = newResourceLogsSlice(&[]*otlplogs.ResourceLogs{}) + assert.Equal(t, 0, es.Len()) + + es.EnsureCapacity(7) + emptyVal := newResourceLogs(&otlplogs.ResourceLogs{}) + testVal := ResourceLogs(internal.GenerateTestResourceLogs()) + assert.Equal(t, 7, cap(*es.getOrig())) + for i := 0; i < es.Len(); i++ { + el := es.AppendEmpty() + assert.Equal(t, emptyVal, el) + internal.FillTestResourceLogs(internal.ResourceLogs(el)) + assert.Equal(t, testVal, el) + } +} + +func TestResourceLogsSlice_CopyTo(t *testing.T) { + dest := NewResourceLogsSlice() + // Test CopyTo to empty + NewResourceLogsSlice().CopyTo(dest) + assert.Equal(t, NewResourceLogsSlice(), dest) + + // Test CopyTo larger slice + ResourceLogsSlice(internal.GenerateTestResourceLogsSlice()).CopyTo(dest) + assert.Equal(t, ResourceLogsSlice(internal.GenerateTestResourceLogsSlice()), dest) + + // Test CopyTo same size slice + ResourceLogsSlice(internal.GenerateTestResourceLogsSlice()).CopyTo(dest) + assert.Equal(t, ResourceLogsSlice(internal.GenerateTestResourceLogsSlice()), dest) +} + +func TestResourceLogsSlice_EnsureCapacity(t *testing.T) { + es := ResourceLogsSlice(internal.GenerateTestResourceLogsSlice()) + // Test ensure smaller capacity. + const ensureSmallLen = 4 + expectedEs := make(map[*otlplogs.ResourceLogs]bool) + for i := 0; i < es.Len(); i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, es.Len(), len(expectedEs)) + es.EnsureCapacity(ensureSmallLen) + assert.Less(t, ensureSmallLen, es.Len()) + foundEs := make(map[*otlplogs.ResourceLogs]bool, es.Len()) + for i := 0; i < es.Len(); i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) + + // Test ensure larger capacity + const ensureLargeLen = 9 + oldLen := es.Len() + expectedEs = make(map[*otlplogs.ResourceLogs]bool, oldLen) + for i := 0; i < oldLen; i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, oldLen, len(expectedEs)) + es.EnsureCapacity(ensureLargeLen) + assert.Equal(t, ensureLargeLen, cap(*es.getOrig())) + foundEs = make(map[*otlplogs.ResourceLogs]bool, oldLen) + for i := 0; i < oldLen; i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) +} + +func TestResourceLogsSlice_MoveAndAppendTo(t *testing.T) { + // Test MoveAndAppendTo to empty + expectedSlice := ResourceLogsSlice(internal.GenerateTestResourceLogsSlice()) + dest := NewResourceLogsSlice() + src := ResourceLogsSlice(internal.GenerateTestResourceLogsSlice()) + src.MoveAndAppendTo(dest) + assert.Equal(t, ResourceLogsSlice(internal.GenerateTestResourceLogsSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo empty slice + src.MoveAndAppendTo(dest) + assert.Equal(t, ResourceLogsSlice(internal.GenerateTestResourceLogsSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo not empty slice + ResourceLogsSlice(internal.GenerateTestResourceLogsSlice()).MoveAndAppendTo(dest) + assert.Equal(t, 2*expectedSlice.Len(), dest.Len()) + for i := 0; i < 
expectedSlice.Len(); i++ { + assert.Equal(t, expectedSlice.At(i), dest.At(i)) + assert.Equal(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) + } +} + +func TestResourceLogsSlice_RemoveIf(t *testing.T) { + // Test RemoveIf on empty slice + emptySlice := NewResourceLogsSlice() + emptySlice.RemoveIf(func(el ResourceLogs) bool { + t.Fail() + return false + }) + + // Test RemoveIf + filtered := ResourceLogsSlice(internal.GenerateTestResourceLogsSlice()) + pos := 0 + filtered.RemoveIf(func(el ResourceLogs) bool { + pos++ + return pos%3 == 0 + }) + assert.Equal(t, 5, filtered.Len()) +} + +func TestResourceLogs_MoveTo(t *testing.T) { + ms := ResourceLogs(internal.GenerateTestResourceLogs()) + dest := NewResourceLogs() + ms.MoveTo(dest) + assert.Equal(t, NewResourceLogs(), ms) + assert.Equal(t, ResourceLogs(internal.GenerateTestResourceLogs()), dest) +} + +func TestResourceLogs_CopyTo(t *testing.T) { + ms := NewResourceLogs() + orig := NewResourceLogs() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = ResourceLogs(internal.GenerateTestResourceLogs()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestResourceLogs_Resource(t *testing.T) { + ms := NewResourceLogs() + internal.FillTestResource(internal.Resource(ms.Resource())) + assert.Equal(t, pcommon.Resource(internal.GenerateTestResource()), ms.Resource()) +} + +func TestResourceLogs_SchemaUrl(t *testing.T) { + ms := NewResourceLogs() + assert.Equal(t, "", ms.SchemaUrl()) + ms.SetSchemaUrl("https://opentelemetry.io/schemas/1.5.0") + assert.Equal(t, "https://opentelemetry.io/schemas/1.5.0", ms.SchemaUrl()) +} + +func TestResourceLogs_ScopeLogs(t *testing.T) { + ms := NewResourceLogs() + assert.Equal(t, NewScopeLogsSlice(), ms.ScopeLogs()) + internal.FillTestScopeLogsSlice(internal.ScopeLogsSlice(ms.ScopeLogs())) + assert.Equal(t, ScopeLogsSlice(internal.GenerateTestScopeLogsSlice()), ms.ScopeLogs()) +} + +func TestScopeLogsSlice(t *testing.T) { + es := NewScopeLogsSlice() + assert.Equal(t, 0, es.Len()) + es = newScopeLogsSlice(&[]*otlplogs.ScopeLogs{}) + assert.Equal(t, 0, es.Len()) + + es.EnsureCapacity(7) + emptyVal := newScopeLogs(&otlplogs.ScopeLogs{}) + testVal := ScopeLogs(internal.GenerateTestScopeLogs()) + assert.Equal(t, 7, cap(*es.getOrig())) + for i := 0; i < es.Len(); i++ { + el := es.AppendEmpty() + assert.Equal(t, emptyVal, el) + internal.FillTestScopeLogs(internal.ScopeLogs(el)) + assert.Equal(t, testVal, el) + } +} + +func TestScopeLogsSlice_CopyTo(t *testing.T) { + dest := NewScopeLogsSlice() + // Test CopyTo to empty + NewScopeLogsSlice().CopyTo(dest) + assert.Equal(t, NewScopeLogsSlice(), dest) + + // Test CopyTo larger slice + ScopeLogsSlice(internal.GenerateTestScopeLogsSlice()).CopyTo(dest) + assert.Equal(t, ScopeLogsSlice(internal.GenerateTestScopeLogsSlice()), dest) + + // Test CopyTo same size slice + ScopeLogsSlice(internal.GenerateTestScopeLogsSlice()).CopyTo(dest) + assert.Equal(t, ScopeLogsSlice(internal.GenerateTestScopeLogsSlice()), dest) +} + +func TestScopeLogsSlice_EnsureCapacity(t *testing.T) { + es := ScopeLogsSlice(internal.GenerateTestScopeLogsSlice()) + // Test ensure smaller capacity. 
+ const ensureSmallLen = 4 + expectedEs := make(map[*otlplogs.ScopeLogs]bool) + for i := 0; i < es.Len(); i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, es.Len(), len(expectedEs)) + es.EnsureCapacity(ensureSmallLen) + assert.Less(t, ensureSmallLen, es.Len()) + foundEs := make(map[*otlplogs.ScopeLogs]bool, es.Len()) + for i := 0; i < es.Len(); i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) + + // Test ensure larger capacity + const ensureLargeLen = 9 + oldLen := es.Len() + expectedEs = make(map[*otlplogs.ScopeLogs]bool, oldLen) + for i := 0; i < oldLen; i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, oldLen, len(expectedEs)) + es.EnsureCapacity(ensureLargeLen) + assert.Equal(t, ensureLargeLen, cap(*es.getOrig())) + foundEs = make(map[*otlplogs.ScopeLogs]bool, oldLen) + for i := 0; i < oldLen; i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) +} + +func TestScopeLogsSlice_MoveAndAppendTo(t *testing.T) { + // Test MoveAndAppendTo to empty + expectedSlice := ScopeLogsSlice(internal.GenerateTestScopeLogsSlice()) + dest := NewScopeLogsSlice() + src := ScopeLogsSlice(internal.GenerateTestScopeLogsSlice()) + src.MoveAndAppendTo(dest) + assert.Equal(t, ScopeLogsSlice(internal.GenerateTestScopeLogsSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo empty slice + src.MoveAndAppendTo(dest) + assert.Equal(t, ScopeLogsSlice(internal.GenerateTestScopeLogsSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo not empty slice + ScopeLogsSlice(internal.GenerateTestScopeLogsSlice()).MoveAndAppendTo(dest) + assert.Equal(t, 2*expectedSlice.Len(), dest.Len()) + for i := 0; i < expectedSlice.Len(); i++ { + assert.Equal(t, expectedSlice.At(i), dest.At(i)) + assert.Equal(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) + } +} + +func TestScopeLogsSlice_RemoveIf(t *testing.T) { + // Test RemoveIf on empty slice + emptySlice := NewScopeLogsSlice() + emptySlice.RemoveIf(func(el ScopeLogs) bool { + t.Fail() + return false + }) + + // Test RemoveIf + filtered := ScopeLogsSlice(internal.GenerateTestScopeLogsSlice()) + pos := 0 + filtered.RemoveIf(func(el ScopeLogs) bool { + pos++ + return pos%3 == 0 + }) + assert.Equal(t, 5, filtered.Len()) +} + +func TestScopeLogs_MoveTo(t *testing.T) { + ms := ScopeLogs(internal.GenerateTestScopeLogs()) + dest := NewScopeLogs() + ms.MoveTo(dest) + assert.Equal(t, NewScopeLogs(), ms) + assert.Equal(t, ScopeLogs(internal.GenerateTestScopeLogs()), dest) +} + +func TestScopeLogs_CopyTo(t *testing.T) { + ms := NewScopeLogs() + orig := NewScopeLogs() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = ScopeLogs(internal.GenerateTestScopeLogs()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestScopeLogs_Scope(t *testing.T) { + ms := NewScopeLogs() + internal.FillTestInstrumentationScope(internal.InstrumentationScope(ms.Scope())) + assert.Equal(t, pcommon.InstrumentationScope(internal.GenerateTestInstrumentationScope()), ms.Scope()) +} + +func TestScopeLogs_SchemaUrl(t *testing.T) { + ms := NewScopeLogs() + assert.Equal(t, "", ms.SchemaUrl()) + ms.SetSchemaUrl("https://opentelemetry.io/schemas/1.5.0") + assert.Equal(t, "https://opentelemetry.io/schemas/1.5.0", ms.SchemaUrl()) +} + +func TestScopeLogs_LogRecords(t *testing.T) { + ms := NewScopeLogs() + assert.Equal(t, NewLogRecordSlice(), ms.LogRecords()) + 
internal.FillTestLogRecordSlice(internal.LogRecordSlice(ms.LogRecords())) + assert.Equal(t, LogRecordSlice(internal.GenerateTestLogRecordSlice()), ms.LogRecords()) +} + +func TestLogRecordSlice(t *testing.T) { + es := NewLogRecordSlice() + assert.Equal(t, 0, es.Len()) + es = newLogRecordSlice(&[]*otlplogs.LogRecord{}) + assert.Equal(t, 0, es.Len()) + + es.EnsureCapacity(7) + emptyVal := newLogRecord(&otlplogs.LogRecord{}) + testVal := LogRecord(internal.GenerateTestLogRecord()) + assert.Equal(t, 7, cap(*es.getOrig())) + for i := 0; i < es.Len(); i++ { + el := es.AppendEmpty() + assert.Equal(t, emptyVal, el) + internal.FillTestLogRecord(internal.LogRecord(el)) + assert.Equal(t, testVal, el) + } +} + +func TestLogRecordSlice_CopyTo(t *testing.T) { + dest := NewLogRecordSlice() + // Test CopyTo to empty + NewLogRecordSlice().CopyTo(dest) + assert.Equal(t, NewLogRecordSlice(), dest) + + // Test CopyTo larger slice + LogRecordSlice(internal.GenerateTestLogRecordSlice()).CopyTo(dest) + assert.Equal(t, LogRecordSlice(internal.GenerateTestLogRecordSlice()), dest) + + // Test CopyTo same size slice + LogRecordSlice(internal.GenerateTestLogRecordSlice()).CopyTo(dest) + assert.Equal(t, LogRecordSlice(internal.GenerateTestLogRecordSlice()), dest) +} + +func TestLogRecordSlice_EnsureCapacity(t *testing.T) { + es := LogRecordSlice(internal.GenerateTestLogRecordSlice()) + // Test ensure smaller capacity. + const ensureSmallLen = 4 + expectedEs := make(map[*otlplogs.LogRecord]bool) + for i := 0; i < es.Len(); i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, es.Len(), len(expectedEs)) + es.EnsureCapacity(ensureSmallLen) + assert.Less(t, ensureSmallLen, es.Len()) + foundEs := make(map[*otlplogs.LogRecord]bool, es.Len()) + for i := 0; i < es.Len(); i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) + + // Test ensure larger capacity + const ensureLargeLen = 9 + oldLen := es.Len() + expectedEs = make(map[*otlplogs.LogRecord]bool, oldLen) + for i := 0; i < oldLen; i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, oldLen, len(expectedEs)) + es.EnsureCapacity(ensureLargeLen) + assert.Equal(t, ensureLargeLen, cap(*es.getOrig())) + foundEs = make(map[*otlplogs.LogRecord]bool, oldLen) + for i := 0; i < oldLen; i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) +} + +func TestLogRecordSlice_MoveAndAppendTo(t *testing.T) { + // Test MoveAndAppendTo to empty + expectedSlice := LogRecordSlice(internal.GenerateTestLogRecordSlice()) + dest := NewLogRecordSlice() + src := LogRecordSlice(internal.GenerateTestLogRecordSlice()) + src.MoveAndAppendTo(dest) + assert.Equal(t, LogRecordSlice(internal.GenerateTestLogRecordSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo empty slice + src.MoveAndAppendTo(dest) + assert.Equal(t, LogRecordSlice(internal.GenerateTestLogRecordSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo not empty slice + LogRecordSlice(internal.GenerateTestLogRecordSlice()).MoveAndAppendTo(dest) + assert.Equal(t, 2*expectedSlice.Len(), dest.Len()) + for i := 0; i < expectedSlice.Len(); i++ { + assert.Equal(t, expectedSlice.At(i), dest.At(i)) + assert.Equal(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) + } +} + +func TestLogRecordSlice_RemoveIf(t *testing.T) { + // Test RemoveIf on empty slice + emptySlice := NewLogRecordSlice() + 
emptySlice.RemoveIf(func(el LogRecord) bool { + t.Fail() + return false + }) + + // Test RemoveIf + filtered := LogRecordSlice(internal.GenerateTestLogRecordSlice()) + pos := 0 + filtered.RemoveIf(func(el LogRecord) bool { + pos++ + return pos%3 == 0 + }) + assert.Equal(t, 5, filtered.Len()) +} + +func TestLogRecord_MoveTo(t *testing.T) { + ms := LogRecord(internal.GenerateTestLogRecord()) + dest := NewLogRecord() + ms.MoveTo(dest) + assert.Equal(t, NewLogRecord(), ms) + assert.Equal(t, LogRecord(internal.GenerateTestLogRecord()), dest) +} + +func TestLogRecord_CopyTo(t *testing.T) { + ms := NewLogRecord() + orig := NewLogRecord() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = LogRecord(internal.GenerateTestLogRecord()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestLogRecord_ObservedTimestamp(t *testing.T) { + ms := NewLogRecord() + assert.Equal(t, pcommon.Timestamp(0), ms.ObservedTimestamp()) + testValObservedTimestamp := pcommon.Timestamp(1234567890) + ms.SetObservedTimestamp(testValObservedTimestamp) + assert.Equal(t, testValObservedTimestamp, ms.ObservedTimestamp()) +} + +func TestLogRecord_Timestamp(t *testing.T) { + ms := NewLogRecord() + assert.Equal(t, pcommon.Timestamp(0), ms.Timestamp()) + testValTimestamp := pcommon.Timestamp(1234567890) + ms.SetTimestamp(testValTimestamp) + assert.Equal(t, testValTimestamp, ms.Timestamp()) +} + +func TestLogRecord_TraceID(t *testing.T) { + ms := NewLogRecord() + assert.Equal(t, pcommon.TraceID(internal.NewTraceID(data.NewTraceID([16]byte{}))), ms.TraceID()) + testValTraceID := pcommon.TraceID(internal.NewTraceID(data.NewTraceID([16]byte{1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1}))) + ms.SetTraceID(testValTraceID) + assert.Equal(t, testValTraceID, ms.TraceID()) +} + +func TestLogRecord_SpanID(t *testing.T) { + ms := NewLogRecord() + assert.Equal(t, pcommon.SpanID(internal.NewSpanID(data.NewSpanID([8]byte{}))), ms.SpanID()) + testValSpanID := pcommon.SpanID(internal.NewSpanID(data.NewSpanID([8]byte{1, 2, 3, 4, 5, 6, 7, 8}))) + ms.SetSpanID(testValSpanID) + assert.Equal(t, testValSpanID, ms.SpanID()) +} + +func TestLogRecord_FlagsStruct(t *testing.T) { + ms := NewLogRecord() + internal.FillTestLogRecordFlags(internal.LogRecordFlags(ms.FlagsStruct())) + assert.Equal(t, LogRecordFlags(internal.GenerateTestLogRecordFlags()), ms.FlagsStruct()) +} + +func TestLogRecord_SeverityText(t *testing.T) { + ms := NewLogRecord() + assert.Equal(t, "", ms.SeverityText()) + ms.SetSeverityText("INFO") + assert.Equal(t, "INFO", ms.SeverityText()) +} + +func TestLogRecord_SeverityNumber(t *testing.T) { + ms := NewLogRecord() + assert.Equal(t, SeverityNumber(otlplogs.SeverityNumber(0)), ms.SeverityNumber()) + testValSeverityNumber := SeverityNumber(otlplogs.SeverityNumber(5)) + ms.SetSeverityNumber(testValSeverityNumber) + assert.Equal(t, testValSeverityNumber, ms.SeverityNumber()) +} + +func TestLogRecord_Body(t *testing.T) { + ms := NewLogRecord() + internal.FillTestValue(internal.Value(ms.Body())) + assert.Equal(t, pcommon.Value(internal.GenerateTestValue()), ms.Body()) +} + +func TestLogRecord_Attributes(t *testing.T) { + ms := NewLogRecord() + assert.Equal(t, pcommon.NewMap(), ms.Attributes()) + internal.FillTestMap(internal.Map(ms.Attributes())) + assert.Equal(t, pcommon.Map(internal.GenerateTestMap()), ms.Attributes()) +} + +func TestLogRecord_DroppedAttributesCount(t *testing.T) { + ms := NewLogRecord() + assert.Equal(t, uint32(0), ms.DroppedAttributesCount()) + ms.SetDroppedAttributesCount(uint32(17)) + assert.Equal(t, uint32(17), 
ms.DroppedAttributesCount()) +} diff --git a/pdata/plog/json.go b/pdata/plog/json.go index 050651ae740..9692b7c21a4 100644 --- a/pdata/plog/json.go +++ b/pdata/plog/json.go @@ -39,7 +39,7 @@ func newJSONMarshaler() *jsonMarshaler { func (e *jsonMarshaler) MarshalLogs(ld Logs) ([]byte, error) { buf := bytes.Buffer{} - pb := internal.LogsToProto(ld) + pb := internal.LogsToProto(internal.Logs(ld)) err := e.delegate.Marshal(&buf, &pb) return buf.Bytes(), err } @@ -63,5 +63,5 @@ func (d *jsonUnmarshaler) UnmarshalLogs(buf []byte) (Logs, error) { return Logs{}, err } otlp.MigrateLogs(ld.ResourceLogs) - return internal.LogsFromProto(ld), nil + return Logs(internal.LogsFromProto(ld)), nil } diff --git a/pdata/internal/logs.go b/pdata/plog/logs.go similarity index 60% rename from pdata/internal/logs.go rename to pdata/plog/logs.go index c5d842ed156..223ffb1899f 100644 --- a/pdata/internal/logs.go +++ b/pdata/plog/logs.go @@ -12,66 +12,49 @@ // See the License for the specific language governing permissions and // limitations under the License. -package internal // import "go.opentelemetry.io/collector/pdata/internal" +package plog // import "go.opentelemetry.io/collector/pdata/plog" import ( + "go.opentelemetry.io/collector/pdata/internal" otlpcollectorlog "go.opentelemetry.io/collector/pdata/internal/data/protogen/collector/logs/v1" otlplogs "go.opentelemetry.io/collector/pdata/internal/data/protogen/logs/v1" ) -// LogsToOtlp internal helper to convert Logs to otlp request representation. -func LogsToOtlp(mw Logs) *otlpcollectorlog.ExportLogsServiceRequest { - return mw.orig -} - -// LogsFromOtlp internal helper to convert otlp request representation to Logs. -func LogsFromOtlp(orig *otlpcollectorlog.ExportLogsServiceRequest) Logs { - return Logs{orig: orig} -} - -// LogsToProto internal helper to convert Logs to protobuf representation. -func LogsToProto(l Logs) otlplogs.LogsData { - return otlplogs.LogsData{ - ResourceLogs: l.orig.ResourceLogs, - } -} +// Logs is the top-level struct that is propagated through the logs pipeline. +// Use NewLogs to create new instance, zero-initialized instance is not valid for use. +type Logs internal.Logs -// LogsFromProto internal helper to convert protobuf representation to Logs. -func LogsFromProto(orig otlplogs.LogsData) Logs { - return Logs{orig: &otlpcollectorlog.ExportLogsServiceRequest{ - ResourceLogs: orig.ResourceLogs, - }} +func newLogs(orig *otlpcollectorlog.ExportLogsServiceRequest) Logs { + return Logs(internal.NewLogs(orig)) } -// Logs is the top-level struct that is propagated through the logs pipeline. -// Use NewLogs to create new instance, zero-initialized instance is not valid for use. -type Logs struct { - orig *otlpcollectorlog.ExportLogsServiceRequest +func (ms Logs) getOrig() *otlpcollectorlog.ExportLogsServiceRequest { + return internal.GetOrigLogs(internal.Logs(ms)) } // NewLogs creates a new Logs struct. func NewLogs() Logs { - return Logs{orig: &otlpcollectorlog.ExportLogsServiceRequest{}} + return newLogs(&otlpcollectorlog.ExportLogsServiceRequest{}) } // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value. -func (ld Logs) MoveTo(dest Logs) { - *dest.orig = *ld.orig - *ld.orig = otlpcollectorlog.ExportLogsServiceRequest{} +func (ms Logs) MoveTo(dest Logs) { + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlpcollectorlog.ExportLogsServiceRequest{} } // Clone returns a copy of Logs. 
-func (ld Logs) Clone() Logs { +func (ms Logs) Clone() Logs { cloneLd := NewLogs() - ld.ResourceLogs().CopyTo(cloneLd.ResourceLogs()) + ms.ResourceLogs().CopyTo(cloneLd.ResourceLogs()) return cloneLd } // LogRecordCount calculates the total number of log records. -func (ld Logs) LogRecordCount() int { +func (ms Logs) LogRecordCount() int { logCount := 0 - rss := ld.ResourceLogs() + rss := ms.ResourceLogs() for i := 0; i < rss.Len(); i++ { rs := rss.At(i) ill := rs.ScopeLogs() @@ -84,8 +67,8 @@ func (ld Logs) LogRecordCount() int { } // ResourceLogs returns the ResourceLogsSlice associated with this Logs. -func (ld Logs) ResourceLogs() ResourceLogsSlice { - return newResourceLogsSlice(&ld.orig.ResourceLogs) +func (ms Logs) ResourceLogs() ResourceLogsSlice { + return newResourceLogsSlice(&ms.getOrig().ResourceLogs) } // SeverityNumber represents severity number of a log record. @@ -119,17 +102,94 @@ const ( SeverityNumberFatal4 = SeverityNumber(otlplogs.SeverityNumber_SEVERITY_NUMBER_FATAL4) ) +const ( + // Deprecated: [0.59.0] Use SeverityNumberUndefined instead + SeverityNumberUNDEFINED = SeverityNumberUndefined + + // Deprecated: [0.59.0] Use SeverityNumberTrace instead + SeverityNumberTRACE = SeverityNumberTrace + + // Deprecated: [0.59.0] Use SeverityNumberTrace2 instead + SeverityNumberTRACE2 = SeverityNumberTrace2 + + // Deprecated: [0.59.0] Use SeverityNumberTrace3 instead + SeverityNumberTRACE3 = SeverityNumberTrace3 + + // Deprecated: [0.59.0] Use SeverityNumberTrace4 instead + SeverityNumberTRACE4 = SeverityNumberTrace4 + + // Deprecated: [0.59.0] Use SeverityNumberDebug instead + SeverityNumberDEBUG = SeverityNumberDebug + + // Deprecated: [0.59.0] Use SeverityNumberDebug2 instead + SeverityNumberDEBUG2 = SeverityNumberDebug2 + + // Deprecated: [0.59.0] Use SeverityNumberDebug3 instead + SeverityNumberDEBUG3 = SeverityNumberDebug3 + + // Deprecated: [0.59.0] Use SeverityNumberDebug4 instead + SeverityNumberDEBUG4 = SeverityNumberDebug4 + + // Deprecated: [0.59.0] Use SeverityNumberInfo instead + SeverityNumberINFO = SeverityNumberInfo + + // Deprecated: [0.59.0] Use SeverityNumberInfo2 instead + SeverityNumberINFO2 = SeverityNumberInfo2 + + // Deprecated: [0.59.0] Use SeverityNumberInfo3 instead + SeverityNumberINFO3 = SeverityNumberInfo3 + + // Deprecated: [0.59.0] Use SeverityNumberInfo4 instead + SeverityNumberINFO4 = SeverityNumberInfo4 + + // Deprecated: [0.59.0] Use SeverityNumberWarn instead + SeverityNumberWARN = SeverityNumberWarn + + // Deprecated: [0.59.0] Use SeverityNumberWarn2 instead + SeverityNumberWARN2 = SeverityNumberWarn2 + + // Deprecated: [0.59.0] Use SeverityNumberWarn3 instead + SeverityNumberWARN3 = SeverityNumberWarn3 + + // Deprecated: [0.59.0] Use SeverityNumberWarn4 instead + SeverityNumberWARN4 = SeverityNumberWarn4 + + // Deprecated: [0.59.0] Use SeverityNumberError instead + SeverityNumberERROR = SeverityNumberError + + // Deprecated: [0.59.0] Use SeverityNumberError2 instead + SeverityNumberERROR2 = SeverityNumberError2 + + // Deprecated: [0.59.0] Use SeverityNumberError3 instead + SeverityNumberERROR3 = SeverityNumberError3 + + // Deprecated: [0.59.0] Use SeverityNumberError4 instead + SeverityNumberERROR4 = SeverityNumberError4 + + // Deprecated: [0.59.0] Use SeverityNumberFatal instead + SeverityNumberFATAL = SeverityNumberFatal + + // Deprecated: [0.59.0] Use SeverityNumberFatal2 instead + SeverityNumberFATAL2 = SeverityNumberFatal2 + + // Deprecated: [0.59.0] Use SeverityNumberFatal3 instead + SeverityNumberFATAL3 = SeverityNumberFatal3 + + 
// Deprecated: [0.59.0] Use SeverityNumberFatal4 instead + SeverityNumberFATAL4 = SeverityNumberFatal4 +) + // String returns the string representation of the SeverityNumber. func (sn SeverityNumber) String() string { return otlplogs.SeverityNumber(sn).String() } // Deprecated: [v0.59.0] use FlagsStruct(). func (ms LogRecord) Flags() uint32 { - return ms.orig.Flags + return ms.getOrig().Flags } // Deprecated: [v0.59.0] use FlagsStruct(). func (ms LogRecord) SetFlags(v uint32) { - ms.orig.Flags = v + ms.getOrig().Flags = v } const ( @@ -144,12 +204,14 @@ const ( // // Must use NewLogRecordFlags function to create new instances. // Important: zero-initialized instance is not valid for use. -type LogRecordFlags struct { - orig *uint32 -} +type LogRecordFlags internal.LogRecordFlags func newLogRecordFlags(orig *uint32) LogRecordFlags { - return LogRecordFlags{orig: orig} + return LogRecordFlags(internal.NewLogRecordFlags(orig)) +} + +func (ms LogRecordFlags) getOrig() *uint32 { + return internal.GetOrigLogRecordFlags(internal.LogRecordFlags(ms)) } // NewLogRecordFlags creates a new empty LogRecordFlags. @@ -162,31 +224,31 @@ func NewLogRecordFlags() LogRecordFlags { // MoveTo moves all properties from the current struct to dest resetting the current instance to its zero value func (ms LogRecordFlags) MoveTo(dest LogRecordFlags) { - *dest.orig = *ms.orig - *ms.orig = traceFlagsNone + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = traceFlagsNone } // CopyTo copies all properties from the current struct to the dest. func (ms LogRecordFlags) CopyTo(dest LogRecordFlags) { - *dest.orig = *ms.orig + *dest.getOrig() = *ms.getOrig() } // IsSampled returns true if the LogRecordFlags contains the IsSampled flag. func (ms LogRecordFlags) IsSampled() bool { - return *ms.orig&isSampledMask != 0 + return *ms.getOrig()&isSampledMask != 0 } // SetIsSampled sets the IsSampled flag if true and removes it if false. // Setting this Flag when it is already set is a no-op. func (ms LogRecordFlags) SetIsSampled(b bool) { if b { - *ms.orig |= isSampledMask + *ms.getOrig() |= isSampledMask } else { - *ms.orig &^= isSampledMask + *ms.getOrig() &^= isSampledMask } } // AsRaw converts LogRecordFlags to the OTLP uint32 representation. func (ms LogRecordFlags) AsRaw() uint32 { - return *ms.orig + return *ms.getOrig() } diff --git a/pdata/internal/logs_test.go b/pdata/plog/logs_test.go similarity index 79% rename from pdata/internal/logs_test.go rename to pdata/plog/logs_test.go index c0a5d831607..e3ef8f7f6c5 100644 --- a/pdata/internal/logs_test.go +++ b/pdata/plog/logs_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package internal +package plog import ( "testing" @@ -22,6 +22,7 @@ import ( goproto "google.golang.org/protobuf/proto" "google.golang.org/protobuf/types/known/emptypb" + "go.opentelemetry.io/collector/pdata/internal" otlpcollectorlog "go.opentelemetry.io/collector/pdata/internal/data/protogen/collector/logs/v1" otlplogs "go.opentelemetry.io/collector/pdata/internal/data/protogen/logs/v1" ) @@ -52,17 +53,17 @@ func TestLogRecordCount(t *testing.T) { func TestLogRecordCountWithEmpty(t *testing.T) { assert.Zero(t, NewLogs().LogRecordCount()) - assert.Zero(t, Logs{orig: &otlpcollectorlog.ExportLogsServiceRequest{ + assert.Zero(t, newLogs(&otlpcollectorlog.ExportLogsServiceRequest{ ResourceLogs: []*otlplogs.ResourceLogs{{}}, - }}.LogRecordCount()) - assert.Zero(t, Logs{orig: &otlpcollectorlog.ExportLogsServiceRequest{ + }).LogRecordCount()) + assert.Zero(t, newLogs(&otlpcollectorlog.ExportLogsServiceRequest{ ResourceLogs: []*otlplogs.ResourceLogs{ { ScopeLogs: []*otlplogs.ScopeLogs{{}}, }, }, - }}.LogRecordCount()) - assert.Equal(t, 1, Logs{orig: &otlpcollectorlog.ExportLogsServiceRequest{ + }).LogRecordCount()) + assert.Equal(t, 1, newLogs(&otlpcollectorlog.ExportLogsServiceRequest{ ResourceLogs: []*otlplogs.ResourceLogs{ { ScopeLogs: []*otlplogs.ScopeLogs{ @@ -72,14 +73,14 @@ func TestLogRecordCountWithEmpty(t *testing.T) { }, }, }, - }}.LogRecordCount()) + }).LogRecordCount()) } func TestToFromLogOtlp(t *testing.T) { otlp := &otlpcollectorlog.ExportLogsServiceRequest{} - logs := LogsFromOtlp(otlp) + logs := newLogs(otlp) assert.EqualValues(t, NewLogs(), logs) - assert.EqualValues(t, otlp, LogsToOtlp(logs)) + assert.EqualValues(t, otlp, logs.getOrig()) } func TestResourceLogsWireCompatibility(t *testing.T) { @@ -88,10 +89,11 @@ func TestResourceLogsWireCompatibility(t *testing.T) { // this repository are wire compatible. // Generate ResourceLogs as pdata struct. - logs := generateTestResourceLogs() + logs := NewLogs() + internal.FillTestResourceLogsSlice(internal.ResourceLogsSlice(logs.ResourceLogs())) // Marshal its underlying ProtoBuf to wire. - wire1, err := gogoproto.Marshal(logs.orig) + wire1, err := gogoproto.Marshal(logs.getOrig()) assert.NoError(t, err) assert.NotNil(t, wire1) @@ -106,32 +108,32 @@ func TestResourceLogsWireCompatibility(t *testing.T) { assert.NotNil(t, wire2) // Unmarshal from the wire into gogoproto's representation. - var gogoprotoRS2 otlplogs.ResourceLogs + var gogoprotoRS2 otlpcollectorlog.ExportLogsServiceRequest err = gogoproto.Unmarshal(wire2, &gogoprotoRS2) assert.NoError(t, err) // Now compare that the original and final ProtoBuf messages are the same. // This proves that goproto and gogoproto marshaling/unmarshaling are wire compatible. 
- assert.EqualValues(t, logs.orig, &gogoprotoRS2) + assert.EqualValues(t, logs.getOrig(), &gogoprotoRS2) } func TestLogsMoveTo(t *testing.T) { logs := NewLogs() - fillTestResourceLogsSlice(logs.ResourceLogs()) + internal.FillTestResourceLogsSlice(internal.ResourceLogsSlice(logs.ResourceLogs())) dest := NewLogs() logs.MoveTo(dest) assert.EqualValues(t, NewLogs(), logs) - assert.EqualValues(t, generateTestResourceLogsSlice(), dest.ResourceLogs()) + assert.EqualValues(t, ResourceLogsSlice(internal.GenerateTestResourceLogsSlice()), dest.ResourceLogs()) } func TestLogsClone(t *testing.T) { logs := NewLogs() - fillTestResourceLogsSlice(logs.ResourceLogs()) + internal.FillTestResourceLogsSlice(internal.ResourceLogsSlice(logs.ResourceLogs())) assert.EqualValues(t, logs, logs.Clone()) } func TestLogRecordFlags(t *testing.T) { - flags := generateTestLogRecordFlags() + flags := LogRecordFlags(internal.GenerateTestLogRecordFlags()) assert.True(t, flags.IsSampled()) assert.Equal(t, uint32(1), flags.AsRaw()) @@ -159,7 +161,7 @@ func TestLogRecordFlags(t *testing.T) { func BenchmarkLogsClone(b *testing.B) { logs := NewLogs() - fillTestResourceLogsSlice(logs.ResourceLogs()) + internal.FillTestResourceLogsSlice(internal.ResourceLogsSlice(logs.ResourceLogs())) b.ResetTimer() for n := 0; n < b.N; n++ { clone := logs.Clone() @@ -168,13 +170,3 @@ func BenchmarkLogsClone(b *testing.B) { } } } - -func fillTestLogRecordFlags(tv LogRecordFlags) { - tv.SetIsSampled(true) -} - -func generateTestLogRecordFlags() LogRecordFlags { - tv := NewLogRecordFlags() - fillTestLogRecordFlags(tv) - return tv -} diff --git a/pdata/plog/pb.go b/pdata/plog/pb.go index a12c967edcc..e49c1c92c08 100644 --- a/pdata/plog/pb.go +++ b/pdata/plog/pb.go @@ -34,12 +34,12 @@ func newPbMarshaler() *pbMarshaler { var _ Sizer = (*pbMarshaler)(nil) func (e *pbMarshaler) MarshalLogs(ld Logs) ([]byte, error) { - pb := internal.LogsToProto(ld) + pb := internal.LogsToProto(internal.Logs(ld)) return pb.Marshal() } func (e *pbMarshaler) LogsSize(ld Logs) int { - pb := internal.LogsToProto(ld) + pb := internal.LogsToProto(internal.Logs(ld)) return pb.Size() } @@ -57,5 +57,5 @@ func newPbUnmarshaler() *pbUnmarshaler { func (d *pbUnmarshaler) UnmarshalLogs(buf []byte) (Logs, error) { pb := otlplogs.LogsData{} err := pb.Unmarshal(buf) - return internal.LogsFromProto(pb), err + return Logs(internal.LogsFromProto(pb)), err } diff --git a/pdata/plog/plogotlp/logs.go b/pdata/plog/plogotlp/logs.go index 4026fdc9db1..fe7a840e798 100644 --- a/pdata/plog/plogotlp/logs.go +++ b/pdata/plog/plogotlp/logs.go @@ -78,8 +78,8 @@ func NewRequest() Request { // NewRequestFromLogs returns a Request from plog.Logs. // Because Request is a wrapper for plog.Logs, // any changes to the provided Logs struct will be reflected in the Request and vice versa. -func NewRequestFromLogs(l plog.Logs) Request { - return Request{orig: internal.LogsToOtlp(l)} +func NewRequestFromLogs(ld plog.Logs) Request { + return Request{orig: internal.GetOrigLogs(internal.Logs(ld))} } // MarshalProto marshals Request into proto bytes. @@ -115,7 +115,7 @@ func (lr Request) UnmarshalJSON(data []byte) error { } func (lr Request) Logs() plog.Logs { - return internal.LogsFromOtlp(lr.orig) + return plog.Logs(internal.NewLogs(lr.orig)) } // Client is the client API for OTLP-GRPC Logs service. 
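The `LogRecordFlags` change above replaces the private `orig *uint32` struct with a type definition of `internal.LogRecordFlags`, so the exported surface (`NewLogRecordFlags`, `SetIsSampled`, `IsSampled`, `AsRaw`) stays in `plog` while the origin is reachable only through the unexported `getOrig()`. A minimal usage sketch of that exported API, consistent with what the updated `TestLogRecordFlags` asserts (the standalone `main` wrapper is illustrative only):

```golang
package main

import (
	"fmt"

	"go.opentelemetry.io/collector/pdata/plog"
)

func main() {
	// NewLogRecordFlags returns an empty flags value; no bits are set yet.
	flags := plog.NewLogRecordFlags()

	// SetIsSampled flips the sampled bit on the underlying uint32.
	flags.SetIsSampled(true)

	fmt.Println(flags.IsSampled()) // true
	fmt.Println(flags.AsRaw())     // 1, the raw OTLP uint32 representation
}
```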
diff --git a/pdata/pmetric/alias.go b/pdata/pmetric/alias.go deleted file mode 100644 index 60c48b69168..00000000000 --- a/pdata/pmetric/alias.go +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright The OpenTelemetry Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package pmetric // import "go.opentelemetry.io/collector/pdata/pmetric" - -import "go.opentelemetry.io/collector/pdata/internal" // This file contains aliases for metric data structures. - -// Metrics is the top-level struct that is propagated through the metrics pipeline. -// Use NewMetrics to create new instance, zero-initialized instance is not valid for use. -type Metrics = internal.Metrics - -// NewMetrics creates a new Metrics struct. -var NewMetrics = internal.NewMetrics - -// MetricDataType specifies the type of data in a Metric. -type MetricDataType = internal.MetricDataType - -const ( - MetricDataTypeNone = internal.MetricDataTypeNone - MetricDataTypeGauge = internal.MetricDataTypeGauge - MetricDataTypeSum = internal.MetricDataTypeSum - MetricDataTypeHistogram = internal.MetricDataTypeHistogram - MetricDataTypeExponentialHistogram = internal.MetricDataTypeExponentialHistogram - MetricDataTypeSummary = internal.MetricDataTypeSummary -) - -// MetricAggregationTemporality defines how a metric aggregator reports aggregated values. -// It describes how those values relate to the time interval over which they are aggregated. -type MetricAggregationTemporality = internal.MetricAggregationTemporality - -const ( - // MetricAggregationTemporalityUnspecified is the default MetricAggregationTemporality, it MUST NOT be used. - MetricAggregationTemporalityUnspecified = internal.MetricAggregationTemporalityUnspecified - - // MetricAggregationTemporalityDelta is a MetricAggregationTemporality for a metric aggregator which reports changes since last report time. - MetricAggregationTemporalityDelta = internal.MetricAggregationTemporalityDelta - - // MetricAggregationTemporalityCumulative is a MetricAggregationTemporality for a metric aggregator which reports changes since a fixed start time. - MetricAggregationTemporalityCumulative = internal.MetricAggregationTemporalityCumulative -) - -// Deprecated: [0.59.0] Use MetricDataPointFlags instead -type MetricDataPointFlagsStruct = MetricDataPointFlags - -// Deprecated: [0.59.0] Use NewMetricDataPointFlags instead -var NewMetricDataPointFlagsStruct = NewMetricDataPointFlags - -// MetricDataPointFlags defines how a metric aggregator reports aggregated values. -// It describes how those values relate to the time interval over which they are aggregated. -type MetricDataPointFlags = internal.MetricDataPointFlags - -// NewMetricDataPointFlags returns a new empty MetricDataPointFlags. -var NewMetricDataPointFlags = internal.NewMetricDataPointFlags - -// NumberDataPointValueType specifies the type of NumberDataPoint value. 
-type NumberDataPointValueType = internal.NumberDataPointValueType - -const ( - NumberDataPointValueTypeNone = internal.NumberDataPointValueTypeNone - NumberDataPointValueTypeInt = internal.NumberDataPointValueTypeInt - NumberDataPointValueTypeDouble = internal.NumberDataPointValueTypeDouble -) - -// ExemplarValueType specifies the type of Exemplar measurement value. -type ExemplarValueType = internal.ExemplarValueType - -const ( - ExemplarValueTypeNone = internal.ExemplarValueTypeNone - ExemplarValueTypeInt = internal.ExemplarValueTypeInt - ExemplarValueTypeDouble = internal.ExemplarValueTypeDouble -) diff --git a/pdata/pmetric/generated_alias.go b/pdata/pmetric/generated_alias.go deleted file mode 100644 index 86343ebe339..00000000000 --- a/pdata/pmetric/generated_alias.go +++ /dev/null @@ -1,325 +0,0 @@ -// Copyright The OpenTelemetry Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. -// To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". - -package pmetric - -import "go.opentelemetry.io/collector/pdata/internal" - -// ResourceMetricsSlice logically represents a slice of ResourceMetrics. -// -// This is a reference type. If passed by value and callee modifies it, the -// caller will see the modification. -// -// Must use NewResourceMetricsSlice function to create new instances. -// Important: zero-initialized instance is not valid for use. -type ResourceMetricsSlice = internal.ResourceMetricsSlice - -// NewResourceMetricsSlice creates a ResourceMetricsSlice with 0 elements. -// Can use "EnsureCapacity" to initialize with a given capacity. -var NewResourceMetricsSlice = internal.NewResourceMetricsSlice - -// ResourceMetrics is a collection of metrics from a Resource. -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewResourceMetrics function to create new instances. -// Important: zero-initialized instance is not valid for use. -type ResourceMetrics = internal.ResourceMetrics - -// NewResourceMetrics is an alias for a function to create a new empty ResourceMetrics. -var NewResourceMetrics = internal.NewResourceMetrics - -// ScopeMetricsSlice logically represents a slice of ScopeMetrics. -// -// This is a reference type. If passed by value and callee modifies it, the -// caller will see the modification. -// -// Must use NewScopeMetricsSlice function to create new instances. -// Important: zero-initialized instance is not valid for use. -type ScopeMetricsSlice = internal.ScopeMetricsSlice - -// NewScopeMetricsSlice creates a ScopeMetricsSlice with 0 elements. -// Can use "EnsureCapacity" to initialize with a given capacity. -var NewScopeMetricsSlice = internal.NewScopeMetricsSlice - -// ScopeMetrics is a collection of metrics from a LibraryInstrumentation. -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. 
-// -// Must use NewScopeMetrics function to create new instances. -// Important: zero-initialized instance is not valid for use. -type ScopeMetrics = internal.ScopeMetrics - -// NewScopeMetrics is an alias for a function to create a new empty ScopeMetrics. -var NewScopeMetrics = internal.NewScopeMetrics - -// MetricSlice logically represents a slice of Metric. -// -// This is a reference type. If passed by value and callee modifies it, the -// caller will see the modification. -// -// Must use NewMetricSlice function to create new instances. -// Important: zero-initialized instance is not valid for use. -type MetricSlice = internal.MetricSlice - -// NewMetricSlice creates a MetricSlice with 0 elements. -// Can use "EnsureCapacity" to initialize with a given capacity. -var NewMetricSlice = internal.NewMetricSlice - -// Metric represents one metric as a collection of datapoints. -// See Metric definition in OTLP: https://github.com/open-telemetry/opentelemetry-proto/blob/main/opentelemetry/proto/metrics/v1/metrics.proto -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewMetric function to create new instances. -// Important: zero-initialized instance is not valid for use. -type Metric = internal.Metric - -// NewMetric is an alias for a function to create a new empty Metric. -var NewMetric = internal.NewMetric - -// Gauge represents the type of a numeric metric that always exports the "current value" for every data point. -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewGauge function to create new instances. -// Important: zero-initialized instance is not valid for use. -type Gauge = internal.Gauge - -// NewGauge is an alias for a function to create a new empty Gauge. -var NewGauge = internal.NewGauge - -// Sum represents the type of a numeric metric that is calculated as a sum of all reported measurements over a time interval. -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewSum function to create new instances. -// Important: zero-initialized instance is not valid for use. -type Sum = internal.Sum - -// NewSum is an alias for a function to create a new empty Sum. -var NewSum = internal.NewSum - -// Histogram represents the type of a metric that is calculated by aggregating as a Histogram of all reported measurements over a time interval. -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewHistogram function to create new instances. -// Important: zero-initialized instance is not valid for use. -type Histogram = internal.Histogram - -// NewHistogram is an alias for a function to create a new empty Histogram. -var NewHistogram = internal.NewHistogram - -// ExponentialHistogram represents the type of a metric that is calculated by aggregating -// as a ExponentialHistogram of all reported double measurements over a time interval. -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewExponentialHistogram function to create new instances. -// Important: zero-initialized instance is not valid for use. -type ExponentialHistogram = internal.ExponentialHistogram - -// NewExponentialHistogram is an alias for a function to create a new empty ExponentialHistogram. 
-var NewExponentialHistogram = internal.NewExponentialHistogram - -// Summary represents the type of a metric that is calculated by aggregating as a Summary of all reported double measurements over a time interval. -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewSummary function to create new instances. -// Important: zero-initialized instance is not valid for use. -type Summary = internal.Summary - -// NewSummary is an alias for a function to create a new empty Summary. -var NewSummary = internal.NewSummary - -// NumberDataPointSlice logically represents a slice of NumberDataPoint. -// -// This is a reference type. If passed by value and callee modifies it, the -// caller will see the modification. -// -// Must use NewNumberDataPointSlice function to create new instances. -// Important: zero-initialized instance is not valid for use. -type NumberDataPointSlice = internal.NumberDataPointSlice - -// NewNumberDataPointSlice creates a NumberDataPointSlice with 0 elements. -// Can use "EnsureCapacity" to initialize with a given capacity. -var NewNumberDataPointSlice = internal.NewNumberDataPointSlice - -// NumberDataPoint is a single data point in a timeseries that describes the time-varying value of a number metric. -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewNumberDataPoint function to create new instances. -// Important: zero-initialized instance is not valid for use. -type NumberDataPoint = internal.NumberDataPoint - -// NewNumberDataPoint is an alias for a function to create a new empty NumberDataPoint. -var NewNumberDataPoint = internal.NewNumberDataPoint - -// HistogramDataPointSlice logically represents a slice of HistogramDataPoint. -// -// This is a reference type. If passed by value and callee modifies it, the -// caller will see the modification. -// -// Must use NewHistogramDataPointSlice function to create new instances. -// Important: zero-initialized instance is not valid for use. -type HistogramDataPointSlice = internal.HistogramDataPointSlice - -// NewHistogramDataPointSlice creates a HistogramDataPointSlice with 0 elements. -// Can use "EnsureCapacity" to initialize with a given capacity. -var NewHistogramDataPointSlice = internal.NewHistogramDataPointSlice - -// HistogramDataPoint is a single data point in a timeseries that describes the time-varying values of a Histogram of values. -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewHistogramDataPoint function to create new instances. -// Important: zero-initialized instance is not valid for use. -type HistogramDataPoint = internal.HistogramDataPoint - -// NewHistogramDataPoint is an alias for a function to create a new empty HistogramDataPoint. -var NewHistogramDataPoint = internal.NewHistogramDataPoint - -// ExponentialHistogramDataPointSlice logically represents a slice of ExponentialHistogramDataPoint. -// -// This is a reference type. If passed by value and callee modifies it, the -// caller will see the modification. -// -// Must use NewExponentialHistogramDataPointSlice function to create new instances. -// Important: zero-initialized instance is not valid for use. 
-type ExponentialHistogramDataPointSlice = internal.ExponentialHistogramDataPointSlice - -// NewExponentialHistogramDataPointSlice creates a ExponentialHistogramDataPointSlice with 0 elements. -// Can use "EnsureCapacity" to initialize with a given capacity. -var NewExponentialHistogramDataPointSlice = internal.NewExponentialHistogramDataPointSlice - -// ExponentialHistogramDataPoint is a single data point in a timeseries that describes the -// time-varying values of a ExponentialHistogram of double values. A ExponentialHistogram contains -// summary statistics for a population of values, it may optionally contain the -// distribution of those values across a set of buckets. -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewExponentialHistogramDataPoint function to create new instances. -// Important: zero-initialized instance is not valid for use. -type ExponentialHistogramDataPoint = internal.ExponentialHistogramDataPoint - -// NewExponentialHistogramDataPoint is an alias for a function to create a new empty ExponentialHistogramDataPoint. -var NewExponentialHistogramDataPoint = internal.NewExponentialHistogramDataPoint - -// Buckets are a set of bucket counts, encoded in a contiguous array of counts. -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewBuckets function to create new instances. -// Important: zero-initialized instance is not valid for use. -type Buckets = internal.Buckets - -// NewBuckets is an alias for a function to create a new empty Buckets. -var NewBuckets = internal.NewBuckets - -// SummaryDataPointSlice logically represents a slice of SummaryDataPoint. -// -// This is a reference type. If passed by value and callee modifies it, the -// caller will see the modification. -// -// Must use NewSummaryDataPointSlice function to create new instances. -// Important: zero-initialized instance is not valid for use. -type SummaryDataPointSlice = internal.SummaryDataPointSlice - -// NewSummaryDataPointSlice creates a SummaryDataPointSlice with 0 elements. -// Can use "EnsureCapacity" to initialize with a given capacity. -var NewSummaryDataPointSlice = internal.NewSummaryDataPointSlice - -// SummaryDataPoint is a single data point in a timeseries that describes the time-varying values of a Summary of double values. -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewSummaryDataPoint function to create new instances. -// Important: zero-initialized instance is not valid for use. -type SummaryDataPoint = internal.SummaryDataPoint - -// NewSummaryDataPoint is an alias for a function to create a new empty SummaryDataPoint. -var NewSummaryDataPoint = internal.NewSummaryDataPoint - -// ValueAtQuantileSlice logically represents a slice of ValueAtQuantile. -// -// This is a reference type. If passed by value and callee modifies it, the -// caller will see the modification. -// -// Must use NewValueAtQuantileSlice function to create new instances. -// Important: zero-initialized instance is not valid for use. -type ValueAtQuantileSlice = internal.ValueAtQuantileSlice - -// NewValueAtQuantileSlice creates a ValueAtQuantileSlice with 0 elements. -// Can use "EnsureCapacity" to initialize with a given capacity. 
-var NewValueAtQuantileSlice = internal.NewValueAtQuantileSlice - -// ValueAtQuantile is a quantile value within a Summary data point. -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewValueAtQuantile function to create new instances. -// Important: zero-initialized instance is not valid for use. -type ValueAtQuantile = internal.ValueAtQuantile - -// NewValueAtQuantile is an alias for a function to create a new empty ValueAtQuantile. -var NewValueAtQuantile = internal.NewValueAtQuantile - -// ExemplarSlice logically represents a slice of Exemplar. -// -// This is a reference type. If passed by value and callee modifies it, the -// caller will see the modification. -// -// Must use NewExemplarSlice function to create new instances. -// Important: zero-initialized instance is not valid for use. -type ExemplarSlice = internal.ExemplarSlice - -// NewExemplarSlice creates a ExemplarSlice with 0 elements. -// Can use "EnsureCapacity" to initialize with a given capacity. -var NewExemplarSlice = internal.NewExemplarSlice - -// Exemplar is a sample input double measurement. -// -// Exemplars also hold information about the environment when the measurement was recorded, -// for example the span and trace ID of the active span when the exemplar was recorded. -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewExemplar function to create new instances. -// Important: zero-initialized instance is not valid for use. -type Exemplar = internal.Exemplar - -// NewExemplar is an alias for a function to create a new empty Exemplar. -var NewExemplar = internal.NewExemplar diff --git a/pdata/internal/generated_pmetric.go b/pdata/pmetric/generated_metrics.go similarity index 72% rename from pdata/internal/generated_pmetric.go rename to pdata/pmetric/generated_metrics.go index e73b6426c6c..cfefd9189d6 100644 --- a/pdata/internal/generated_pmetric.go +++ b/pdata/pmetric/generated_metrics.go @@ -15,12 +15,14 @@ // Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. // To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". -package internal +package pmetric import ( "sort" + "go.opentelemetry.io/collector/pdata/internal" otlpmetrics "go.opentelemetry.io/collector/pdata/internal/data/protogen/metrics/v1" + "go.opentelemetry.io/collector/pdata/pcommon" ) // ResourceMetricsSlice logically represents a slice of ResourceMetrics. @@ -30,28 +32,28 @@ import ( // // Must use NewResourceMetricsSlice function to create new instances. // Important: zero-initialized instance is not valid for use. -type ResourceMetricsSlice struct { - // orig points to the slice otlpmetrics.ResourceMetrics field contained somewhere else. - // We use pointer-to-slice to be able to modify it in functions like EnsureCapacity. - orig *[]*otlpmetrics.ResourceMetrics -} +type ResourceMetricsSlice internal.ResourceMetricsSlice func newResourceMetricsSlice(orig *[]*otlpmetrics.ResourceMetrics) ResourceMetricsSlice { - return ResourceMetricsSlice{orig} + return ResourceMetricsSlice(internal.NewResourceMetricsSlice(orig)) +} + +func (ms ResourceMetricsSlice) getOrig() *[]*otlpmetrics.ResourceMetrics { + return internal.GetOrigResourceMetricsSlice(internal.ResourceMetricsSlice(ms)) } // NewResourceMetricsSlice creates a ResourceMetricsSlice with 0 elements. // Can use "EnsureCapacity" to initialize with a given capacity. 
func NewResourceMetricsSlice() ResourceMetricsSlice { orig := []*otlpmetrics.ResourceMetrics(nil) - return ResourceMetricsSlice{&orig} + return newResourceMetricsSlice(&orig) } // Len returns the number of elements in the slice. // // Returns "0" for a newly instance created with "NewResourceMetricsSlice()". func (es ResourceMetricsSlice) Len() int { - return len(*es.orig) + return len(*es.getOrig()) } // At returns the element at the given index. @@ -63,27 +65,27 @@ func (es ResourceMetricsSlice) Len() int { // ... // Do something with the element // } func (es ResourceMetricsSlice) At(ix int) ResourceMetrics { - return newResourceMetrics((*es.orig)[ix]) + return newResourceMetrics((*es.getOrig())[ix]) } // CopyTo copies all elements from the current slice to the dest. func (es ResourceMetricsSlice) CopyTo(dest ResourceMetricsSlice) { srcLen := es.Len() - destCap := cap(*dest.orig) + destCap := cap(*dest.getOrig()) if srcLen <= destCap { - (*dest.orig) = (*dest.orig)[:srcLen:destCap] - for i := range *es.orig { - newResourceMetrics((*es.orig)[i]).CopyTo(newResourceMetrics((*dest.orig)[i])) + (*dest.getOrig()) = (*dest.getOrig())[:srcLen:destCap] + for i := range *es.getOrig() { + newResourceMetrics((*es.getOrig())[i]).CopyTo(newResourceMetrics((*dest.getOrig())[i])) } return } origs := make([]otlpmetrics.ResourceMetrics, srcLen) wrappers := make([]*otlpmetrics.ResourceMetrics, srcLen) - for i := range *es.orig { + for i := range *es.getOrig() { wrappers[i] = &origs[i] - newResourceMetrics((*es.orig)[i]).CopyTo(newResourceMetrics(wrappers[i])) + newResourceMetrics((*es.getOrig())[i]).CopyTo(newResourceMetrics(wrappers[i])) } - *dest.orig = wrappers + *dest.getOrig() = wrappers } // EnsureCapacity is an operation that ensures the slice has at least the specified capacity. @@ -99,20 +101,20 @@ func (es ResourceMetricsSlice) CopyTo(dest ResourceMetricsSlice) { // // Here should set all the values for e. // } func (es ResourceMetricsSlice) EnsureCapacity(newCap int) { - oldCap := cap(*es.orig) + oldCap := cap(*es.getOrig()) if newCap <= oldCap { return } - newOrig := make([]*otlpmetrics.ResourceMetrics, len(*es.orig), newCap) - copy(newOrig, *es.orig) - *es.orig = newOrig + newOrig := make([]*otlpmetrics.ResourceMetrics, len(*es.getOrig()), newCap) + copy(newOrig, *es.getOrig()) + *es.getOrig() = newOrig } // AppendEmpty will append to the end of the slice an empty ResourceMetrics. // It returns the newly added ResourceMetrics. func (es ResourceMetricsSlice) AppendEmpty() ResourceMetrics { - *es.orig = append(*es.orig, &otlpmetrics.ResourceMetrics{}) + *es.getOrig() = append(*es.getOrig(), &otlpmetrics.ResourceMetrics{}) return es.At(es.Len() - 1) } @@ -125,29 +127,29 @@ func (es ResourceMetricsSlice) AppendEmpty() ResourceMetrics { // lessFunc := func(a, b ResourceMetrics) bool { // return a.Name() < b.Name() // choose any comparison here // } -// assert.EqualValues(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) +// assert.Equal(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) func (es ResourceMetricsSlice) Sort(less func(a, b ResourceMetrics) bool) ResourceMetricsSlice { - sort.SliceStable(*es.orig, func(i, j int) bool { return less(es.At(i), es.At(j)) }) + sort.SliceStable(*es.getOrig(), func(i, j int) bool { return less(es.At(i), es.At(j)) }) return es } // MoveAndAppendTo moves all elements from the current slice and appends them to the dest. // The current slice will be cleared. 
func (es ResourceMetricsSlice) MoveAndAppendTo(dest ResourceMetricsSlice) { - if *dest.orig == nil { + if *dest.getOrig() == nil { // We can simply move the entire vector and avoid any allocations. - *dest.orig = *es.orig + *dest.getOrig() = *es.getOrig() } else { - *dest.orig = append(*dest.orig, *es.orig...) + *dest.getOrig() = append(*dest.getOrig(), *es.getOrig()...) } - *es.orig = nil + *es.getOrig() = nil } // RemoveIf calls f sequentially for each element present in the slice. // If f returns true, the element is removed from the slice. func (es ResourceMetricsSlice) RemoveIf(f func(ResourceMetrics) bool) { newLen := 0 - for i := 0; i < len(*es.orig); i++ { + for i := 0; i < len(*es.getOrig()); i++ { if f(es.At(i)) { continue } @@ -156,11 +158,11 @@ func (es ResourceMetricsSlice) RemoveIf(f func(ResourceMetrics) bool) { newLen++ continue } - (*es.orig)[newLen] = (*es.orig)[i] + (*es.getOrig())[newLen] = (*es.getOrig())[i] newLen++ } // TODO: Prevent memory leak by erasing truncated values. - *es.orig = (*es.orig)[:newLen] + *es.getOrig() = (*es.getOrig())[:newLen] } // ResourceMetrics is a collection of metrics from a Resource. @@ -170,12 +172,15 @@ func (es ResourceMetricsSlice) RemoveIf(f func(ResourceMetrics) bool) { // // Must use NewResourceMetrics function to create new instances. // Important: zero-initialized instance is not valid for use. -type ResourceMetrics struct { - orig *otlpmetrics.ResourceMetrics -} + +type ResourceMetrics internal.ResourceMetrics func newResourceMetrics(orig *otlpmetrics.ResourceMetrics) ResourceMetrics { - return ResourceMetrics{orig: orig} + return ResourceMetrics(internal.NewResourceMetrics(orig)) +} + +func (ms ResourceMetrics) getOrig() *otlpmetrics.ResourceMetrics { + return internal.GetOrigResourceMetrics(internal.ResourceMetrics(ms)) } // NewResourceMetrics creates a new empty ResourceMetrics. @@ -189,28 +194,28 @@ func NewResourceMetrics() ResourceMetrics { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms ResourceMetrics) MoveTo(dest ResourceMetrics) { - *dest.orig = *ms.orig - *ms.orig = otlpmetrics.ResourceMetrics{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlpmetrics.ResourceMetrics{} } // Resource returns the resource associated with this ResourceMetrics. -func (ms ResourceMetrics) Resource() Resource { - return newResource(&ms.orig.Resource) +func (ms ResourceMetrics) Resource() pcommon.Resource { + return pcommon.Resource(internal.NewResource(&ms.getOrig().Resource)) } // SchemaUrl returns the schemaurl associated with this ResourceMetrics. func (ms ResourceMetrics) SchemaUrl() string { - return ms.orig.SchemaUrl + return ms.getOrig().SchemaUrl } // SetSchemaUrl replaces the schemaurl associated with this ResourceMetrics. func (ms ResourceMetrics) SetSchemaUrl(v string) { - ms.orig.SchemaUrl = v + ms.getOrig().SchemaUrl = v } // ScopeMetrics returns the ScopeMetrics associated with this ResourceMetrics. func (ms ResourceMetrics) ScopeMetrics() ScopeMetricsSlice { - return newScopeMetricsSlice(&ms.orig.ScopeMetrics) + return ScopeMetricsSlice(internal.NewScopeMetricsSlice(&ms.getOrig().ScopeMetrics)) } // CopyTo copies all properties from the current struct to the dest. @@ -227,28 +232,28 @@ func (ms ResourceMetrics) CopyTo(dest ResourceMetrics) { // // Must use NewScopeMetricsSlice function to create new instances. // Important: zero-initialized instance is not valid for use. 
-type ScopeMetricsSlice struct { - // orig points to the slice otlpmetrics.ScopeMetrics field contained somewhere else. - // We use pointer-to-slice to be able to modify it in functions like EnsureCapacity. - orig *[]*otlpmetrics.ScopeMetrics -} +type ScopeMetricsSlice internal.ScopeMetricsSlice func newScopeMetricsSlice(orig *[]*otlpmetrics.ScopeMetrics) ScopeMetricsSlice { - return ScopeMetricsSlice{orig} + return ScopeMetricsSlice(internal.NewScopeMetricsSlice(orig)) +} + +func (ms ScopeMetricsSlice) getOrig() *[]*otlpmetrics.ScopeMetrics { + return internal.GetOrigScopeMetricsSlice(internal.ScopeMetricsSlice(ms)) } // NewScopeMetricsSlice creates a ScopeMetricsSlice with 0 elements. // Can use "EnsureCapacity" to initialize with a given capacity. func NewScopeMetricsSlice() ScopeMetricsSlice { orig := []*otlpmetrics.ScopeMetrics(nil) - return ScopeMetricsSlice{&orig} + return newScopeMetricsSlice(&orig) } // Len returns the number of elements in the slice. // // Returns "0" for a newly instance created with "NewScopeMetricsSlice()". func (es ScopeMetricsSlice) Len() int { - return len(*es.orig) + return len(*es.getOrig()) } // At returns the element at the given index. @@ -260,27 +265,27 @@ func (es ScopeMetricsSlice) Len() int { // ... // Do something with the element // } func (es ScopeMetricsSlice) At(ix int) ScopeMetrics { - return newScopeMetrics((*es.orig)[ix]) + return newScopeMetrics((*es.getOrig())[ix]) } // CopyTo copies all elements from the current slice to the dest. func (es ScopeMetricsSlice) CopyTo(dest ScopeMetricsSlice) { srcLen := es.Len() - destCap := cap(*dest.orig) + destCap := cap(*dest.getOrig()) if srcLen <= destCap { - (*dest.orig) = (*dest.orig)[:srcLen:destCap] - for i := range *es.orig { - newScopeMetrics((*es.orig)[i]).CopyTo(newScopeMetrics((*dest.orig)[i])) + (*dest.getOrig()) = (*dest.getOrig())[:srcLen:destCap] + for i := range *es.getOrig() { + newScopeMetrics((*es.getOrig())[i]).CopyTo(newScopeMetrics((*dest.getOrig())[i])) } return } origs := make([]otlpmetrics.ScopeMetrics, srcLen) wrappers := make([]*otlpmetrics.ScopeMetrics, srcLen) - for i := range *es.orig { + for i := range *es.getOrig() { wrappers[i] = &origs[i] - newScopeMetrics((*es.orig)[i]).CopyTo(newScopeMetrics(wrappers[i])) + newScopeMetrics((*es.getOrig())[i]).CopyTo(newScopeMetrics(wrappers[i])) } - *dest.orig = wrappers + *dest.getOrig() = wrappers } // EnsureCapacity is an operation that ensures the slice has at least the specified capacity. @@ -296,20 +301,20 @@ func (es ScopeMetricsSlice) CopyTo(dest ScopeMetricsSlice) { // // Here should set all the values for e. // } func (es ScopeMetricsSlice) EnsureCapacity(newCap int) { - oldCap := cap(*es.orig) + oldCap := cap(*es.getOrig()) if newCap <= oldCap { return } - newOrig := make([]*otlpmetrics.ScopeMetrics, len(*es.orig), newCap) - copy(newOrig, *es.orig) - *es.orig = newOrig + newOrig := make([]*otlpmetrics.ScopeMetrics, len(*es.getOrig()), newCap) + copy(newOrig, *es.getOrig()) + *es.getOrig() = newOrig } // AppendEmpty will append to the end of the slice an empty ScopeMetrics. // It returns the newly added ScopeMetrics. 
func (es ScopeMetricsSlice) AppendEmpty() ScopeMetrics { - *es.orig = append(*es.orig, &otlpmetrics.ScopeMetrics{}) + *es.getOrig() = append(*es.getOrig(), &otlpmetrics.ScopeMetrics{}) return es.At(es.Len() - 1) } @@ -322,29 +327,29 @@ func (es ScopeMetricsSlice) AppendEmpty() ScopeMetrics { // lessFunc := func(a, b ScopeMetrics) bool { // return a.Name() < b.Name() // choose any comparison here // } -// assert.EqualValues(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) +// assert.Equal(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) func (es ScopeMetricsSlice) Sort(less func(a, b ScopeMetrics) bool) ScopeMetricsSlice { - sort.SliceStable(*es.orig, func(i, j int) bool { return less(es.At(i), es.At(j)) }) + sort.SliceStable(*es.getOrig(), func(i, j int) bool { return less(es.At(i), es.At(j)) }) return es } // MoveAndAppendTo moves all elements from the current slice and appends them to the dest. // The current slice will be cleared. func (es ScopeMetricsSlice) MoveAndAppendTo(dest ScopeMetricsSlice) { - if *dest.orig == nil { + if *dest.getOrig() == nil { // We can simply move the entire vector and avoid any allocations. - *dest.orig = *es.orig + *dest.getOrig() = *es.getOrig() } else { - *dest.orig = append(*dest.orig, *es.orig...) + *dest.getOrig() = append(*dest.getOrig(), *es.getOrig()...) } - *es.orig = nil + *es.getOrig() = nil } // RemoveIf calls f sequentially for each element present in the slice. // If f returns true, the element is removed from the slice. func (es ScopeMetricsSlice) RemoveIf(f func(ScopeMetrics) bool) { newLen := 0 - for i := 0; i < len(*es.orig); i++ { + for i := 0; i < len(*es.getOrig()); i++ { if f(es.At(i)) { continue } @@ -353,11 +358,11 @@ func (es ScopeMetricsSlice) RemoveIf(f func(ScopeMetrics) bool) { newLen++ continue } - (*es.orig)[newLen] = (*es.orig)[i] + (*es.getOrig())[newLen] = (*es.getOrig())[i] newLen++ } // TODO: Prevent memory leak by erasing truncated values. - *es.orig = (*es.orig)[:newLen] + *es.getOrig() = (*es.getOrig())[:newLen] } // ScopeMetrics is a collection of metrics from a LibraryInstrumentation. @@ -367,12 +372,15 @@ func (es ScopeMetricsSlice) RemoveIf(f func(ScopeMetrics) bool) { // // Must use NewScopeMetrics function to create new instances. // Important: zero-initialized instance is not valid for use. -type ScopeMetrics struct { - orig *otlpmetrics.ScopeMetrics -} + +type ScopeMetrics internal.ScopeMetrics func newScopeMetrics(orig *otlpmetrics.ScopeMetrics) ScopeMetrics { - return ScopeMetrics{orig: orig} + return ScopeMetrics(internal.NewScopeMetrics(orig)) +} + +func (ms ScopeMetrics) getOrig() *otlpmetrics.ScopeMetrics { + return internal.GetOrigScopeMetrics(internal.ScopeMetrics(ms)) } // NewScopeMetrics creates a new empty ScopeMetrics. @@ -386,28 +394,28 @@ func NewScopeMetrics() ScopeMetrics { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms ScopeMetrics) MoveTo(dest ScopeMetrics) { - *dest.orig = *ms.orig - *ms.orig = otlpmetrics.ScopeMetrics{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlpmetrics.ScopeMetrics{} } // Scope returns the scope associated with this ScopeMetrics. -func (ms ScopeMetrics) Scope() InstrumentationScope { - return newInstrumentationScope(&ms.orig.Scope) +func (ms ScopeMetrics) Scope() pcommon.InstrumentationScope { + return pcommon.InstrumentationScope(internal.NewInstrumentationScope(&ms.getOrig().Scope)) } // SchemaUrl returns the schemaurl associated with this ScopeMetrics. 
func (ms ScopeMetrics) SchemaUrl() string { - return ms.orig.SchemaUrl + return ms.getOrig().SchemaUrl } // SetSchemaUrl replaces the schemaurl associated with this ScopeMetrics. func (ms ScopeMetrics) SetSchemaUrl(v string) { - ms.orig.SchemaUrl = v + ms.getOrig().SchemaUrl = v } // Metrics returns the Metrics associated with this ScopeMetrics. func (ms ScopeMetrics) Metrics() MetricSlice { - return newMetricSlice(&ms.orig.Metrics) + return MetricSlice(internal.NewMetricSlice(&ms.getOrig().Metrics)) } // CopyTo copies all properties from the current struct to the dest. @@ -424,28 +432,28 @@ func (ms ScopeMetrics) CopyTo(dest ScopeMetrics) { // // Must use NewMetricSlice function to create new instances. // Important: zero-initialized instance is not valid for use. -type MetricSlice struct { - // orig points to the slice otlpmetrics.Metric field contained somewhere else. - // We use pointer-to-slice to be able to modify it in functions like EnsureCapacity. - orig *[]*otlpmetrics.Metric -} +type MetricSlice internal.MetricSlice func newMetricSlice(orig *[]*otlpmetrics.Metric) MetricSlice { - return MetricSlice{orig} + return MetricSlice(internal.NewMetricSlice(orig)) +} + +func (ms MetricSlice) getOrig() *[]*otlpmetrics.Metric { + return internal.GetOrigMetricSlice(internal.MetricSlice(ms)) } // NewMetricSlice creates a MetricSlice with 0 elements. // Can use "EnsureCapacity" to initialize with a given capacity. func NewMetricSlice() MetricSlice { orig := []*otlpmetrics.Metric(nil) - return MetricSlice{&orig} + return newMetricSlice(&orig) } // Len returns the number of elements in the slice. // // Returns "0" for a newly instance created with "NewMetricSlice()". func (es MetricSlice) Len() int { - return len(*es.orig) + return len(*es.getOrig()) } // At returns the element at the given index. @@ -457,27 +465,27 @@ func (es MetricSlice) Len() int { // ... // Do something with the element // } func (es MetricSlice) At(ix int) Metric { - return newMetric((*es.orig)[ix]) + return newMetric((*es.getOrig())[ix]) } // CopyTo copies all elements from the current slice to the dest. func (es MetricSlice) CopyTo(dest MetricSlice) { srcLen := es.Len() - destCap := cap(*dest.orig) + destCap := cap(*dest.getOrig()) if srcLen <= destCap { - (*dest.orig) = (*dest.orig)[:srcLen:destCap] - for i := range *es.orig { - newMetric((*es.orig)[i]).CopyTo(newMetric((*dest.orig)[i])) + (*dest.getOrig()) = (*dest.getOrig())[:srcLen:destCap] + for i := range *es.getOrig() { + newMetric((*es.getOrig())[i]).CopyTo(newMetric((*dest.getOrig())[i])) } return } origs := make([]otlpmetrics.Metric, srcLen) wrappers := make([]*otlpmetrics.Metric, srcLen) - for i := range *es.orig { + for i := range *es.getOrig() { wrappers[i] = &origs[i] - newMetric((*es.orig)[i]).CopyTo(newMetric(wrappers[i])) + newMetric((*es.getOrig())[i]).CopyTo(newMetric(wrappers[i])) } - *dest.orig = wrappers + *dest.getOrig() = wrappers } // EnsureCapacity is an operation that ensures the slice has at least the specified capacity. @@ -493,20 +501,20 @@ func (es MetricSlice) CopyTo(dest MetricSlice) { // // Here should set all the values for e. 
// } func (es MetricSlice) EnsureCapacity(newCap int) { - oldCap := cap(*es.orig) + oldCap := cap(*es.getOrig()) if newCap <= oldCap { return } - newOrig := make([]*otlpmetrics.Metric, len(*es.orig), newCap) - copy(newOrig, *es.orig) - *es.orig = newOrig + newOrig := make([]*otlpmetrics.Metric, len(*es.getOrig()), newCap) + copy(newOrig, *es.getOrig()) + *es.getOrig() = newOrig } // AppendEmpty will append to the end of the slice an empty Metric. // It returns the newly added Metric. func (es MetricSlice) AppendEmpty() Metric { - *es.orig = append(*es.orig, &otlpmetrics.Metric{}) + *es.getOrig() = append(*es.getOrig(), &otlpmetrics.Metric{}) return es.At(es.Len() - 1) } @@ -519,29 +527,29 @@ func (es MetricSlice) AppendEmpty() Metric { // lessFunc := func(a, b Metric) bool { // return a.Name() < b.Name() // choose any comparison here // } -// assert.EqualValues(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) +// assert.Equal(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) func (es MetricSlice) Sort(less func(a, b Metric) bool) MetricSlice { - sort.SliceStable(*es.orig, func(i, j int) bool { return less(es.At(i), es.At(j)) }) + sort.SliceStable(*es.getOrig(), func(i, j int) bool { return less(es.At(i), es.At(j)) }) return es } // MoveAndAppendTo moves all elements from the current slice and appends them to the dest. // The current slice will be cleared. func (es MetricSlice) MoveAndAppendTo(dest MetricSlice) { - if *dest.orig == nil { + if *dest.getOrig() == nil { // We can simply move the entire vector and avoid any allocations. - *dest.orig = *es.orig + *dest.getOrig() = *es.getOrig() } else { - *dest.orig = append(*dest.orig, *es.orig...) + *dest.getOrig() = append(*dest.getOrig(), *es.getOrig()...) } - *es.orig = nil + *es.getOrig() = nil } // RemoveIf calls f sequentially for each element present in the slice. // If f returns true, the element is removed from the slice. func (es MetricSlice) RemoveIf(f func(Metric) bool) { newLen := 0 - for i := 0; i < len(*es.orig); i++ { + for i := 0; i < len(*es.getOrig()); i++ { if f(es.At(i)) { continue } @@ -550,11 +558,11 @@ func (es MetricSlice) RemoveIf(f func(Metric) bool) { newLen++ continue } - (*es.orig)[newLen] = (*es.orig)[i] + (*es.getOrig())[newLen] = (*es.getOrig())[i] newLen++ } // TODO: Prevent memory leak by erasing truncated values. - *es.orig = (*es.orig)[:newLen] + *es.getOrig() = (*es.getOrig())[:newLen] } // Metric represents one metric as a collection of datapoints. @@ -565,12 +573,15 @@ func (es MetricSlice) RemoveIf(f func(Metric) bool) { // // Must use NewMetric function to create new instances. // Important: zero-initialized instance is not valid for use. -type Metric struct { - orig *otlpmetrics.Metric -} + +type Metric internal.Metric func newMetric(orig *otlpmetrics.Metric) Metric { - return Metric{orig: orig} + return Metric(internal.NewMetric(orig)) +} + +func (ms Metric) getOrig() *otlpmetrics.Metric { + return internal.GetOrigMetric(internal.Metric(ms)) } // NewMetric creates a new empty Metric. @@ -584,44 +595,44 @@ func NewMetric() Metric { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms Metric) MoveTo(dest Metric) { - *dest.orig = *ms.orig - *ms.orig = otlpmetrics.Metric{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlpmetrics.Metric{} } // Name returns the name associated with this Metric. 
func (ms Metric) Name() string { - return ms.orig.Name + return ms.getOrig().Name } // SetName replaces the name associated with this Metric. func (ms Metric) SetName(v string) { - ms.orig.Name = v + ms.getOrig().Name = v } // Description returns the description associated with this Metric. func (ms Metric) Description() string { - return ms.orig.Description + return ms.getOrig().Description } // SetDescription replaces the description associated with this Metric. func (ms Metric) SetDescription(v string) { - ms.orig.Description = v + ms.getOrig().Description = v } // Unit returns the unit associated with this Metric. func (ms Metric) Unit() string { - return ms.orig.Unit + return ms.getOrig().Unit } // SetUnit replaces the unit associated with this Metric. func (ms Metric) SetUnit(v string) { - ms.orig.Unit = v + ms.getOrig().Unit = v } // DataType returns the type of the data for this Metric. // Calling this function on zero-initialized Metric will cause a panic. func (ms Metric) DataType() MetricDataType { - switch ms.orig.Data.(type) { + switch ms.getOrig().Data.(type) { case *otlpmetrics.Metric_Gauge: return MetricDataTypeGauge case *otlpmetrics.Metric_Sum: @@ -643,7 +654,7 @@ func (ms Metric) DataType() MetricDataType { // // Calling this function on zero-initialized Metric will cause a panic. func (ms Metric) Gauge() Gauge { - v, ok := ms.orig.GetData().(*otlpmetrics.Metric_Gauge) + v, ok := ms.getOrig().GetData().(*otlpmetrics.Metric_Gauge) if !ok { return Gauge{} } @@ -657,7 +668,7 @@ func (ms Metric) Gauge() Gauge { // // Calling this function on zero-initialized Metric will cause a panic. func (ms Metric) Sum() Sum { - v, ok := ms.orig.GetData().(*otlpmetrics.Metric_Sum) + v, ok := ms.getOrig().GetData().(*otlpmetrics.Metric_Sum) if !ok { return Sum{} } @@ -671,7 +682,7 @@ func (ms Metric) Sum() Sum { // // Calling this function on zero-initialized Metric will cause a panic. func (ms Metric) Histogram() Histogram { - v, ok := ms.orig.GetData().(*otlpmetrics.Metric_Histogram) + v, ok := ms.getOrig().GetData().(*otlpmetrics.Metric_Histogram) if !ok { return Histogram{} } @@ -685,7 +696,7 @@ func (ms Metric) Histogram() Histogram { // // Calling this function on zero-initialized Metric will cause a panic. func (ms Metric) ExponentialHistogram() ExponentialHistogram { - v, ok := ms.orig.GetData().(*otlpmetrics.Metric_ExponentialHistogram) + v, ok := ms.getOrig().GetData().(*otlpmetrics.Metric_ExponentialHistogram) if !ok { return ExponentialHistogram{} } @@ -699,7 +710,7 @@ func (ms Metric) ExponentialHistogram() ExponentialHistogram { // // Calling this function on zero-initialized Metric will cause a panic. func (ms Metric) Summary() Summary { - v, ok := ms.orig.GetData().(*otlpmetrics.Metric_Summary) + v, ok := ms.getOrig().GetData().(*otlpmetrics.Metric_Summary) if !ok { return Summary{} } @@ -738,12 +749,15 @@ func (ms Metric) CopyTo(dest Metric) { // // Must use NewGauge function to create new instances. // Important: zero-initialized instance is not valid for use. -type Gauge struct { - orig *otlpmetrics.Gauge -} + +type Gauge internal.Gauge func newGauge(orig *otlpmetrics.Gauge) Gauge { - return Gauge{orig: orig} + return Gauge(internal.NewGauge(orig)) +} + +func (ms Gauge) getOrig() *otlpmetrics.Gauge { + return internal.GetOrigGauge(internal.Gauge(ms)) } // NewGauge creates a new empty Gauge. 
@@ -757,13 +771,13 @@ func NewGauge() Gauge { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms Gauge) MoveTo(dest Gauge) { - *dest.orig = *ms.orig - *ms.orig = otlpmetrics.Gauge{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlpmetrics.Gauge{} } // DataPoints returns the DataPoints associated with this Gauge. func (ms Gauge) DataPoints() NumberDataPointSlice { - return newNumberDataPointSlice(&ms.orig.DataPoints) + return NumberDataPointSlice(internal.NewNumberDataPointSlice(&ms.getOrig().DataPoints)) } // CopyTo copies all properties from the current struct to the dest. @@ -778,12 +792,15 @@ func (ms Gauge) CopyTo(dest Gauge) { // // Must use NewSum function to create new instances. // Important: zero-initialized instance is not valid for use. -type Sum struct { - orig *otlpmetrics.Sum -} + +type Sum internal.Sum func newSum(orig *otlpmetrics.Sum) Sum { - return Sum{orig: orig} + return Sum(internal.NewSum(orig)) +} + +func (ms Sum) getOrig() *otlpmetrics.Sum { + return internal.GetOrigSum(internal.Sum(ms)) } // NewSum creates a new empty Sum. @@ -797,33 +814,33 @@ func NewSum() Sum { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms Sum) MoveTo(dest Sum) { - *dest.orig = *ms.orig - *ms.orig = otlpmetrics.Sum{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlpmetrics.Sum{} } // AggregationTemporality returns the aggregationtemporality associated with this Sum. func (ms Sum) AggregationTemporality() MetricAggregationTemporality { - return MetricAggregationTemporality(ms.orig.AggregationTemporality) + return MetricAggregationTemporality(ms.getOrig().AggregationTemporality) } // SetAggregationTemporality replaces the aggregationtemporality associated with this Sum. func (ms Sum) SetAggregationTemporality(v MetricAggregationTemporality) { - ms.orig.AggregationTemporality = otlpmetrics.AggregationTemporality(v) + ms.getOrig().AggregationTemporality = otlpmetrics.AggregationTemporality(v) } // IsMonotonic returns the ismonotonic associated with this Sum. func (ms Sum) IsMonotonic() bool { - return ms.orig.IsMonotonic + return ms.getOrig().IsMonotonic } // SetIsMonotonic replaces the ismonotonic associated with this Sum. func (ms Sum) SetIsMonotonic(v bool) { - ms.orig.IsMonotonic = v + ms.getOrig().IsMonotonic = v } // DataPoints returns the DataPoints associated with this Sum. func (ms Sum) DataPoints() NumberDataPointSlice { - return newNumberDataPointSlice(&ms.orig.DataPoints) + return NumberDataPointSlice(internal.NewNumberDataPointSlice(&ms.getOrig().DataPoints)) } // CopyTo copies all properties from the current struct to the dest. @@ -840,12 +857,15 @@ func (ms Sum) CopyTo(dest Sum) { // // Must use NewHistogram function to create new instances. // Important: zero-initialized instance is not valid for use. -type Histogram struct { - orig *otlpmetrics.Histogram -} + +type Histogram internal.Histogram func newHistogram(orig *otlpmetrics.Histogram) Histogram { - return Histogram{orig: orig} + return Histogram(internal.NewHistogram(orig)) +} + +func (ms Histogram) getOrig() *otlpmetrics.Histogram { + return internal.GetOrigHistogram(internal.Histogram(ms)) } // NewHistogram creates a new empty Histogram. 
@@ -859,23 +879,23 @@ func NewHistogram() Histogram { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms Histogram) MoveTo(dest Histogram) { - *dest.orig = *ms.orig - *ms.orig = otlpmetrics.Histogram{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlpmetrics.Histogram{} } // AggregationTemporality returns the aggregationtemporality associated with this Histogram. func (ms Histogram) AggregationTemporality() MetricAggregationTemporality { - return MetricAggregationTemporality(ms.orig.AggregationTemporality) + return MetricAggregationTemporality(ms.getOrig().AggregationTemporality) } // SetAggregationTemporality replaces the aggregationtemporality associated with this Histogram. func (ms Histogram) SetAggregationTemporality(v MetricAggregationTemporality) { - ms.orig.AggregationTemporality = otlpmetrics.AggregationTemporality(v) + ms.getOrig().AggregationTemporality = otlpmetrics.AggregationTemporality(v) } // DataPoints returns the DataPoints associated with this Histogram. func (ms Histogram) DataPoints() HistogramDataPointSlice { - return newHistogramDataPointSlice(&ms.orig.DataPoints) + return HistogramDataPointSlice(internal.NewHistogramDataPointSlice(&ms.getOrig().DataPoints)) } // CopyTo copies all properties from the current struct to the dest. @@ -892,12 +912,15 @@ func (ms Histogram) CopyTo(dest Histogram) { // // Must use NewExponentialHistogram function to create new instances. // Important: zero-initialized instance is not valid for use. -type ExponentialHistogram struct { - orig *otlpmetrics.ExponentialHistogram -} + +type ExponentialHistogram internal.ExponentialHistogram func newExponentialHistogram(orig *otlpmetrics.ExponentialHistogram) ExponentialHistogram { - return ExponentialHistogram{orig: orig} + return ExponentialHistogram(internal.NewExponentialHistogram(orig)) +} + +func (ms ExponentialHistogram) getOrig() *otlpmetrics.ExponentialHistogram { + return internal.GetOrigExponentialHistogram(internal.ExponentialHistogram(ms)) } // NewExponentialHistogram creates a new empty ExponentialHistogram. @@ -911,23 +934,23 @@ func NewExponentialHistogram() ExponentialHistogram { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms ExponentialHistogram) MoveTo(dest ExponentialHistogram) { - *dest.orig = *ms.orig - *ms.orig = otlpmetrics.ExponentialHistogram{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlpmetrics.ExponentialHistogram{} } // AggregationTemporality returns the aggregationtemporality associated with this ExponentialHistogram. func (ms ExponentialHistogram) AggregationTemporality() MetricAggregationTemporality { - return MetricAggregationTemporality(ms.orig.AggregationTemporality) + return MetricAggregationTemporality(ms.getOrig().AggregationTemporality) } // SetAggregationTemporality replaces the aggregationtemporality associated with this ExponentialHistogram. func (ms ExponentialHistogram) SetAggregationTemporality(v MetricAggregationTemporality) { - ms.orig.AggregationTemporality = otlpmetrics.AggregationTemporality(v) + ms.getOrig().AggregationTemporality = otlpmetrics.AggregationTemporality(v) } // DataPoints returns the DataPoints associated with this ExponentialHistogram. 
func (ms ExponentialHistogram) DataPoints() ExponentialHistogramDataPointSlice { - return newExponentialHistogramDataPointSlice(&ms.orig.DataPoints) + return ExponentialHistogramDataPointSlice(internal.NewExponentialHistogramDataPointSlice(&ms.getOrig().DataPoints)) } // CopyTo copies all properties from the current struct to the dest. @@ -943,12 +966,15 @@ func (ms ExponentialHistogram) CopyTo(dest ExponentialHistogram) { // // Must use NewSummary function to create new instances. // Important: zero-initialized instance is not valid for use. -type Summary struct { - orig *otlpmetrics.Summary -} + +type Summary internal.Summary func newSummary(orig *otlpmetrics.Summary) Summary { - return Summary{orig: orig} + return Summary(internal.NewSummary(orig)) +} + +func (ms Summary) getOrig() *otlpmetrics.Summary { + return internal.GetOrigSummary(internal.Summary(ms)) } // NewSummary creates a new empty Summary. @@ -962,13 +988,13 @@ func NewSummary() Summary { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms Summary) MoveTo(dest Summary) { - *dest.orig = *ms.orig - *ms.orig = otlpmetrics.Summary{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlpmetrics.Summary{} } // DataPoints returns the DataPoints associated with this Summary. func (ms Summary) DataPoints() SummaryDataPointSlice { - return newSummaryDataPointSlice(&ms.orig.DataPoints) + return SummaryDataPointSlice(internal.NewSummaryDataPointSlice(&ms.getOrig().DataPoints)) } // CopyTo copies all properties from the current struct to the dest. @@ -983,28 +1009,28 @@ func (ms Summary) CopyTo(dest Summary) { // // Must use NewNumberDataPointSlice function to create new instances. // Important: zero-initialized instance is not valid for use. -type NumberDataPointSlice struct { - // orig points to the slice otlpmetrics.NumberDataPoint field contained somewhere else. - // We use pointer-to-slice to be able to modify it in functions like EnsureCapacity. - orig *[]*otlpmetrics.NumberDataPoint -} +type NumberDataPointSlice internal.NumberDataPointSlice func newNumberDataPointSlice(orig *[]*otlpmetrics.NumberDataPoint) NumberDataPointSlice { - return NumberDataPointSlice{orig} + return NumberDataPointSlice(internal.NewNumberDataPointSlice(orig)) +} + +func (ms NumberDataPointSlice) getOrig() *[]*otlpmetrics.NumberDataPoint { + return internal.GetOrigNumberDataPointSlice(internal.NumberDataPointSlice(ms)) } // NewNumberDataPointSlice creates a NumberDataPointSlice with 0 elements. // Can use "EnsureCapacity" to initialize with a given capacity. func NewNumberDataPointSlice() NumberDataPointSlice { orig := []*otlpmetrics.NumberDataPoint(nil) - return NumberDataPointSlice{&orig} + return newNumberDataPointSlice(&orig) } // Len returns the number of elements in the slice. // // Returns "0" for a newly instance created with "NewNumberDataPointSlice()". func (es NumberDataPointSlice) Len() int { - return len(*es.orig) + return len(*es.getOrig()) } // At returns the element at the given index. @@ -1016,27 +1042,27 @@ func (es NumberDataPointSlice) Len() int { // ... // Do something with the element // } func (es NumberDataPointSlice) At(ix int) NumberDataPoint { - return newNumberDataPoint((*es.orig)[ix]) + return newNumberDataPoint((*es.getOrig())[ix]) } // CopyTo copies all elements from the current slice to the dest. 
func (es NumberDataPointSlice) CopyTo(dest NumberDataPointSlice) { srcLen := es.Len() - destCap := cap(*dest.orig) + destCap := cap(*dest.getOrig()) if srcLen <= destCap { - (*dest.orig) = (*dest.orig)[:srcLen:destCap] - for i := range *es.orig { - newNumberDataPoint((*es.orig)[i]).CopyTo(newNumberDataPoint((*dest.orig)[i])) + (*dest.getOrig()) = (*dest.getOrig())[:srcLen:destCap] + for i := range *es.getOrig() { + newNumberDataPoint((*es.getOrig())[i]).CopyTo(newNumberDataPoint((*dest.getOrig())[i])) } return } origs := make([]otlpmetrics.NumberDataPoint, srcLen) wrappers := make([]*otlpmetrics.NumberDataPoint, srcLen) - for i := range *es.orig { + for i := range *es.getOrig() { wrappers[i] = &origs[i] - newNumberDataPoint((*es.orig)[i]).CopyTo(newNumberDataPoint(wrappers[i])) + newNumberDataPoint((*es.getOrig())[i]).CopyTo(newNumberDataPoint(wrappers[i])) } - *dest.orig = wrappers + *dest.getOrig() = wrappers } // EnsureCapacity is an operation that ensures the slice has at least the specified capacity. @@ -1052,20 +1078,20 @@ func (es NumberDataPointSlice) CopyTo(dest NumberDataPointSlice) { // // Here should set all the values for e. // } func (es NumberDataPointSlice) EnsureCapacity(newCap int) { - oldCap := cap(*es.orig) + oldCap := cap(*es.getOrig()) if newCap <= oldCap { return } - newOrig := make([]*otlpmetrics.NumberDataPoint, len(*es.orig), newCap) - copy(newOrig, *es.orig) - *es.orig = newOrig + newOrig := make([]*otlpmetrics.NumberDataPoint, len(*es.getOrig()), newCap) + copy(newOrig, *es.getOrig()) + *es.getOrig() = newOrig } // AppendEmpty will append to the end of the slice an empty NumberDataPoint. // It returns the newly added NumberDataPoint. func (es NumberDataPointSlice) AppendEmpty() NumberDataPoint { - *es.orig = append(*es.orig, &otlpmetrics.NumberDataPoint{}) + *es.getOrig() = append(*es.getOrig(), &otlpmetrics.NumberDataPoint{}) return es.At(es.Len() - 1) } @@ -1078,29 +1104,29 @@ func (es NumberDataPointSlice) AppendEmpty() NumberDataPoint { // lessFunc := func(a, b NumberDataPoint) bool { // return a.Name() < b.Name() // choose any comparison here // } -// assert.EqualValues(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) +// assert.Equal(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) func (es NumberDataPointSlice) Sort(less func(a, b NumberDataPoint) bool) NumberDataPointSlice { - sort.SliceStable(*es.orig, func(i, j int) bool { return less(es.At(i), es.At(j)) }) + sort.SliceStable(*es.getOrig(), func(i, j int) bool { return less(es.At(i), es.At(j)) }) return es } // MoveAndAppendTo moves all elements from the current slice and appends them to the dest. // The current slice will be cleared. func (es NumberDataPointSlice) MoveAndAppendTo(dest NumberDataPointSlice) { - if *dest.orig == nil { + if *dest.getOrig() == nil { // We can simply move the entire vector and avoid any allocations. - *dest.orig = *es.orig + *dest.getOrig() = *es.getOrig() } else { - *dest.orig = append(*dest.orig, *es.orig...) + *dest.getOrig() = append(*dest.getOrig(), *es.getOrig()...) } - *es.orig = nil + *es.getOrig() = nil } // RemoveIf calls f sequentially for each element present in the slice. // If f returns true, the element is removed from the slice. 
func (es NumberDataPointSlice) RemoveIf(f func(NumberDataPoint) bool) { newLen := 0 - for i := 0; i < len(*es.orig); i++ { + for i := 0; i < len(*es.getOrig()); i++ { if f(es.At(i)) { continue } @@ -1109,11 +1135,11 @@ func (es NumberDataPointSlice) RemoveIf(f func(NumberDataPoint) bool) { newLen++ continue } - (*es.orig)[newLen] = (*es.orig)[i] + (*es.getOrig())[newLen] = (*es.getOrig())[i] newLen++ } // TODO: Prevent memory leak by erasing truncated values. - *es.orig = (*es.orig)[:newLen] + *es.getOrig() = (*es.getOrig())[:newLen] } // NumberDataPoint is a single data point in a timeseries that describes the time-varying value of a number metric. @@ -1123,12 +1149,15 @@ func (es NumberDataPointSlice) RemoveIf(f func(NumberDataPoint) bool) { // // Must use NewNumberDataPoint function to create new instances. // Important: zero-initialized instance is not valid for use. -type NumberDataPoint struct { - orig *otlpmetrics.NumberDataPoint -} + +type NumberDataPoint internal.NumberDataPoint func newNumberDataPoint(orig *otlpmetrics.NumberDataPoint) NumberDataPoint { - return NumberDataPoint{orig: orig} + return NumberDataPoint(internal.NewNumberDataPoint(orig)) +} + +func (ms NumberDataPoint) getOrig() *otlpmetrics.NumberDataPoint { + return internal.GetOrigNumberDataPoint(internal.NumberDataPoint(ms)) } // NewNumberDataPoint creates a new empty NumberDataPoint. @@ -1142,39 +1171,39 @@ func NewNumberDataPoint() NumberDataPoint { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms NumberDataPoint) MoveTo(dest NumberDataPoint) { - *dest.orig = *ms.orig - *ms.orig = otlpmetrics.NumberDataPoint{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlpmetrics.NumberDataPoint{} } // Attributes returns the Attributes associated with this NumberDataPoint. -func (ms NumberDataPoint) Attributes() Map { - return newMap(&ms.orig.Attributes) +func (ms NumberDataPoint) Attributes() pcommon.Map { + return pcommon.Map(internal.NewMap(&ms.getOrig().Attributes)) } // StartTimestamp returns the starttimestamp associated with this NumberDataPoint. -func (ms NumberDataPoint) StartTimestamp() Timestamp { - return Timestamp(ms.orig.StartTimeUnixNano) +func (ms NumberDataPoint) StartTimestamp() pcommon.Timestamp { + return pcommon.Timestamp(ms.getOrig().StartTimeUnixNano) } // SetStartTimestamp replaces the starttimestamp associated with this NumberDataPoint. -func (ms NumberDataPoint) SetStartTimestamp(v Timestamp) { - ms.orig.StartTimeUnixNano = uint64(v) +func (ms NumberDataPoint) SetStartTimestamp(v pcommon.Timestamp) { + ms.getOrig().StartTimeUnixNano = uint64(v) } // Timestamp returns the timestamp associated with this NumberDataPoint. -func (ms NumberDataPoint) Timestamp() Timestamp { - return Timestamp(ms.orig.TimeUnixNano) +func (ms NumberDataPoint) Timestamp() pcommon.Timestamp { + return pcommon.Timestamp(ms.getOrig().TimeUnixNano) } // SetTimestamp replaces the timestamp associated with this NumberDataPoint. -func (ms NumberDataPoint) SetTimestamp(v Timestamp) { - ms.orig.TimeUnixNano = uint64(v) +func (ms NumberDataPoint) SetTimestamp(v pcommon.Timestamp) { + ms.getOrig().TimeUnixNano = uint64(v) } // ValueType returns the type of the value for this NumberDataPoint. // Calling this function on zero-initialized NumberDataPoint will cause a panic. 
func (ms NumberDataPoint) ValueType() NumberDataPointValueType { - switch ms.orig.Value.(type) { + switch ms.getOrig().Value.(type) { case *otlpmetrics.NumberDataPoint_AsDouble: return NumberDataPointValueTypeDouble case *otlpmetrics.NumberDataPoint_AsInt: @@ -1185,36 +1214,36 @@ func (ms NumberDataPoint) ValueType() NumberDataPointValueType { // DoubleVal returns the doubleval associated with this NumberDataPoint. func (ms NumberDataPoint) DoubleVal() float64 { - return ms.orig.GetAsDouble() + return ms.getOrig().GetAsDouble() } // SetDoubleVal replaces the doubleval associated with this NumberDataPoint. func (ms NumberDataPoint) SetDoubleVal(v float64) { - ms.orig.Value = &otlpmetrics.NumberDataPoint_AsDouble{ + ms.getOrig().Value = &otlpmetrics.NumberDataPoint_AsDouble{ AsDouble: v, } } // IntVal returns the intval associated with this NumberDataPoint. func (ms NumberDataPoint) IntVal() int64 { - return ms.orig.GetAsInt() + return ms.getOrig().GetAsInt() } // SetIntVal replaces the intval associated with this NumberDataPoint. func (ms NumberDataPoint) SetIntVal(v int64) { - ms.orig.Value = &otlpmetrics.NumberDataPoint_AsInt{ + ms.getOrig().Value = &otlpmetrics.NumberDataPoint_AsInt{ AsInt: v, } } // Exemplars returns the Exemplars associated with this NumberDataPoint. func (ms NumberDataPoint) Exemplars() ExemplarSlice { - return newExemplarSlice(&ms.orig.Exemplars) + return ExemplarSlice(internal.NewExemplarSlice(&ms.getOrig().Exemplars)) } // Flags returns the flags associated with this NumberDataPoint. func (ms NumberDataPoint) Flags() MetricDataPointFlags { - return newMetricDataPointFlags(&ms.orig.Flags) + return MetricDataPointFlags(internal.NewMetricDataPointFlags(&ms.getOrig().Flags)) } // CopyTo copies all properties from the current struct to the dest. @@ -1240,28 +1269,28 @@ func (ms NumberDataPoint) CopyTo(dest NumberDataPoint) { // // Must use NewHistogramDataPointSlice function to create new instances. // Important: zero-initialized instance is not valid for use. -type HistogramDataPointSlice struct { - // orig points to the slice otlpmetrics.HistogramDataPoint field contained somewhere else. - // We use pointer-to-slice to be able to modify it in functions like EnsureCapacity. - orig *[]*otlpmetrics.HistogramDataPoint -} +type HistogramDataPointSlice internal.HistogramDataPointSlice func newHistogramDataPointSlice(orig *[]*otlpmetrics.HistogramDataPoint) HistogramDataPointSlice { - return HistogramDataPointSlice{orig} + return HistogramDataPointSlice(internal.NewHistogramDataPointSlice(orig)) +} + +func (ms HistogramDataPointSlice) getOrig() *[]*otlpmetrics.HistogramDataPoint { + return internal.GetOrigHistogramDataPointSlice(internal.HistogramDataPointSlice(ms)) } // NewHistogramDataPointSlice creates a HistogramDataPointSlice with 0 elements. // Can use "EnsureCapacity" to initialize with a given capacity. func NewHistogramDataPointSlice() HistogramDataPointSlice { orig := []*otlpmetrics.HistogramDataPoint(nil) - return HistogramDataPointSlice{&orig} + return newHistogramDataPointSlice(&orig) } // Len returns the number of elements in the slice. // // Returns "0" for a newly instance created with "NewHistogramDataPointSlice()". func (es HistogramDataPointSlice) Len() int { - return len(*es.orig) + return len(*es.getOrig()) } // At returns the element at the given index. @@ -1273,27 +1302,27 @@ func (es HistogramDataPointSlice) Len() int { // ... 
// Do something with the element // } func (es HistogramDataPointSlice) At(ix int) HistogramDataPoint { - return newHistogramDataPoint((*es.orig)[ix]) + return newHistogramDataPoint((*es.getOrig())[ix]) } // CopyTo copies all elements from the current slice to the dest. func (es HistogramDataPointSlice) CopyTo(dest HistogramDataPointSlice) { srcLen := es.Len() - destCap := cap(*dest.orig) + destCap := cap(*dest.getOrig()) if srcLen <= destCap { - (*dest.orig) = (*dest.orig)[:srcLen:destCap] - for i := range *es.orig { - newHistogramDataPoint((*es.orig)[i]).CopyTo(newHistogramDataPoint((*dest.orig)[i])) + (*dest.getOrig()) = (*dest.getOrig())[:srcLen:destCap] + for i := range *es.getOrig() { + newHistogramDataPoint((*es.getOrig())[i]).CopyTo(newHistogramDataPoint((*dest.getOrig())[i])) } return } origs := make([]otlpmetrics.HistogramDataPoint, srcLen) wrappers := make([]*otlpmetrics.HistogramDataPoint, srcLen) - for i := range *es.orig { + for i := range *es.getOrig() { wrappers[i] = &origs[i] - newHistogramDataPoint((*es.orig)[i]).CopyTo(newHistogramDataPoint(wrappers[i])) + newHistogramDataPoint((*es.getOrig())[i]).CopyTo(newHistogramDataPoint(wrappers[i])) } - *dest.orig = wrappers + *dest.getOrig() = wrappers } // EnsureCapacity is an operation that ensures the slice has at least the specified capacity. @@ -1309,20 +1338,20 @@ func (es HistogramDataPointSlice) CopyTo(dest HistogramDataPointSlice) { // // Here should set all the values for e. // } func (es HistogramDataPointSlice) EnsureCapacity(newCap int) { - oldCap := cap(*es.orig) + oldCap := cap(*es.getOrig()) if newCap <= oldCap { return } - newOrig := make([]*otlpmetrics.HistogramDataPoint, len(*es.orig), newCap) - copy(newOrig, *es.orig) - *es.orig = newOrig + newOrig := make([]*otlpmetrics.HistogramDataPoint, len(*es.getOrig()), newCap) + copy(newOrig, *es.getOrig()) + *es.getOrig() = newOrig } // AppendEmpty will append to the end of the slice an empty HistogramDataPoint. // It returns the newly added HistogramDataPoint. func (es HistogramDataPointSlice) AppendEmpty() HistogramDataPoint { - *es.orig = append(*es.orig, &otlpmetrics.HistogramDataPoint{}) + *es.getOrig() = append(*es.getOrig(), &otlpmetrics.HistogramDataPoint{}) return es.At(es.Len() - 1) } @@ -1335,29 +1364,29 @@ func (es HistogramDataPointSlice) AppendEmpty() HistogramDataPoint { // lessFunc := func(a, b HistogramDataPoint) bool { // return a.Name() < b.Name() // choose any comparison here // } -// assert.EqualValues(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) +// assert.Equal(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) func (es HistogramDataPointSlice) Sort(less func(a, b HistogramDataPoint) bool) HistogramDataPointSlice { - sort.SliceStable(*es.orig, func(i, j int) bool { return less(es.At(i), es.At(j)) }) + sort.SliceStable(*es.getOrig(), func(i, j int) bool { return less(es.At(i), es.At(j)) }) return es } // MoveAndAppendTo moves all elements from the current slice and appends them to the dest. // The current slice will be cleared. func (es HistogramDataPointSlice) MoveAndAppendTo(dest HistogramDataPointSlice) { - if *dest.orig == nil { + if *dest.getOrig() == nil { // We can simply move the entire vector and avoid any allocations. - *dest.orig = *es.orig + *dest.getOrig() = *es.getOrig() } else { - *dest.orig = append(*dest.orig, *es.orig...) + *dest.getOrig() = append(*dest.getOrig(), *es.getOrig()...) } - *es.orig = nil + *es.getOrig() = nil } // RemoveIf calls f sequentially for each element present in the slice. 
// If f returns true, the element is removed from the slice. func (es HistogramDataPointSlice) RemoveIf(f func(HistogramDataPoint) bool) { newLen := 0 - for i := 0; i < len(*es.orig); i++ { + for i := 0; i < len(*es.getOrig()); i++ { if f(es.At(i)) { continue } @@ -1366,11 +1395,11 @@ func (es HistogramDataPointSlice) RemoveIf(f func(HistogramDataPoint) bool) { newLen++ continue } - (*es.orig)[newLen] = (*es.orig)[i] + (*es.getOrig())[newLen] = (*es.getOrig())[i] newLen++ } // TODO: Prevent memory leak by erasing truncated values. - *es.orig = (*es.orig)[:newLen] + *es.getOrig() = (*es.getOrig())[:newLen] } // HistogramDataPoint is a single data point in a timeseries that describes the time-varying values of a Histogram of values. @@ -1380,12 +1409,15 @@ func (es HistogramDataPointSlice) RemoveIf(f func(HistogramDataPoint) bool) { // // Must use NewHistogramDataPoint function to create new instances. // Important: zero-initialized instance is not valid for use. -type HistogramDataPoint struct { - orig *otlpmetrics.HistogramDataPoint -} + +type HistogramDataPoint internal.HistogramDataPoint func newHistogramDataPoint(orig *otlpmetrics.HistogramDataPoint) HistogramDataPoint { - return HistogramDataPoint{orig: orig} + return HistogramDataPoint(internal.NewHistogramDataPoint(orig)) +} + +func (ms HistogramDataPoint) getOrig() *otlpmetrics.HistogramDataPoint { + return internal.GetOrigHistogramDataPoint(internal.HistogramDataPoint(ms)) } // NewHistogramDataPoint creates a new empty HistogramDataPoint. @@ -1399,121 +1431,121 @@ func NewHistogramDataPoint() HistogramDataPoint { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms HistogramDataPoint) MoveTo(dest HistogramDataPoint) { - *dest.orig = *ms.orig - *ms.orig = otlpmetrics.HistogramDataPoint{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlpmetrics.HistogramDataPoint{} } // Attributes returns the Attributes associated with this HistogramDataPoint. -func (ms HistogramDataPoint) Attributes() Map { - return newMap(&ms.orig.Attributes) +func (ms HistogramDataPoint) Attributes() pcommon.Map { + return pcommon.Map(internal.NewMap(&ms.getOrig().Attributes)) } // StartTimestamp returns the starttimestamp associated with this HistogramDataPoint. -func (ms HistogramDataPoint) StartTimestamp() Timestamp { - return Timestamp(ms.orig.StartTimeUnixNano) +func (ms HistogramDataPoint) StartTimestamp() pcommon.Timestamp { + return pcommon.Timestamp(ms.getOrig().StartTimeUnixNano) } // SetStartTimestamp replaces the starttimestamp associated with this HistogramDataPoint. -func (ms HistogramDataPoint) SetStartTimestamp(v Timestamp) { - ms.orig.StartTimeUnixNano = uint64(v) +func (ms HistogramDataPoint) SetStartTimestamp(v pcommon.Timestamp) { + ms.getOrig().StartTimeUnixNano = uint64(v) } // Timestamp returns the timestamp associated with this HistogramDataPoint. -func (ms HistogramDataPoint) Timestamp() Timestamp { - return Timestamp(ms.orig.TimeUnixNano) +func (ms HistogramDataPoint) Timestamp() pcommon.Timestamp { + return pcommon.Timestamp(ms.getOrig().TimeUnixNano) } // SetTimestamp replaces the timestamp associated with this HistogramDataPoint. -func (ms HistogramDataPoint) SetTimestamp(v Timestamp) { - ms.orig.TimeUnixNano = uint64(v) +func (ms HistogramDataPoint) SetTimestamp(v pcommon.Timestamp) { + ms.getOrig().TimeUnixNano = uint64(v) } // Count returns the count associated with this HistogramDataPoint. 
func (ms HistogramDataPoint) Count() uint64 { - return ms.orig.Count + return ms.getOrig().Count } // SetCount replaces the count associated with this HistogramDataPoint. func (ms HistogramDataPoint) SetCount(v uint64) { - ms.orig.Count = v + ms.getOrig().Count = v } // Sum returns the sum associated with this HistogramDataPoint. func (ms HistogramDataPoint) Sum() float64 { - return ms.orig.GetSum() + return ms.getOrig().GetSum() } // HasSum returns true if the HistogramDataPoint contains a // Sum value, false otherwise. func (ms HistogramDataPoint) HasSum() bool { - return ms.orig.Sum_ != nil + return ms.getOrig().Sum_ != nil } // SetSum replaces the sum associated with this HistogramDataPoint. func (ms HistogramDataPoint) SetSum(v float64) { - ms.orig.Sum_ = &otlpmetrics.HistogramDataPoint_Sum{Sum: v} + ms.getOrig().Sum_ = &otlpmetrics.HistogramDataPoint_Sum{Sum: v} } // BucketCounts returns the bucketcounts associated with this HistogramDataPoint. -func (ms HistogramDataPoint) BucketCounts() ImmutableUInt64Slice { - return ImmutableUInt64Slice{value: ms.orig.BucketCounts} +func (ms HistogramDataPoint) BucketCounts() pcommon.ImmutableUInt64Slice { + return pcommon.ImmutableUInt64Slice(internal.NewImmutableUInt64Slice(ms.getOrig().BucketCounts)) } // SetBucketCounts replaces the bucketcounts associated with this HistogramDataPoint. -func (ms HistogramDataPoint) SetBucketCounts(v ImmutableUInt64Slice) { - ms.orig.BucketCounts = v.value +func (ms HistogramDataPoint) SetBucketCounts(v pcommon.ImmutableUInt64Slice) { + ms.getOrig().BucketCounts = internal.GetOrigImmutableUInt64Slice(internal.ImmutableUInt64Slice(v)) } // ExplicitBounds returns the explicitbounds associated with this HistogramDataPoint. -func (ms HistogramDataPoint) ExplicitBounds() ImmutableFloat64Slice { - return ImmutableFloat64Slice{value: ms.orig.ExplicitBounds} +func (ms HistogramDataPoint) ExplicitBounds() pcommon.ImmutableFloat64Slice { + return pcommon.ImmutableFloat64Slice(internal.NewImmutableFloat64Slice(ms.getOrig().ExplicitBounds)) } // SetExplicitBounds replaces the explicitbounds associated with this HistogramDataPoint. -func (ms HistogramDataPoint) SetExplicitBounds(v ImmutableFloat64Slice) { - ms.orig.ExplicitBounds = v.value +func (ms HistogramDataPoint) SetExplicitBounds(v pcommon.ImmutableFloat64Slice) { + ms.getOrig().ExplicitBounds = internal.GetOrigImmutableFloat64Slice(internal.ImmutableFloat64Slice(v)) } // Exemplars returns the Exemplars associated with this HistogramDataPoint. func (ms HistogramDataPoint) Exemplars() ExemplarSlice { - return newExemplarSlice(&ms.orig.Exemplars) + return ExemplarSlice(internal.NewExemplarSlice(&ms.getOrig().Exemplars)) } // Flags returns the flags associated with this HistogramDataPoint. func (ms HistogramDataPoint) Flags() MetricDataPointFlags { - return newMetricDataPointFlags(&ms.orig.Flags) + return MetricDataPointFlags(internal.NewMetricDataPointFlags(&ms.getOrig().Flags)) } // Min returns the min associated with this HistogramDataPoint. func (ms HistogramDataPoint) Min() float64 { - return ms.orig.GetMin() + return ms.getOrig().GetMin() } // HasMin returns true if the HistogramDataPoint contains a // Min value, false otherwise. func (ms HistogramDataPoint) HasMin() bool { - return ms.orig.Min_ != nil + return ms.getOrig().Min_ != nil } // SetMin replaces the min associated with this HistogramDataPoint. 
func (ms HistogramDataPoint) SetMin(v float64) { - ms.orig.Min_ = &otlpmetrics.HistogramDataPoint_Min{Min: v} + ms.getOrig().Min_ = &otlpmetrics.HistogramDataPoint_Min{Min: v} } // Max returns the max associated with this HistogramDataPoint. func (ms HistogramDataPoint) Max() float64 { - return ms.orig.GetMax() + return ms.getOrig().GetMax() } // HasMax returns true if the HistogramDataPoint contains a // Max value, false otherwise. func (ms HistogramDataPoint) HasMax() bool { - return ms.orig.Max_ != nil + return ms.getOrig().Max_ != nil } // SetMax replaces the max associated with this HistogramDataPoint. func (ms HistogramDataPoint) SetMax(v float64) { - ms.orig.Max_ = &otlpmetrics.HistogramDataPoint_Max{Max: v} + ms.getOrig().Max_ = &otlpmetrics.HistogramDataPoint_Max{Max: v} } // CopyTo copies all properties from the current struct to the dest. @@ -1526,18 +1558,18 @@ func (ms HistogramDataPoint) CopyTo(dest HistogramDataPoint) { dest.SetSum(ms.Sum()) } - if len(ms.orig.BucketCounts) == 0 { - dest.orig.BucketCounts = nil + if len(ms.getOrig().BucketCounts) == 0 { + dest.getOrig().BucketCounts = nil } else { - dest.orig.BucketCounts = make([]uint64, len(ms.orig.BucketCounts)) - copy(dest.orig.BucketCounts, ms.orig.BucketCounts) + dest.getOrig().BucketCounts = make([]uint64, len(ms.getOrig().BucketCounts)) + copy(dest.getOrig().BucketCounts, ms.getOrig().BucketCounts) } - if len(ms.orig.ExplicitBounds) == 0 { - dest.orig.ExplicitBounds = nil + if len(ms.getOrig().ExplicitBounds) == 0 { + dest.getOrig().ExplicitBounds = nil } else { - dest.orig.ExplicitBounds = make([]float64, len(ms.orig.ExplicitBounds)) - copy(dest.orig.ExplicitBounds, ms.orig.ExplicitBounds) + dest.getOrig().ExplicitBounds = make([]float64, len(ms.getOrig().ExplicitBounds)) + copy(dest.getOrig().ExplicitBounds, ms.getOrig().ExplicitBounds) } ms.Exemplars().CopyTo(dest.Exemplars()) @@ -1559,28 +1591,28 @@ func (ms HistogramDataPoint) CopyTo(dest HistogramDataPoint) { // // Must use NewExponentialHistogramDataPointSlice function to create new instances. // Important: zero-initialized instance is not valid for use. -type ExponentialHistogramDataPointSlice struct { - // orig points to the slice otlpmetrics.ExponentialHistogramDataPoint field contained somewhere else. - // We use pointer-to-slice to be able to modify it in functions like EnsureCapacity. - orig *[]*otlpmetrics.ExponentialHistogramDataPoint -} +type ExponentialHistogramDataPointSlice internal.ExponentialHistogramDataPointSlice func newExponentialHistogramDataPointSlice(orig *[]*otlpmetrics.ExponentialHistogramDataPoint) ExponentialHistogramDataPointSlice { - return ExponentialHistogramDataPointSlice{orig} + return ExponentialHistogramDataPointSlice(internal.NewExponentialHistogramDataPointSlice(orig)) +} + +func (ms ExponentialHistogramDataPointSlice) getOrig() *[]*otlpmetrics.ExponentialHistogramDataPoint { + return internal.GetOrigExponentialHistogramDataPointSlice(internal.ExponentialHistogramDataPointSlice(ms)) } // NewExponentialHistogramDataPointSlice creates a ExponentialHistogramDataPointSlice with 0 elements. // Can use "EnsureCapacity" to initialize with a given capacity. func NewExponentialHistogramDataPointSlice() ExponentialHistogramDataPointSlice { orig := []*otlpmetrics.ExponentialHistogramDataPoint(nil) - return ExponentialHistogramDataPointSlice{&orig} + return newExponentialHistogramDataPointSlice(&orig) } // Len returns the number of elements in the slice. 
// // Returns "0" for a newly instance created with "NewExponentialHistogramDataPointSlice()". func (es ExponentialHistogramDataPointSlice) Len() int { - return len(*es.orig) + return len(*es.getOrig()) } // At returns the element at the given index. @@ -1592,27 +1624,27 @@ func (es ExponentialHistogramDataPointSlice) Len() int { // ... // Do something with the element // } func (es ExponentialHistogramDataPointSlice) At(ix int) ExponentialHistogramDataPoint { - return newExponentialHistogramDataPoint((*es.orig)[ix]) + return newExponentialHistogramDataPoint((*es.getOrig())[ix]) } // CopyTo copies all elements from the current slice to the dest. func (es ExponentialHistogramDataPointSlice) CopyTo(dest ExponentialHistogramDataPointSlice) { srcLen := es.Len() - destCap := cap(*dest.orig) + destCap := cap(*dest.getOrig()) if srcLen <= destCap { - (*dest.orig) = (*dest.orig)[:srcLen:destCap] - for i := range *es.orig { - newExponentialHistogramDataPoint((*es.orig)[i]).CopyTo(newExponentialHistogramDataPoint((*dest.orig)[i])) + (*dest.getOrig()) = (*dest.getOrig())[:srcLen:destCap] + for i := range *es.getOrig() { + newExponentialHistogramDataPoint((*es.getOrig())[i]).CopyTo(newExponentialHistogramDataPoint((*dest.getOrig())[i])) } return } origs := make([]otlpmetrics.ExponentialHistogramDataPoint, srcLen) wrappers := make([]*otlpmetrics.ExponentialHistogramDataPoint, srcLen) - for i := range *es.orig { + for i := range *es.getOrig() { wrappers[i] = &origs[i] - newExponentialHistogramDataPoint((*es.orig)[i]).CopyTo(newExponentialHistogramDataPoint(wrappers[i])) + newExponentialHistogramDataPoint((*es.getOrig())[i]).CopyTo(newExponentialHistogramDataPoint(wrappers[i])) } - *dest.orig = wrappers + *dest.getOrig() = wrappers } // EnsureCapacity is an operation that ensures the slice has at least the specified capacity. @@ -1628,20 +1660,20 @@ func (es ExponentialHistogramDataPointSlice) CopyTo(dest ExponentialHistogramDat // // Here should set all the values for e. // } func (es ExponentialHistogramDataPointSlice) EnsureCapacity(newCap int) { - oldCap := cap(*es.orig) + oldCap := cap(*es.getOrig()) if newCap <= oldCap { return } - newOrig := make([]*otlpmetrics.ExponentialHistogramDataPoint, len(*es.orig), newCap) - copy(newOrig, *es.orig) - *es.orig = newOrig + newOrig := make([]*otlpmetrics.ExponentialHistogramDataPoint, len(*es.getOrig()), newCap) + copy(newOrig, *es.getOrig()) + *es.getOrig() = newOrig } // AppendEmpty will append to the end of the slice an empty ExponentialHistogramDataPoint. // It returns the newly added ExponentialHistogramDataPoint. 
func (es ExponentialHistogramDataPointSlice) AppendEmpty() ExponentialHistogramDataPoint { - *es.orig = append(*es.orig, &otlpmetrics.ExponentialHistogramDataPoint{}) + *es.getOrig() = append(*es.getOrig(), &otlpmetrics.ExponentialHistogramDataPoint{}) return es.At(es.Len() - 1) } @@ -1654,29 +1686,29 @@ func (es ExponentialHistogramDataPointSlice) AppendEmpty() ExponentialHistogramD // lessFunc := func(a, b ExponentialHistogramDataPoint) bool { // return a.Name() < b.Name() // choose any comparison here // } -// assert.EqualValues(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) +// assert.Equal(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) func (es ExponentialHistogramDataPointSlice) Sort(less func(a, b ExponentialHistogramDataPoint) bool) ExponentialHistogramDataPointSlice { - sort.SliceStable(*es.orig, func(i, j int) bool { return less(es.At(i), es.At(j)) }) + sort.SliceStable(*es.getOrig(), func(i, j int) bool { return less(es.At(i), es.At(j)) }) return es } // MoveAndAppendTo moves all elements from the current slice and appends them to the dest. // The current slice will be cleared. func (es ExponentialHistogramDataPointSlice) MoveAndAppendTo(dest ExponentialHistogramDataPointSlice) { - if *dest.orig == nil { + if *dest.getOrig() == nil { // We can simply move the entire vector and avoid any allocations. - *dest.orig = *es.orig + *dest.getOrig() = *es.getOrig() } else { - *dest.orig = append(*dest.orig, *es.orig...) + *dest.getOrig() = append(*dest.getOrig(), *es.getOrig()...) } - *es.orig = nil + *es.getOrig() = nil } // RemoveIf calls f sequentially for each element present in the slice. // If f returns true, the element is removed from the slice. func (es ExponentialHistogramDataPointSlice) RemoveIf(f func(ExponentialHistogramDataPoint) bool) { newLen := 0 - for i := 0; i < len(*es.orig); i++ { + for i := 0; i < len(*es.getOrig()); i++ { if f(es.At(i)) { continue } @@ -1685,11 +1717,11 @@ func (es ExponentialHistogramDataPointSlice) RemoveIf(f func(ExponentialHistogra newLen++ continue } - (*es.orig)[newLen] = (*es.orig)[i] + (*es.getOrig())[newLen] = (*es.getOrig())[i] newLen++ } // TODO: Prevent memory leak by erasing truncated values. - *es.orig = (*es.orig)[:newLen] + *es.getOrig() = (*es.getOrig())[:newLen] } // ExponentialHistogramDataPoint is a single data point in a timeseries that describes the @@ -1702,12 +1734,15 @@ func (es ExponentialHistogramDataPointSlice) RemoveIf(f func(ExponentialHistogra // // Must use NewExponentialHistogramDataPoint function to create new instances. // Important: zero-initialized instance is not valid for use. -type ExponentialHistogramDataPoint struct { - orig *otlpmetrics.ExponentialHistogramDataPoint -} + +type ExponentialHistogramDataPoint internal.ExponentialHistogramDataPoint func newExponentialHistogramDataPoint(orig *otlpmetrics.ExponentialHistogramDataPoint) ExponentialHistogramDataPoint { - return ExponentialHistogramDataPoint{orig: orig} + return ExponentialHistogramDataPoint(internal.NewExponentialHistogramDataPoint(orig)) +} + +func (ms ExponentialHistogramDataPoint) getOrig() *otlpmetrics.ExponentialHistogramDataPoint { + return internal.GetOrigExponentialHistogramDataPoint(internal.ExponentialHistogramDataPoint(ms)) } // NewExponentialHistogramDataPoint creates a new empty ExponentialHistogramDataPoint. 
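
Aside (editorial note, not part of the generated diff): the slice hunks above — `NumberDataPointSlice`, `HistogramDataPointSlice`, `ExponentialHistogramDataPointSlice`, and later `SummaryDataPointSlice`, `ValueAtQuantileSlice`, and `ExemplarSlice` — all follow the same shape after this change: the public type becomes a type definition of an internal wrapper that holds a pointer to the parent message's slice field, and every method reaches that field through a generated `getOrig()` round trip. The sketch below is a minimal, self-contained illustration of that shape; every identifier in it (`otlpPoint`, `internalPointSlice`, `PointSlice`, `getOrigPointSlice`) is invented for illustration and does not exist in pdata.

```golang
// Minimal sketch of the slice-wrapper pattern used by the generated pmetric slices.
// All names are invented; this only mirrors the structure shown in the diff.
package main

import "fmt"

type otlpPoint struct{ Value float64 } // stand-in for an OTLP-generated struct

// "internal" wrapper: stores a pointer to the slice so mutations are visible
// through the owning message, as the generated internal wrappers do.
type internalPointSlice struct{ orig *[]*otlpPoint }

func newInternalPointSlice(orig *[]*otlpPoint) internalPointSlice {
	return internalPointSlice{orig: orig}
}

func getOrigPointSlice(es internalPointSlice) *[]*otlpPoint { return es.orig }

// "public" type is a type definition over the internal wrapper, so the orig
// field stays unexported and unreachable from user code.
type PointSlice internalPointSlice

// getOrig round-trips through the internal accessor, like the generated getOrig() helpers.
func (es PointSlice) getOrig() *[]*otlpPoint { return getOrigPointSlice(internalPointSlice(es)) }

func (es PointSlice) Len() int { return len(*es.getOrig()) }

// AppendEmpty grows the parent's slice in place through the stored pointer.
func (es PointSlice) AppendEmpty() *otlpPoint {
	*es.getOrig() = append(*es.getOrig(), &otlpPoint{})
	return (*es.getOrig())[es.Len()-1]
}

func main() {
	var field []*otlpPoint // imagine Histogram.DataPoints inside the parent message
	es := PointSlice(newInternalPointSlice(&field))
	es.AppendEmpty().Value = 3.14
	fmt.Println(len(field), field[0].Value) // 1 3.14 — the parent's field was mutated in place
}
```

The pointer-to-slice is what lets `AppendEmpty`, `EnsureCapacity`, and `MoveAndAppendTo` replace or grow the backing array while the owning message still observes the change; with this PR that storage detail (and its explanatory comment, which the hunks above delete from the public structs) moves behind the internal wrapper, and the public constructors simply convert the internal value, e.g. `NumberDataPointSlice(internal.NewNumberDataPointSlice(orig))`.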
@@ -1721,131 +1756,131 @@ func NewExponentialHistogramDataPoint() ExponentialHistogramDataPoint { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms ExponentialHistogramDataPoint) MoveTo(dest ExponentialHistogramDataPoint) { - *dest.orig = *ms.orig - *ms.orig = otlpmetrics.ExponentialHistogramDataPoint{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlpmetrics.ExponentialHistogramDataPoint{} } // Attributes returns the Attributes associated with this ExponentialHistogramDataPoint. -func (ms ExponentialHistogramDataPoint) Attributes() Map { - return newMap(&ms.orig.Attributes) +func (ms ExponentialHistogramDataPoint) Attributes() pcommon.Map { + return pcommon.Map(internal.NewMap(&ms.getOrig().Attributes)) } // StartTimestamp returns the starttimestamp associated with this ExponentialHistogramDataPoint. -func (ms ExponentialHistogramDataPoint) StartTimestamp() Timestamp { - return Timestamp(ms.orig.StartTimeUnixNano) +func (ms ExponentialHistogramDataPoint) StartTimestamp() pcommon.Timestamp { + return pcommon.Timestamp(ms.getOrig().StartTimeUnixNano) } // SetStartTimestamp replaces the starttimestamp associated with this ExponentialHistogramDataPoint. -func (ms ExponentialHistogramDataPoint) SetStartTimestamp(v Timestamp) { - ms.orig.StartTimeUnixNano = uint64(v) +func (ms ExponentialHistogramDataPoint) SetStartTimestamp(v pcommon.Timestamp) { + ms.getOrig().StartTimeUnixNano = uint64(v) } // Timestamp returns the timestamp associated with this ExponentialHistogramDataPoint. -func (ms ExponentialHistogramDataPoint) Timestamp() Timestamp { - return Timestamp(ms.orig.TimeUnixNano) +func (ms ExponentialHistogramDataPoint) Timestamp() pcommon.Timestamp { + return pcommon.Timestamp(ms.getOrig().TimeUnixNano) } // SetTimestamp replaces the timestamp associated with this ExponentialHistogramDataPoint. -func (ms ExponentialHistogramDataPoint) SetTimestamp(v Timestamp) { - ms.orig.TimeUnixNano = uint64(v) +func (ms ExponentialHistogramDataPoint) SetTimestamp(v pcommon.Timestamp) { + ms.getOrig().TimeUnixNano = uint64(v) } // Count returns the count associated with this ExponentialHistogramDataPoint. func (ms ExponentialHistogramDataPoint) Count() uint64 { - return ms.orig.Count + return ms.getOrig().Count } // SetCount replaces the count associated with this ExponentialHistogramDataPoint. func (ms ExponentialHistogramDataPoint) SetCount(v uint64) { - ms.orig.Count = v + ms.getOrig().Count = v } // Sum returns the sum associated with this ExponentialHistogramDataPoint. func (ms ExponentialHistogramDataPoint) Sum() float64 { - return ms.orig.GetSum() + return ms.getOrig().GetSum() } // HasSum returns true if the ExponentialHistogramDataPoint contains a // Sum value, false otherwise. func (ms ExponentialHistogramDataPoint) HasSum() bool { - return ms.orig.Sum_ != nil + return ms.getOrig().Sum_ != nil } // SetSum replaces the sum associated with this ExponentialHistogramDataPoint. func (ms ExponentialHistogramDataPoint) SetSum(v float64) { - ms.orig.Sum_ = &otlpmetrics.ExponentialHistogramDataPoint_Sum{Sum: v} + ms.getOrig().Sum_ = &otlpmetrics.ExponentialHistogramDataPoint_Sum{Sum: v} } // Scale returns the scale associated with this ExponentialHistogramDataPoint. func (ms ExponentialHistogramDataPoint) Scale() int32 { - return int32(ms.orig.Scale) + return ms.getOrig().Scale } // SetScale replaces the scale associated with this ExponentialHistogramDataPoint. 
func (ms ExponentialHistogramDataPoint) SetScale(v int32) { - ms.orig.Scale = int32(v) + ms.getOrig().Scale = v } // ZeroCount returns the zerocount associated with this ExponentialHistogramDataPoint. func (ms ExponentialHistogramDataPoint) ZeroCount() uint64 { - return uint64(ms.orig.ZeroCount) + return ms.getOrig().ZeroCount } // SetZeroCount replaces the zerocount associated with this ExponentialHistogramDataPoint. func (ms ExponentialHistogramDataPoint) SetZeroCount(v uint64) { - ms.orig.ZeroCount = uint64(v) + ms.getOrig().ZeroCount = v } // Positive returns the positive associated with this ExponentialHistogramDataPoint. func (ms ExponentialHistogramDataPoint) Positive() Buckets { - return newBuckets(&ms.orig.Positive) + return Buckets(internal.NewBuckets(&ms.getOrig().Positive)) } // Negative returns the negative associated with this ExponentialHistogramDataPoint. func (ms ExponentialHistogramDataPoint) Negative() Buckets { - return newBuckets(&ms.orig.Negative) + return Buckets(internal.NewBuckets(&ms.getOrig().Negative)) } // Exemplars returns the Exemplars associated with this ExponentialHistogramDataPoint. func (ms ExponentialHistogramDataPoint) Exemplars() ExemplarSlice { - return newExemplarSlice(&ms.orig.Exemplars) + return ExemplarSlice(internal.NewExemplarSlice(&ms.getOrig().Exemplars)) } // Flags returns the flags associated with this ExponentialHistogramDataPoint. func (ms ExponentialHistogramDataPoint) Flags() MetricDataPointFlags { - return newMetricDataPointFlags(&ms.orig.Flags) + return MetricDataPointFlags(internal.NewMetricDataPointFlags(&ms.getOrig().Flags)) } // Min returns the min associated with this ExponentialHistogramDataPoint. func (ms ExponentialHistogramDataPoint) Min() float64 { - return ms.orig.GetMin() + return ms.getOrig().GetMin() } // HasMin returns true if the ExponentialHistogramDataPoint contains a // Min value, false otherwise. func (ms ExponentialHistogramDataPoint) HasMin() bool { - return ms.orig.Min_ != nil + return ms.getOrig().Min_ != nil } // SetMin replaces the min associated with this ExponentialHistogramDataPoint. func (ms ExponentialHistogramDataPoint) SetMin(v float64) { - ms.orig.Min_ = &otlpmetrics.ExponentialHistogramDataPoint_Min{Min: v} + ms.getOrig().Min_ = &otlpmetrics.ExponentialHistogramDataPoint_Min{Min: v} } // Max returns the max associated with this ExponentialHistogramDataPoint. func (ms ExponentialHistogramDataPoint) Max() float64 { - return ms.orig.GetMax() + return ms.getOrig().GetMax() } // HasMax returns true if the ExponentialHistogramDataPoint contains a // Max value, false otherwise. func (ms ExponentialHistogramDataPoint) HasMax() bool { - return ms.orig.Max_ != nil + return ms.getOrig().Max_ != nil } // SetMax replaces the max associated with this ExponentialHistogramDataPoint. func (ms ExponentialHistogramDataPoint) SetMax(v float64) { - ms.orig.Max_ = &otlpmetrics.ExponentialHistogramDataPoint_Max{Max: v} + ms.getOrig().Max_ = &otlpmetrics.ExponentialHistogramDataPoint_Max{Max: v} } // CopyTo copies all properties from the current struct to the dest. @@ -1881,12 +1916,15 @@ func (ms ExponentialHistogramDataPoint) CopyTo(dest ExponentialHistogramDataPoin // // Must use NewBuckets function to create new instances. // Important: zero-initialized instance is not valid for use. 
-type Buckets struct { - orig *otlpmetrics.ExponentialHistogramDataPoint_Buckets -} + +type Buckets internal.Buckets func newBuckets(orig *otlpmetrics.ExponentialHistogramDataPoint_Buckets) Buckets { - return Buckets{orig: orig} + return Buckets(internal.NewBuckets(orig)) +} + +func (ms Buckets) getOrig() *otlpmetrics.ExponentialHistogramDataPoint_Buckets { + return internal.GetOrigBuckets(internal.Buckets(ms)) } // NewBuckets creates a new empty Buckets. @@ -1900,38 +1938,38 @@ func NewBuckets() Buckets { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms Buckets) MoveTo(dest Buckets) { - *dest.orig = *ms.orig - *ms.orig = otlpmetrics.ExponentialHistogramDataPoint_Buckets{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlpmetrics.ExponentialHistogramDataPoint_Buckets{} } // Offset returns the offset associated with this Buckets. func (ms Buckets) Offset() int32 { - return int32(ms.orig.Offset) + return ms.getOrig().Offset } // SetOffset replaces the offset associated with this Buckets. func (ms Buckets) SetOffset(v int32) { - ms.orig.Offset = int32(v) + ms.getOrig().Offset = v } // BucketCounts returns the bucketcounts associated with this Buckets. -func (ms Buckets) BucketCounts() ImmutableUInt64Slice { - return ImmutableUInt64Slice{value: ms.orig.BucketCounts} +func (ms Buckets) BucketCounts() pcommon.ImmutableUInt64Slice { + return pcommon.ImmutableUInt64Slice(internal.NewImmutableUInt64Slice(ms.getOrig().BucketCounts)) } // SetBucketCounts replaces the bucketcounts associated with this Buckets. -func (ms Buckets) SetBucketCounts(v ImmutableUInt64Slice) { - ms.orig.BucketCounts = v.value +func (ms Buckets) SetBucketCounts(v pcommon.ImmutableUInt64Slice) { + ms.getOrig().BucketCounts = internal.GetOrigImmutableUInt64Slice(internal.ImmutableUInt64Slice(v)) } // CopyTo copies all properties from the current struct to the dest. func (ms Buckets) CopyTo(dest Buckets) { dest.SetOffset(ms.Offset()) - if len(ms.orig.BucketCounts) == 0 { - dest.orig.BucketCounts = nil + if len(ms.getOrig().BucketCounts) == 0 { + dest.getOrig().BucketCounts = nil } else { - dest.orig.BucketCounts = make([]uint64, len(ms.orig.BucketCounts)) - copy(dest.orig.BucketCounts, ms.orig.BucketCounts) + dest.getOrig().BucketCounts = make([]uint64, len(ms.getOrig().BucketCounts)) + copy(dest.getOrig().BucketCounts, ms.getOrig().BucketCounts) } } @@ -1943,28 +1981,28 @@ func (ms Buckets) CopyTo(dest Buckets) { // // Must use NewSummaryDataPointSlice function to create new instances. // Important: zero-initialized instance is not valid for use. -type SummaryDataPointSlice struct { - // orig points to the slice otlpmetrics.SummaryDataPoint field contained somewhere else. - // We use pointer-to-slice to be able to modify it in functions like EnsureCapacity. - orig *[]*otlpmetrics.SummaryDataPoint -} +type SummaryDataPointSlice internal.SummaryDataPointSlice func newSummaryDataPointSlice(orig *[]*otlpmetrics.SummaryDataPoint) SummaryDataPointSlice { - return SummaryDataPointSlice{orig} + return SummaryDataPointSlice(internal.NewSummaryDataPointSlice(orig)) +} + +func (ms SummaryDataPointSlice) getOrig() *[]*otlpmetrics.SummaryDataPoint { + return internal.GetOrigSummaryDataPointSlice(internal.SummaryDataPointSlice(ms)) } // NewSummaryDataPointSlice creates a SummaryDataPointSlice with 0 elements. // Can use "EnsureCapacity" to initialize with a given capacity. 
func NewSummaryDataPointSlice() SummaryDataPointSlice { orig := []*otlpmetrics.SummaryDataPoint(nil) - return SummaryDataPointSlice{&orig} + return newSummaryDataPointSlice(&orig) } // Len returns the number of elements in the slice. // // Returns "0" for a newly instance created with "NewSummaryDataPointSlice()". func (es SummaryDataPointSlice) Len() int { - return len(*es.orig) + return len(*es.getOrig()) } // At returns the element at the given index. @@ -1976,27 +2014,27 @@ func (es SummaryDataPointSlice) Len() int { // ... // Do something with the element // } func (es SummaryDataPointSlice) At(ix int) SummaryDataPoint { - return newSummaryDataPoint((*es.orig)[ix]) + return newSummaryDataPoint((*es.getOrig())[ix]) } // CopyTo copies all elements from the current slice to the dest. func (es SummaryDataPointSlice) CopyTo(dest SummaryDataPointSlice) { srcLen := es.Len() - destCap := cap(*dest.orig) + destCap := cap(*dest.getOrig()) if srcLen <= destCap { - (*dest.orig) = (*dest.orig)[:srcLen:destCap] - for i := range *es.orig { - newSummaryDataPoint((*es.orig)[i]).CopyTo(newSummaryDataPoint((*dest.orig)[i])) + (*dest.getOrig()) = (*dest.getOrig())[:srcLen:destCap] + for i := range *es.getOrig() { + newSummaryDataPoint((*es.getOrig())[i]).CopyTo(newSummaryDataPoint((*dest.getOrig())[i])) } return } origs := make([]otlpmetrics.SummaryDataPoint, srcLen) wrappers := make([]*otlpmetrics.SummaryDataPoint, srcLen) - for i := range *es.orig { + for i := range *es.getOrig() { wrappers[i] = &origs[i] - newSummaryDataPoint((*es.orig)[i]).CopyTo(newSummaryDataPoint(wrappers[i])) + newSummaryDataPoint((*es.getOrig())[i]).CopyTo(newSummaryDataPoint(wrappers[i])) } - *dest.orig = wrappers + *dest.getOrig() = wrappers } // EnsureCapacity is an operation that ensures the slice has at least the specified capacity. @@ -2012,20 +2050,20 @@ func (es SummaryDataPointSlice) CopyTo(dest SummaryDataPointSlice) { // // Here should set all the values for e. // } func (es SummaryDataPointSlice) EnsureCapacity(newCap int) { - oldCap := cap(*es.orig) + oldCap := cap(*es.getOrig()) if newCap <= oldCap { return } - newOrig := make([]*otlpmetrics.SummaryDataPoint, len(*es.orig), newCap) - copy(newOrig, *es.orig) - *es.orig = newOrig + newOrig := make([]*otlpmetrics.SummaryDataPoint, len(*es.getOrig()), newCap) + copy(newOrig, *es.getOrig()) + *es.getOrig() = newOrig } // AppendEmpty will append to the end of the slice an empty SummaryDataPoint. // It returns the newly added SummaryDataPoint. func (es SummaryDataPointSlice) AppendEmpty() SummaryDataPoint { - *es.orig = append(*es.orig, &otlpmetrics.SummaryDataPoint{}) + *es.getOrig() = append(*es.getOrig(), &otlpmetrics.SummaryDataPoint{}) return es.At(es.Len() - 1) } @@ -2038,29 +2076,29 @@ func (es SummaryDataPointSlice) AppendEmpty() SummaryDataPoint { // lessFunc := func(a, b SummaryDataPoint) bool { // return a.Name() < b.Name() // choose any comparison here // } -// assert.EqualValues(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) +// assert.Equal(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) func (es SummaryDataPointSlice) Sort(less func(a, b SummaryDataPoint) bool) SummaryDataPointSlice { - sort.SliceStable(*es.orig, func(i, j int) bool { return less(es.At(i), es.At(j)) }) + sort.SliceStable(*es.getOrig(), func(i, j int) bool { return less(es.At(i), es.At(j)) }) return es } // MoveAndAppendTo moves all elements from the current slice and appends them to the dest. // The current slice will be cleared. 
func (es SummaryDataPointSlice) MoveAndAppendTo(dest SummaryDataPointSlice) { - if *dest.orig == nil { + if *dest.getOrig() == nil { // We can simply move the entire vector and avoid any allocations. - *dest.orig = *es.orig + *dest.getOrig() = *es.getOrig() } else { - *dest.orig = append(*dest.orig, *es.orig...) + *dest.getOrig() = append(*dest.getOrig(), *es.getOrig()...) } - *es.orig = nil + *es.getOrig() = nil } // RemoveIf calls f sequentially for each element present in the slice. // If f returns true, the element is removed from the slice. func (es SummaryDataPointSlice) RemoveIf(f func(SummaryDataPoint) bool) { newLen := 0 - for i := 0; i < len(*es.orig); i++ { + for i := 0; i < len(*es.getOrig()); i++ { if f(es.At(i)) { continue } @@ -2069,11 +2107,11 @@ func (es SummaryDataPointSlice) RemoveIf(f func(SummaryDataPoint) bool) { newLen++ continue } - (*es.orig)[newLen] = (*es.orig)[i] + (*es.getOrig())[newLen] = (*es.getOrig())[i] newLen++ } // TODO: Prevent memory leak by erasing truncated values. - *es.orig = (*es.orig)[:newLen] + *es.getOrig() = (*es.getOrig())[:newLen] } // SummaryDataPoint is a single data point in a timeseries that describes the time-varying values of a Summary of double values. @@ -2083,12 +2121,15 @@ func (es SummaryDataPointSlice) RemoveIf(f func(SummaryDataPoint) bool) { // // Must use NewSummaryDataPoint function to create new instances. // Important: zero-initialized instance is not valid for use. -type SummaryDataPoint struct { - orig *otlpmetrics.SummaryDataPoint -} + +type SummaryDataPoint internal.SummaryDataPoint func newSummaryDataPoint(orig *otlpmetrics.SummaryDataPoint) SummaryDataPoint { - return SummaryDataPoint{orig: orig} + return SummaryDataPoint(internal.NewSummaryDataPoint(orig)) +} + +func (ms SummaryDataPoint) getOrig() *otlpmetrics.SummaryDataPoint { + return internal.GetOrigSummaryDataPoint(internal.SummaryDataPoint(ms)) } // NewSummaryDataPoint creates a new empty SummaryDataPoint. @@ -2102,63 +2143,63 @@ func NewSummaryDataPoint() SummaryDataPoint { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms SummaryDataPoint) MoveTo(dest SummaryDataPoint) { - *dest.orig = *ms.orig - *ms.orig = otlpmetrics.SummaryDataPoint{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlpmetrics.SummaryDataPoint{} } // Attributes returns the Attributes associated with this SummaryDataPoint. -func (ms SummaryDataPoint) Attributes() Map { - return newMap(&ms.orig.Attributes) +func (ms SummaryDataPoint) Attributes() pcommon.Map { + return pcommon.Map(internal.NewMap(&ms.getOrig().Attributes)) } // StartTimestamp returns the starttimestamp associated with this SummaryDataPoint. -func (ms SummaryDataPoint) StartTimestamp() Timestamp { - return Timestamp(ms.orig.StartTimeUnixNano) +func (ms SummaryDataPoint) StartTimestamp() pcommon.Timestamp { + return pcommon.Timestamp(ms.getOrig().StartTimeUnixNano) } // SetStartTimestamp replaces the starttimestamp associated with this SummaryDataPoint. -func (ms SummaryDataPoint) SetStartTimestamp(v Timestamp) { - ms.orig.StartTimeUnixNano = uint64(v) +func (ms SummaryDataPoint) SetStartTimestamp(v pcommon.Timestamp) { + ms.getOrig().StartTimeUnixNano = uint64(v) } // Timestamp returns the timestamp associated with this SummaryDataPoint. 
-func (ms SummaryDataPoint) Timestamp() Timestamp { - return Timestamp(ms.orig.TimeUnixNano) +func (ms SummaryDataPoint) Timestamp() pcommon.Timestamp { + return pcommon.Timestamp(ms.getOrig().TimeUnixNano) } // SetTimestamp replaces the timestamp associated with this SummaryDataPoint. -func (ms SummaryDataPoint) SetTimestamp(v Timestamp) { - ms.orig.TimeUnixNano = uint64(v) +func (ms SummaryDataPoint) SetTimestamp(v pcommon.Timestamp) { + ms.getOrig().TimeUnixNano = uint64(v) } // Count returns the count associated with this SummaryDataPoint. func (ms SummaryDataPoint) Count() uint64 { - return ms.orig.Count + return ms.getOrig().Count } // SetCount replaces the count associated with this SummaryDataPoint. func (ms SummaryDataPoint) SetCount(v uint64) { - ms.orig.Count = v + ms.getOrig().Count = v } // Sum returns the sum associated with this SummaryDataPoint. func (ms SummaryDataPoint) Sum() float64 { - return ms.orig.Sum + return ms.getOrig().Sum } // SetSum replaces the sum associated with this SummaryDataPoint. func (ms SummaryDataPoint) SetSum(v float64) { - ms.orig.Sum = v + ms.getOrig().Sum = v } // QuantileValues returns the QuantileValues associated with this SummaryDataPoint. func (ms SummaryDataPoint) QuantileValues() ValueAtQuantileSlice { - return newValueAtQuantileSlice(&ms.orig.QuantileValues) + return ValueAtQuantileSlice(internal.NewValueAtQuantileSlice(&ms.getOrig().QuantileValues)) } // Flags returns the flags associated with this SummaryDataPoint. func (ms SummaryDataPoint) Flags() MetricDataPointFlags { - return newMetricDataPointFlags(&ms.orig.Flags) + return MetricDataPointFlags(internal.NewMetricDataPointFlags(&ms.getOrig().Flags)) } // CopyTo copies all properties from the current struct to the dest. @@ -2179,28 +2220,28 @@ func (ms SummaryDataPoint) CopyTo(dest SummaryDataPoint) { // // Must use NewValueAtQuantileSlice function to create new instances. // Important: zero-initialized instance is not valid for use. -type ValueAtQuantileSlice struct { - // orig points to the slice otlpmetrics.SummaryDataPoint_ValueAtQuantile field contained somewhere else. - // We use pointer-to-slice to be able to modify it in functions like EnsureCapacity. - orig *[]*otlpmetrics.SummaryDataPoint_ValueAtQuantile -} +type ValueAtQuantileSlice internal.ValueAtQuantileSlice func newValueAtQuantileSlice(orig *[]*otlpmetrics.SummaryDataPoint_ValueAtQuantile) ValueAtQuantileSlice { - return ValueAtQuantileSlice{orig} + return ValueAtQuantileSlice(internal.NewValueAtQuantileSlice(orig)) +} + +func (ms ValueAtQuantileSlice) getOrig() *[]*otlpmetrics.SummaryDataPoint_ValueAtQuantile { + return internal.GetOrigValueAtQuantileSlice(internal.ValueAtQuantileSlice(ms)) } // NewValueAtQuantileSlice creates a ValueAtQuantileSlice with 0 elements. // Can use "EnsureCapacity" to initialize with a given capacity. func NewValueAtQuantileSlice() ValueAtQuantileSlice { orig := []*otlpmetrics.SummaryDataPoint_ValueAtQuantile(nil) - return ValueAtQuantileSlice{&orig} + return newValueAtQuantileSlice(&orig) } // Len returns the number of elements in the slice. // // Returns "0" for a newly instance created with "NewValueAtQuantileSlice()". func (es ValueAtQuantileSlice) Len() int { - return len(*es.orig) + return len(*es.getOrig()) } // At returns the element at the given index. @@ -2212,27 +2253,27 @@ func (es ValueAtQuantileSlice) Len() int { // ... 
// Do something with the element // } func (es ValueAtQuantileSlice) At(ix int) ValueAtQuantile { - return newValueAtQuantile((*es.orig)[ix]) + return newValueAtQuantile((*es.getOrig())[ix]) } // CopyTo copies all elements from the current slice to the dest. func (es ValueAtQuantileSlice) CopyTo(dest ValueAtQuantileSlice) { srcLen := es.Len() - destCap := cap(*dest.orig) + destCap := cap(*dest.getOrig()) if srcLen <= destCap { - (*dest.orig) = (*dest.orig)[:srcLen:destCap] - for i := range *es.orig { - newValueAtQuantile((*es.orig)[i]).CopyTo(newValueAtQuantile((*dest.orig)[i])) + (*dest.getOrig()) = (*dest.getOrig())[:srcLen:destCap] + for i := range *es.getOrig() { + newValueAtQuantile((*es.getOrig())[i]).CopyTo(newValueAtQuantile((*dest.getOrig())[i])) } return } origs := make([]otlpmetrics.SummaryDataPoint_ValueAtQuantile, srcLen) wrappers := make([]*otlpmetrics.SummaryDataPoint_ValueAtQuantile, srcLen) - for i := range *es.orig { + for i := range *es.getOrig() { wrappers[i] = &origs[i] - newValueAtQuantile((*es.orig)[i]).CopyTo(newValueAtQuantile(wrappers[i])) + newValueAtQuantile((*es.getOrig())[i]).CopyTo(newValueAtQuantile(wrappers[i])) } - *dest.orig = wrappers + *dest.getOrig() = wrappers } // EnsureCapacity is an operation that ensures the slice has at least the specified capacity. @@ -2248,20 +2289,20 @@ func (es ValueAtQuantileSlice) CopyTo(dest ValueAtQuantileSlice) { // // Here should set all the values for e. // } func (es ValueAtQuantileSlice) EnsureCapacity(newCap int) { - oldCap := cap(*es.orig) + oldCap := cap(*es.getOrig()) if newCap <= oldCap { return } - newOrig := make([]*otlpmetrics.SummaryDataPoint_ValueAtQuantile, len(*es.orig), newCap) - copy(newOrig, *es.orig) - *es.orig = newOrig + newOrig := make([]*otlpmetrics.SummaryDataPoint_ValueAtQuantile, len(*es.getOrig()), newCap) + copy(newOrig, *es.getOrig()) + *es.getOrig() = newOrig } // AppendEmpty will append to the end of the slice an empty ValueAtQuantile. // It returns the newly added ValueAtQuantile. func (es ValueAtQuantileSlice) AppendEmpty() ValueAtQuantile { - *es.orig = append(*es.orig, &otlpmetrics.SummaryDataPoint_ValueAtQuantile{}) + *es.getOrig() = append(*es.getOrig(), &otlpmetrics.SummaryDataPoint_ValueAtQuantile{}) return es.At(es.Len() - 1) } @@ -2274,29 +2315,29 @@ func (es ValueAtQuantileSlice) AppendEmpty() ValueAtQuantile { // lessFunc := func(a, b ValueAtQuantile) bool { // return a.Name() < b.Name() // choose any comparison here // } -// assert.EqualValues(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) +// assert.Equal(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) func (es ValueAtQuantileSlice) Sort(less func(a, b ValueAtQuantile) bool) ValueAtQuantileSlice { - sort.SliceStable(*es.orig, func(i, j int) bool { return less(es.At(i), es.At(j)) }) + sort.SliceStable(*es.getOrig(), func(i, j int) bool { return less(es.At(i), es.At(j)) }) return es } // MoveAndAppendTo moves all elements from the current slice and appends them to the dest. // The current slice will be cleared. func (es ValueAtQuantileSlice) MoveAndAppendTo(dest ValueAtQuantileSlice) { - if *dest.orig == nil { + if *dest.getOrig() == nil { // We can simply move the entire vector and avoid any allocations. - *dest.orig = *es.orig + *dest.getOrig() = *es.getOrig() } else { - *dest.orig = append(*dest.orig, *es.orig...) + *dest.getOrig() = append(*dest.getOrig(), *es.getOrig()...) } - *es.orig = nil + *es.getOrig() = nil } // RemoveIf calls f sequentially for each element present in the slice. 
// If f returns true, the element is removed from the slice. func (es ValueAtQuantileSlice) RemoveIf(f func(ValueAtQuantile) bool) { newLen := 0 - for i := 0; i < len(*es.orig); i++ { + for i := 0; i < len(*es.getOrig()); i++ { if f(es.At(i)) { continue } @@ -2305,11 +2346,11 @@ func (es ValueAtQuantileSlice) RemoveIf(f func(ValueAtQuantile) bool) { newLen++ continue } - (*es.orig)[newLen] = (*es.orig)[i] + (*es.getOrig())[newLen] = (*es.getOrig())[i] newLen++ } // TODO: Prevent memory leak by erasing truncated values. - *es.orig = (*es.orig)[:newLen] + *es.getOrig() = (*es.getOrig())[:newLen] } // ValueAtQuantile is a quantile value within a Summary data point. @@ -2319,12 +2360,15 @@ func (es ValueAtQuantileSlice) RemoveIf(f func(ValueAtQuantile) bool) { // // Must use NewValueAtQuantile function to create new instances. // Important: zero-initialized instance is not valid for use. -type ValueAtQuantile struct { - orig *otlpmetrics.SummaryDataPoint_ValueAtQuantile -} + +type ValueAtQuantile internal.ValueAtQuantile func newValueAtQuantile(orig *otlpmetrics.SummaryDataPoint_ValueAtQuantile) ValueAtQuantile { - return ValueAtQuantile{orig: orig} + return ValueAtQuantile(internal.NewValueAtQuantile(orig)) +} + +func (ms ValueAtQuantile) getOrig() *otlpmetrics.SummaryDataPoint_ValueAtQuantile { + return internal.GetOrigValueAtQuantile(internal.ValueAtQuantile(ms)) } // NewValueAtQuantile creates a new empty ValueAtQuantile. @@ -2338,28 +2382,28 @@ func NewValueAtQuantile() ValueAtQuantile { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms ValueAtQuantile) MoveTo(dest ValueAtQuantile) { - *dest.orig = *ms.orig - *ms.orig = otlpmetrics.SummaryDataPoint_ValueAtQuantile{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlpmetrics.SummaryDataPoint_ValueAtQuantile{} } // Quantile returns the quantile associated with this ValueAtQuantile. func (ms ValueAtQuantile) Quantile() float64 { - return ms.orig.Quantile + return ms.getOrig().Quantile } // SetQuantile replaces the quantile associated with this ValueAtQuantile. func (ms ValueAtQuantile) SetQuantile(v float64) { - ms.orig.Quantile = v + ms.getOrig().Quantile = v } // Value returns the value associated with this ValueAtQuantile. func (ms ValueAtQuantile) Value() float64 { - return ms.orig.Value + return ms.getOrig().Value } // SetValue replaces the value associated with this ValueAtQuantile. func (ms ValueAtQuantile) SetValue(v float64) { - ms.orig.Value = v + ms.getOrig().Value = v } // CopyTo copies all properties from the current struct to the dest. @@ -2375,28 +2419,28 @@ func (ms ValueAtQuantile) CopyTo(dest ValueAtQuantile) { // // Must use NewExemplarSlice function to create new instances. // Important: zero-initialized instance is not valid for use. -type ExemplarSlice struct { - // orig points to the slice otlpmetrics.Exemplar field contained somewhere else. - // We use pointer-to-slice to be able to modify it in functions like EnsureCapacity. - orig *[]otlpmetrics.Exemplar -} +type ExemplarSlice internal.ExemplarSlice func newExemplarSlice(orig *[]otlpmetrics.Exemplar) ExemplarSlice { - return ExemplarSlice{orig} + return ExemplarSlice(internal.NewExemplarSlice(orig)) +} + +func (ms ExemplarSlice) getOrig() *[]otlpmetrics.Exemplar { + return internal.GetOrigExemplarSlice(internal.ExemplarSlice(ms)) } // NewExemplarSlice creates a ExemplarSlice with 0 elements. // Can use "EnsureCapacity" to initialize with a given capacity. 
func NewExemplarSlice() ExemplarSlice { orig := []otlpmetrics.Exemplar(nil) - return ExemplarSlice{&orig} + return ExemplarSlice(internal.NewExemplarSlice(&orig)) } // Len returns the number of elements in the slice. // // Returns "0" for a newly created instance with "NewExemplarSlice()". func (es ExemplarSlice) Len() int { - return len(*es.orig) + return len(*es.getOrig()) } // At returns the element at the given index. @@ -2408,21 +2452,21 @@ func (es ExemplarSlice) Len() int { // ... // Do something with the element // } func (es ExemplarSlice) At(ix int) Exemplar { - return newExemplar(&(*es.orig)[ix]) + return newExemplar(&(*es.getOrig())[ix]) } // CopyTo copies all elements from the current slice to the dest. func (es ExemplarSlice) CopyTo(dest ExemplarSlice) { srcLen := es.Len() - destCap := cap(*dest.orig) + destCap := cap(*dest.getOrig()) if srcLen <= destCap { - (*dest.orig) = (*dest.orig)[:srcLen:destCap] + (*dest.getOrig()) = (*dest.getOrig())[:srcLen:destCap] } else { - (*dest.orig) = make([]otlpmetrics.Exemplar, srcLen) + (*dest.getOrig()) = make([]otlpmetrics.Exemplar, srcLen) } - for i := range *es.orig { - newExemplar(&(*es.orig)[i]).CopyTo(newExemplar(&(*dest.orig)[i])) + for i := range *es.getOrig() { + newExemplar(&(*es.getOrig())[i]).CopyTo(newExemplar(&(*dest.getOrig())[i])) } } @@ -2439,40 +2483,40 @@ func (es ExemplarSlice) CopyTo(dest ExemplarSlice) { // // Here should set all the values for e. // } func (es ExemplarSlice) EnsureCapacity(newCap int) { - oldCap := cap(*es.orig) + oldCap := cap(*es.getOrig()) if newCap <= oldCap { return } - newOrig := make([]otlpmetrics.Exemplar, len(*es.orig), newCap) - copy(newOrig, *es.orig) - *es.orig = newOrig + newOrig := make([]otlpmetrics.Exemplar, len(*es.getOrig()), newCap) + copy(newOrig, *es.getOrig()) + *es.getOrig() = newOrig } // AppendEmpty will append to the end of the slice an empty Exemplar. // It returns the newly added Exemplar. func (es ExemplarSlice) AppendEmpty() Exemplar { - *es.orig = append(*es.orig, otlpmetrics.Exemplar{}) + *es.getOrig() = append(*es.getOrig(), otlpmetrics.Exemplar{}) return es.At(es.Len() - 1) } // MoveAndAppendTo moves all elements from the current slice and appends them to the dest. // The current slice will be cleared. func (es ExemplarSlice) MoveAndAppendTo(dest ExemplarSlice) { - if *dest.orig == nil { + if *dest.getOrig() == nil { // We can simply move the entire vector and avoid any allocations. - *dest.orig = *es.orig + *dest.getOrig() = *es.getOrig() } else { - *dest.orig = append(*dest.orig, *es.orig...) + *dest.getOrig() = append(*dest.getOrig(), *es.getOrig()...) } - *es.orig = nil + *es.getOrig() = nil } // RemoveIf calls f sequentially for each element present in the slice. // If f returns true, the element is removed from the slice. func (es ExemplarSlice) RemoveIf(f func(Exemplar) bool) { newLen := 0 - for i := 0; i < len(*es.orig); i++ { + for i := 0; i < len(*es.getOrig()); i++ { if f(es.At(i)) { continue } @@ -2481,11 +2525,11 @@ func (es ExemplarSlice) RemoveIf(f func(Exemplar) bool) { newLen++ continue } - (*es.orig)[newLen] = (*es.orig)[i] + (*es.getOrig())[newLen] = (*es.getOrig())[i] newLen++ } // TODO: Prevent memory leak by erasing truncated values. - *es.orig = (*es.orig)[:newLen] + *es.getOrig() = (*es.getOrig())[:newLen] } // Exemplar is a sample input double measurement. @@ -2498,12 +2542,15 @@ func (es ExemplarSlice) RemoveIf(f func(Exemplar) bool) { // // Must use NewExemplar function to create new instances.
// Important: zero-initialized instance is not valid for use. -type Exemplar struct { - orig *otlpmetrics.Exemplar -} + +type Exemplar internal.Exemplar func newExemplar(orig *otlpmetrics.Exemplar) Exemplar { - return Exemplar{orig: orig} + return Exemplar(internal.NewExemplar(orig)) +} + +func (ms Exemplar) getOrig() *otlpmetrics.Exemplar { + return internal.GetOrigExemplar(internal.Exemplar(ms)) } // NewExemplar creates a new empty Exemplar. @@ -2517,24 +2564,24 @@ func NewExemplar() Exemplar { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms Exemplar) MoveTo(dest Exemplar) { - *dest.orig = *ms.orig - *ms.orig = otlpmetrics.Exemplar{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlpmetrics.Exemplar{} } // Timestamp returns the timestamp associated with this Exemplar. -func (ms Exemplar) Timestamp() Timestamp { - return Timestamp(ms.orig.TimeUnixNano) +func (ms Exemplar) Timestamp() pcommon.Timestamp { + return pcommon.Timestamp(ms.getOrig().TimeUnixNano) } // SetTimestamp replaces the timestamp associated with this Exemplar. -func (ms Exemplar) SetTimestamp(v Timestamp) { - ms.orig.TimeUnixNano = uint64(v) +func (ms Exemplar) SetTimestamp(v pcommon.Timestamp) { + ms.getOrig().TimeUnixNano = uint64(v) } // ValueType returns the type of the value for this Exemplar. // Calling this function on zero-initialized Exemplar will cause a panic. func (ms Exemplar) ValueType() ExemplarValueType { - switch ms.orig.Value.(type) { + switch ms.getOrig().Value.(type) { case *otlpmetrics.Exemplar_AsDouble: return ExemplarValueTypeDouble case *otlpmetrics.Exemplar_AsInt: @@ -2545,51 +2592,51 @@ func (ms Exemplar) ValueType() ExemplarValueType { // DoubleVal returns the doubleval associated with this Exemplar. func (ms Exemplar) DoubleVal() float64 { - return ms.orig.GetAsDouble() + return ms.getOrig().GetAsDouble() } // SetDoubleVal replaces the doubleval associated with this Exemplar. func (ms Exemplar) SetDoubleVal(v float64) { - ms.orig.Value = &otlpmetrics.Exemplar_AsDouble{ + ms.getOrig().Value = &otlpmetrics.Exemplar_AsDouble{ AsDouble: v, } } // IntVal returns the intval associated with this Exemplar. func (ms Exemplar) IntVal() int64 { - return ms.orig.GetAsInt() + return ms.getOrig().GetAsInt() } // SetIntVal replaces the intval associated with this Exemplar. func (ms Exemplar) SetIntVal(v int64) { - ms.orig.Value = &otlpmetrics.Exemplar_AsInt{ + ms.getOrig().Value = &otlpmetrics.Exemplar_AsInt{ AsInt: v, } } // FilteredAttributes returns the FilteredAttributes associated with this Exemplar. -func (ms Exemplar) FilteredAttributes() Map { - return newMap(&ms.orig.FilteredAttributes) +func (ms Exemplar) FilteredAttributes() pcommon.Map { + return pcommon.Map(internal.NewMap(&ms.getOrig().FilteredAttributes)) } // TraceID returns the traceid associated with this Exemplar. -func (ms Exemplar) TraceID() TraceID { - return TraceID{orig: (ms.orig.TraceId)} +func (ms Exemplar) TraceID() pcommon.TraceID { + return pcommon.TraceID(internal.NewTraceID(ms.getOrig().TraceId)) } // SetTraceID replaces the traceid associated with this Exemplar. -func (ms Exemplar) SetTraceID(v TraceID) { - ms.orig.TraceId = v.orig +func (ms Exemplar) SetTraceID(v pcommon.TraceID) { + ms.getOrig().TraceId = internal.GetOrigTraceID(internal.TraceID(v)) } // SpanID returns the spanid associated with this Exemplar. 
-func (ms Exemplar) SpanID() SpanID { - return SpanID{orig: (ms.orig.SpanId)} +func (ms Exemplar) SpanID() pcommon.SpanID { + return pcommon.SpanID(internal.NewSpanID(ms.getOrig().SpanId)) } // SetSpanID replaces the spanid associated with this Exemplar. -func (ms Exemplar) SetSpanID(v SpanID) { - ms.orig.SpanId = v.orig +func (ms Exemplar) SetSpanID(v pcommon.SpanID) { + ms.getOrig().SpanId = internal.GetOrigSpanID(internal.SpanID(v)) } // CopyTo copies all properties from the current struct to the dest. diff --git a/pdata/pmetric/generated_metrics_test.go b/pdata/pmetric/generated_metrics_test.go new file mode 100644 index 00000000000..ba2c78fe2b8 --- /dev/null +++ b/pdata/pmetric/generated_metrics_test.go @@ -0,0 +1,1855 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. +// To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". + +package pmetric + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "go.opentelemetry.io/collector/pdata/internal" + "go.opentelemetry.io/collector/pdata/internal/data" + otlpmetrics "go.opentelemetry.io/collector/pdata/internal/data/protogen/metrics/v1" + "go.opentelemetry.io/collector/pdata/pcommon" +) + +func TestResourceMetricsSlice(t *testing.T) { + es := NewResourceMetricsSlice() + assert.Equal(t, 0, es.Len()) + es = newResourceMetricsSlice(&[]*otlpmetrics.ResourceMetrics{}) + assert.Equal(t, 0, es.Len()) + + es.EnsureCapacity(7) + emptyVal := newResourceMetrics(&otlpmetrics.ResourceMetrics{}) + testVal := ResourceMetrics(internal.GenerateTestResourceMetrics()) + assert.Equal(t, 7, cap(*es.getOrig())) + for i := 0; i < es.Len(); i++ { + el := es.AppendEmpty() + assert.Equal(t, emptyVal, el) + internal.FillTestResourceMetrics(internal.ResourceMetrics(el)) + assert.Equal(t, testVal, el) + } +} + +func TestResourceMetricsSlice_CopyTo(t *testing.T) { + dest := NewResourceMetricsSlice() + // Test CopyTo to empty + NewResourceMetricsSlice().CopyTo(dest) + assert.Equal(t, NewResourceMetricsSlice(), dest) + + // Test CopyTo larger slice + ResourceMetricsSlice(internal.GenerateTestResourceMetricsSlice()).CopyTo(dest) + assert.Equal(t, ResourceMetricsSlice(internal.GenerateTestResourceMetricsSlice()), dest) + + // Test CopyTo same size slice + ResourceMetricsSlice(internal.GenerateTestResourceMetricsSlice()).CopyTo(dest) + assert.Equal(t, ResourceMetricsSlice(internal.GenerateTestResourceMetricsSlice()), dest) +} + +func TestResourceMetricsSlice_EnsureCapacity(t *testing.T) { + es := ResourceMetricsSlice(internal.GenerateTestResourceMetricsSlice()) + // Test ensure smaller capacity. 
+ const ensureSmallLen = 4 + expectedEs := make(map[*otlpmetrics.ResourceMetrics]bool) + for i := 0; i < es.Len(); i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, es.Len(), len(expectedEs)) + es.EnsureCapacity(ensureSmallLen) + assert.Less(t, ensureSmallLen, es.Len()) + foundEs := make(map[*otlpmetrics.ResourceMetrics]bool, es.Len()) + for i := 0; i < es.Len(); i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) + + // Test ensure larger capacity + const ensureLargeLen = 9 + oldLen := es.Len() + expectedEs = make(map[*otlpmetrics.ResourceMetrics]bool, oldLen) + for i := 0; i < oldLen; i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, oldLen, len(expectedEs)) + es.EnsureCapacity(ensureLargeLen) + assert.Equal(t, ensureLargeLen, cap(*es.getOrig())) + foundEs = make(map[*otlpmetrics.ResourceMetrics]bool, oldLen) + for i := 0; i < oldLen; i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) +} + +func TestResourceMetricsSlice_MoveAndAppendTo(t *testing.T) { + // Test MoveAndAppendTo to empty + expectedSlice := ResourceMetricsSlice(internal.GenerateTestResourceMetricsSlice()) + dest := NewResourceMetricsSlice() + src := ResourceMetricsSlice(internal.GenerateTestResourceMetricsSlice()) + src.MoveAndAppendTo(dest) + assert.Equal(t, ResourceMetricsSlice(internal.GenerateTestResourceMetricsSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo empty slice + src.MoveAndAppendTo(dest) + assert.Equal(t, ResourceMetricsSlice(internal.GenerateTestResourceMetricsSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo not empty slice + ResourceMetricsSlice(internal.GenerateTestResourceMetricsSlice()).MoveAndAppendTo(dest) + assert.Equal(t, 2*expectedSlice.Len(), dest.Len()) + for i := 0; i < expectedSlice.Len(); i++ { + assert.Equal(t, expectedSlice.At(i), dest.At(i)) + assert.Equal(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) + } +} + +func TestResourceMetricsSlice_RemoveIf(t *testing.T) { + // Test RemoveIf on empty slice + emptySlice := NewResourceMetricsSlice() + emptySlice.RemoveIf(func(el ResourceMetrics) bool { + t.Fail() + return false + }) + + // Test RemoveIf + filtered := ResourceMetricsSlice(internal.GenerateTestResourceMetricsSlice()) + pos := 0 + filtered.RemoveIf(func(el ResourceMetrics) bool { + pos++ + return pos%3 == 0 + }) + assert.Equal(t, 5, filtered.Len()) +} + +func TestResourceMetrics_MoveTo(t *testing.T) { + ms := ResourceMetrics(internal.GenerateTestResourceMetrics()) + dest := NewResourceMetrics() + ms.MoveTo(dest) + assert.Equal(t, NewResourceMetrics(), ms) + assert.Equal(t, ResourceMetrics(internal.GenerateTestResourceMetrics()), dest) +} + +func TestResourceMetrics_CopyTo(t *testing.T) { + ms := NewResourceMetrics() + orig := NewResourceMetrics() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = ResourceMetrics(internal.GenerateTestResourceMetrics()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestResourceMetrics_Resource(t *testing.T) { + ms := NewResourceMetrics() + internal.FillTestResource(internal.Resource(ms.Resource())) + assert.Equal(t, pcommon.Resource(internal.GenerateTestResource()), ms.Resource()) +} + +func TestResourceMetrics_SchemaUrl(t *testing.T) { + ms := NewResourceMetrics() + assert.Equal(t, "", ms.SchemaUrl()) + ms.SetSchemaUrl("https://opentelemetry.io/schemas/1.5.0") + 
assert.Equal(t, "https://opentelemetry.io/schemas/1.5.0", ms.SchemaUrl()) +} + +func TestResourceMetrics_ScopeMetrics(t *testing.T) { + ms := NewResourceMetrics() + assert.Equal(t, NewScopeMetricsSlice(), ms.ScopeMetrics()) + internal.FillTestScopeMetricsSlice(internal.ScopeMetricsSlice(ms.ScopeMetrics())) + assert.Equal(t, ScopeMetricsSlice(internal.GenerateTestScopeMetricsSlice()), ms.ScopeMetrics()) +} + +func TestScopeMetricsSlice(t *testing.T) { + es := NewScopeMetricsSlice() + assert.Equal(t, 0, es.Len()) + es = newScopeMetricsSlice(&[]*otlpmetrics.ScopeMetrics{}) + assert.Equal(t, 0, es.Len()) + + es.EnsureCapacity(7) + emptyVal := newScopeMetrics(&otlpmetrics.ScopeMetrics{}) + testVal := ScopeMetrics(internal.GenerateTestScopeMetrics()) + assert.Equal(t, 7, cap(*es.getOrig())) + for i := 0; i < es.Len(); i++ { + el := es.AppendEmpty() + assert.Equal(t, emptyVal, el) + internal.FillTestScopeMetrics(internal.ScopeMetrics(el)) + assert.Equal(t, testVal, el) + } +} + +func TestScopeMetricsSlice_CopyTo(t *testing.T) { + dest := NewScopeMetricsSlice() + // Test CopyTo to empty + NewScopeMetricsSlice().CopyTo(dest) + assert.Equal(t, NewScopeMetricsSlice(), dest) + + // Test CopyTo larger slice + ScopeMetricsSlice(internal.GenerateTestScopeMetricsSlice()).CopyTo(dest) + assert.Equal(t, ScopeMetricsSlice(internal.GenerateTestScopeMetricsSlice()), dest) + + // Test CopyTo same size slice + ScopeMetricsSlice(internal.GenerateTestScopeMetricsSlice()).CopyTo(dest) + assert.Equal(t, ScopeMetricsSlice(internal.GenerateTestScopeMetricsSlice()), dest) +} + +func TestScopeMetricsSlice_EnsureCapacity(t *testing.T) { + es := ScopeMetricsSlice(internal.GenerateTestScopeMetricsSlice()) + // Test ensure smaller capacity. + const ensureSmallLen = 4 + expectedEs := make(map[*otlpmetrics.ScopeMetrics]bool) + for i := 0; i < es.Len(); i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, es.Len(), len(expectedEs)) + es.EnsureCapacity(ensureSmallLen) + assert.Less(t, ensureSmallLen, es.Len()) + foundEs := make(map[*otlpmetrics.ScopeMetrics]bool, es.Len()) + for i := 0; i < es.Len(); i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) + + // Test ensure larger capacity + const ensureLargeLen = 9 + oldLen := es.Len() + expectedEs = make(map[*otlpmetrics.ScopeMetrics]bool, oldLen) + for i := 0; i < oldLen; i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, oldLen, len(expectedEs)) + es.EnsureCapacity(ensureLargeLen) + assert.Equal(t, ensureLargeLen, cap(*es.getOrig())) + foundEs = make(map[*otlpmetrics.ScopeMetrics]bool, oldLen) + for i := 0; i < oldLen; i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) +} + +func TestScopeMetricsSlice_MoveAndAppendTo(t *testing.T) { + // Test MoveAndAppendTo to empty + expectedSlice := ScopeMetricsSlice(internal.GenerateTestScopeMetricsSlice()) + dest := NewScopeMetricsSlice() + src := ScopeMetricsSlice(internal.GenerateTestScopeMetricsSlice()) + src.MoveAndAppendTo(dest) + assert.Equal(t, ScopeMetricsSlice(internal.GenerateTestScopeMetricsSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo empty slice + src.MoveAndAppendTo(dest) + assert.Equal(t, ScopeMetricsSlice(internal.GenerateTestScopeMetricsSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo not empty slice + 
ScopeMetricsSlice(internal.GenerateTestScopeMetricsSlice()).MoveAndAppendTo(dest) + assert.Equal(t, 2*expectedSlice.Len(), dest.Len()) + for i := 0; i < expectedSlice.Len(); i++ { + assert.Equal(t, expectedSlice.At(i), dest.At(i)) + assert.Equal(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) + } +} + +func TestScopeMetricsSlice_RemoveIf(t *testing.T) { + // Test RemoveIf on empty slice + emptySlice := NewScopeMetricsSlice() + emptySlice.RemoveIf(func(el ScopeMetrics) bool { + t.Fail() + return false + }) + + // Test RemoveIf + filtered := ScopeMetricsSlice(internal.GenerateTestScopeMetricsSlice()) + pos := 0 + filtered.RemoveIf(func(el ScopeMetrics) bool { + pos++ + return pos%3 == 0 + }) + assert.Equal(t, 5, filtered.Len()) +} + +func TestScopeMetrics_MoveTo(t *testing.T) { + ms := ScopeMetrics(internal.GenerateTestScopeMetrics()) + dest := NewScopeMetrics() + ms.MoveTo(dest) + assert.Equal(t, NewScopeMetrics(), ms) + assert.Equal(t, ScopeMetrics(internal.GenerateTestScopeMetrics()), dest) +} + +func TestScopeMetrics_CopyTo(t *testing.T) { + ms := NewScopeMetrics() + orig := NewScopeMetrics() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = ScopeMetrics(internal.GenerateTestScopeMetrics()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestScopeMetrics_Scope(t *testing.T) { + ms := NewScopeMetrics() + internal.FillTestInstrumentationScope(internal.InstrumentationScope(ms.Scope())) + assert.Equal(t, pcommon.InstrumentationScope(internal.GenerateTestInstrumentationScope()), ms.Scope()) +} + +func TestScopeMetrics_SchemaUrl(t *testing.T) { + ms := NewScopeMetrics() + assert.Equal(t, "", ms.SchemaUrl()) + ms.SetSchemaUrl("https://opentelemetry.io/schemas/1.5.0") + assert.Equal(t, "https://opentelemetry.io/schemas/1.5.0", ms.SchemaUrl()) +} + +func TestScopeMetrics_Metrics(t *testing.T) { + ms := NewScopeMetrics() + assert.Equal(t, NewMetricSlice(), ms.Metrics()) + internal.FillTestMetricSlice(internal.MetricSlice(ms.Metrics())) + assert.Equal(t, MetricSlice(internal.GenerateTestMetricSlice()), ms.Metrics()) +} + +func TestMetricSlice(t *testing.T) { + es := NewMetricSlice() + assert.Equal(t, 0, es.Len()) + es = newMetricSlice(&[]*otlpmetrics.Metric{}) + assert.Equal(t, 0, es.Len()) + + es.EnsureCapacity(7) + emptyVal := newMetric(&otlpmetrics.Metric{}) + testVal := Metric(internal.GenerateTestMetric()) + assert.Equal(t, 7, cap(*es.getOrig())) + for i := 0; i < es.Len(); i++ { + el := es.AppendEmpty() + assert.Equal(t, emptyVal, el) + internal.FillTestMetric(internal.Metric(el)) + assert.Equal(t, testVal, el) + } +} + +func TestMetricSlice_CopyTo(t *testing.T) { + dest := NewMetricSlice() + // Test CopyTo to empty + NewMetricSlice().CopyTo(dest) + assert.Equal(t, NewMetricSlice(), dest) + + // Test CopyTo larger slice + MetricSlice(internal.GenerateTestMetricSlice()).CopyTo(dest) + assert.Equal(t, MetricSlice(internal.GenerateTestMetricSlice()), dest) + + // Test CopyTo same size slice + MetricSlice(internal.GenerateTestMetricSlice()).CopyTo(dest) + assert.Equal(t, MetricSlice(internal.GenerateTestMetricSlice()), dest) +} + +func TestMetricSlice_EnsureCapacity(t *testing.T) { + es := MetricSlice(internal.GenerateTestMetricSlice()) + // Test ensure smaller capacity. 
+ const ensureSmallLen = 4 + expectedEs := make(map[*otlpmetrics.Metric]bool) + for i := 0; i < es.Len(); i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, es.Len(), len(expectedEs)) + es.EnsureCapacity(ensureSmallLen) + assert.Less(t, ensureSmallLen, es.Len()) + foundEs := make(map[*otlpmetrics.Metric]bool, es.Len()) + for i := 0; i < es.Len(); i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) + + // Test ensure larger capacity + const ensureLargeLen = 9 + oldLen := es.Len() + expectedEs = make(map[*otlpmetrics.Metric]bool, oldLen) + for i := 0; i < oldLen; i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, oldLen, len(expectedEs)) + es.EnsureCapacity(ensureLargeLen) + assert.Equal(t, ensureLargeLen, cap(*es.getOrig())) + foundEs = make(map[*otlpmetrics.Metric]bool, oldLen) + for i := 0; i < oldLen; i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) +} + +func TestMetricSlice_MoveAndAppendTo(t *testing.T) { + // Test MoveAndAppendTo to empty + expectedSlice := MetricSlice(internal.GenerateTestMetricSlice()) + dest := NewMetricSlice() + src := MetricSlice(internal.GenerateTestMetricSlice()) + src.MoveAndAppendTo(dest) + assert.Equal(t, MetricSlice(internal.GenerateTestMetricSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo empty slice + src.MoveAndAppendTo(dest) + assert.Equal(t, MetricSlice(internal.GenerateTestMetricSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo not empty slice + MetricSlice(internal.GenerateTestMetricSlice()).MoveAndAppendTo(dest) + assert.Equal(t, 2*expectedSlice.Len(), dest.Len()) + for i := 0; i < expectedSlice.Len(); i++ { + assert.Equal(t, expectedSlice.At(i), dest.At(i)) + assert.Equal(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) + } +} + +func TestMetricSlice_RemoveIf(t *testing.T) { + // Test RemoveIf on empty slice + emptySlice := NewMetricSlice() + emptySlice.RemoveIf(func(el Metric) bool { + t.Fail() + return false + }) + + // Test RemoveIf + filtered := MetricSlice(internal.GenerateTestMetricSlice()) + pos := 0 + filtered.RemoveIf(func(el Metric) bool { + pos++ + return pos%3 == 0 + }) + assert.Equal(t, 5, filtered.Len()) +} + +func TestMetric_MoveTo(t *testing.T) { + ms := Metric(internal.GenerateTestMetric()) + dest := NewMetric() + ms.MoveTo(dest) + assert.Equal(t, NewMetric(), ms) + assert.Equal(t, Metric(internal.GenerateTestMetric()), dest) +} + +func TestMetric_CopyTo(t *testing.T) { + ms := NewMetric() + orig := NewMetric() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = Metric(internal.GenerateTestMetric()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestMetric_Name(t *testing.T) { + ms := NewMetric() + assert.Equal(t, "", ms.Name()) + ms.SetName("test_name") + assert.Equal(t, "test_name", ms.Name()) +} + +func TestMetric_Description(t *testing.T) { + ms := NewMetric() + assert.Equal(t, "", ms.Description()) + ms.SetDescription("test_description") + assert.Equal(t, "test_description", ms.Description()) +} + +func TestMetric_Unit(t *testing.T) { + ms := NewMetric() + assert.Equal(t, "", ms.Unit()) + ms.SetUnit("1") + assert.Equal(t, "1", ms.Unit()) +} + +func TestMetric_DataType(t *testing.T) { + tv := NewMetric() + assert.Equal(t, MetricDataTypeNone, tv.DataType()) +} + +func TestMetric_Gauge(t *testing.T) { + ms := NewMetric() + 
ms.SetDataType(MetricDataTypeGauge) + assert.Equal(t, MetricDataTypeGauge, ms.DataType()) + internal.FillTestGauge(internal.Gauge(ms.Gauge())) + assert.Equal(t, Gauge(internal.GenerateTestGauge()), ms.Gauge()) +} + +func TestMetric_CopyTo_Gauge(t *testing.T) { + ms := NewMetric() + ms.SetDataType(MetricDataTypeGauge) + internal.FillTestGauge(internal.Gauge(ms.Gauge())) + dest := NewMetric() + ms.CopyTo(dest) + assert.Equal(t, ms, dest) +} + +func TestMetric_Sum(t *testing.T) { + ms := NewMetric() + ms.SetDataType(MetricDataTypeSum) + assert.Equal(t, MetricDataTypeSum, ms.DataType()) + internal.FillTestSum(internal.Sum(ms.Sum())) + assert.Equal(t, Sum(internal.GenerateTestSum()), ms.Sum()) +} + +func TestMetric_CopyTo_Sum(t *testing.T) { + ms := NewMetric() + ms.SetDataType(MetricDataTypeSum) + internal.FillTestSum(internal.Sum(ms.Sum())) + dest := NewMetric() + ms.CopyTo(dest) + assert.Equal(t, ms, dest) +} + +func TestMetric_Histogram(t *testing.T) { + ms := NewMetric() + ms.SetDataType(MetricDataTypeHistogram) + assert.Equal(t, MetricDataTypeHistogram, ms.DataType()) + internal.FillTestHistogram(internal.Histogram(ms.Histogram())) + assert.Equal(t, Histogram(internal.GenerateTestHistogram()), ms.Histogram()) +} + +func TestMetric_CopyTo_Histogram(t *testing.T) { + ms := NewMetric() + ms.SetDataType(MetricDataTypeHistogram) + internal.FillTestHistogram(internal.Histogram(ms.Histogram())) + dest := NewMetric() + ms.CopyTo(dest) + assert.Equal(t, ms, dest) +} + +func TestMetric_ExponentialHistogram(t *testing.T) { + ms := NewMetric() + ms.SetDataType(MetricDataTypeExponentialHistogram) + assert.Equal(t, MetricDataTypeExponentialHistogram, ms.DataType()) + internal.FillTestExponentialHistogram(internal.ExponentialHistogram(ms.ExponentialHistogram())) + assert.Equal(t, ExponentialHistogram(internal.GenerateTestExponentialHistogram()), ms.ExponentialHistogram()) +} + +func TestMetric_CopyTo_ExponentialHistogram(t *testing.T) { + ms := NewMetric() + ms.SetDataType(MetricDataTypeExponentialHistogram) + internal.FillTestExponentialHistogram(internal.ExponentialHistogram(ms.ExponentialHistogram())) + dest := NewMetric() + ms.CopyTo(dest) + assert.Equal(t, ms, dest) +} + +func TestMetric_Summary(t *testing.T) { + ms := NewMetric() + ms.SetDataType(MetricDataTypeSummary) + assert.Equal(t, MetricDataTypeSummary, ms.DataType()) + internal.FillTestSummary(internal.Summary(ms.Summary())) + assert.Equal(t, Summary(internal.GenerateTestSummary()), ms.Summary()) +} + +func TestMetric_CopyTo_Summary(t *testing.T) { + ms := NewMetric() + ms.SetDataType(MetricDataTypeSummary) + internal.FillTestSummary(internal.Summary(ms.Summary())) + dest := NewMetric() + ms.CopyTo(dest) + assert.Equal(t, ms, dest) +} + +func TestGauge_MoveTo(t *testing.T) { + ms := Gauge(internal.GenerateTestGauge()) + dest := NewGauge() + ms.MoveTo(dest) + assert.Equal(t, NewGauge(), ms) + assert.Equal(t, Gauge(internal.GenerateTestGauge()), dest) +} + +func TestGauge_CopyTo(t *testing.T) { + ms := NewGauge() + orig := NewGauge() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = Gauge(internal.GenerateTestGauge()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestGauge_DataPoints(t *testing.T) { + ms := NewGauge() + assert.Equal(t, NewNumberDataPointSlice(), ms.DataPoints()) + internal.FillTestNumberDataPointSlice(internal.NumberDataPointSlice(ms.DataPoints())) + assert.Equal(t, NumberDataPointSlice(internal.GenerateTestNumberDataPointSlice()), ms.DataPoints()) +} + +func TestSum_MoveTo(t *testing.T) { + ms := 
Sum(internal.GenerateTestSum()) + dest := NewSum() + ms.MoveTo(dest) + assert.Equal(t, NewSum(), ms) + assert.Equal(t, Sum(internal.GenerateTestSum()), dest) +} + +func TestSum_CopyTo(t *testing.T) { + ms := NewSum() + orig := NewSum() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = Sum(internal.GenerateTestSum()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestSum_AggregationTemporality(t *testing.T) { + ms := NewSum() + assert.Equal(t, MetricAggregationTemporality(otlpmetrics.AggregationTemporality(0)), ms.AggregationTemporality()) + testValAggregationTemporality := MetricAggregationTemporality(otlpmetrics.AggregationTemporality(1)) + ms.SetAggregationTemporality(testValAggregationTemporality) + assert.Equal(t, testValAggregationTemporality, ms.AggregationTemporality()) +} + +func TestSum_IsMonotonic(t *testing.T) { + ms := NewSum() + assert.Equal(t, false, ms.IsMonotonic()) + ms.SetIsMonotonic(true) + assert.Equal(t, true, ms.IsMonotonic()) +} + +func TestSum_DataPoints(t *testing.T) { + ms := NewSum() + assert.Equal(t, NewNumberDataPointSlice(), ms.DataPoints()) + internal.FillTestNumberDataPointSlice(internal.NumberDataPointSlice(ms.DataPoints())) + assert.Equal(t, NumberDataPointSlice(internal.GenerateTestNumberDataPointSlice()), ms.DataPoints()) +} + +func TestHistogram_MoveTo(t *testing.T) { + ms := Histogram(internal.GenerateTestHistogram()) + dest := NewHistogram() + ms.MoveTo(dest) + assert.Equal(t, NewHistogram(), ms) + assert.Equal(t, Histogram(internal.GenerateTestHistogram()), dest) +} + +func TestHistogram_CopyTo(t *testing.T) { + ms := NewHistogram() + orig := NewHistogram() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = Histogram(internal.GenerateTestHistogram()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestHistogram_AggregationTemporality(t *testing.T) { + ms := NewHistogram() + assert.Equal(t, MetricAggregationTemporality(otlpmetrics.AggregationTemporality(0)), ms.AggregationTemporality()) + testValAggregationTemporality := MetricAggregationTemporality(otlpmetrics.AggregationTemporality(1)) + ms.SetAggregationTemporality(testValAggregationTemporality) + assert.Equal(t, testValAggregationTemporality, ms.AggregationTemporality()) +} + +func TestHistogram_DataPoints(t *testing.T) { + ms := NewHistogram() + assert.Equal(t, NewHistogramDataPointSlice(), ms.DataPoints()) + internal.FillTestHistogramDataPointSlice(internal.HistogramDataPointSlice(ms.DataPoints())) + assert.Equal(t, HistogramDataPointSlice(internal.GenerateTestHistogramDataPointSlice()), ms.DataPoints()) +} + +func TestExponentialHistogram_MoveTo(t *testing.T) { + ms := ExponentialHistogram(internal.GenerateTestExponentialHistogram()) + dest := NewExponentialHistogram() + ms.MoveTo(dest) + assert.Equal(t, NewExponentialHistogram(), ms) + assert.Equal(t, ExponentialHistogram(internal.GenerateTestExponentialHistogram()), dest) +} + +func TestExponentialHistogram_CopyTo(t *testing.T) { + ms := NewExponentialHistogram() + orig := NewExponentialHistogram() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = ExponentialHistogram(internal.GenerateTestExponentialHistogram()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestExponentialHistogram_AggregationTemporality(t *testing.T) { + ms := NewExponentialHistogram() + assert.Equal(t, MetricAggregationTemporality(otlpmetrics.AggregationTemporality(0)), ms.AggregationTemporality()) + testValAggregationTemporality := MetricAggregationTemporality(otlpmetrics.AggregationTemporality(1)) + 
ms.SetAggregationTemporality(testValAggregationTemporality) + assert.Equal(t, testValAggregationTemporality, ms.AggregationTemporality()) +} + +func TestExponentialHistogram_DataPoints(t *testing.T) { + ms := NewExponentialHistogram() + assert.Equal(t, NewExponentialHistogramDataPointSlice(), ms.DataPoints()) + internal.FillTestExponentialHistogramDataPointSlice(internal.ExponentialHistogramDataPointSlice(ms.DataPoints())) + assert.Equal(t, ExponentialHistogramDataPointSlice(internal.GenerateTestExponentialHistogramDataPointSlice()), ms.DataPoints()) +} + +func TestSummary_MoveTo(t *testing.T) { + ms := Summary(internal.GenerateTestSummary()) + dest := NewSummary() + ms.MoveTo(dest) + assert.Equal(t, NewSummary(), ms) + assert.Equal(t, Summary(internal.GenerateTestSummary()), dest) +} + +func TestSummary_CopyTo(t *testing.T) { + ms := NewSummary() + orig := NewSummary() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = Summary(internal.GenerateTestSummary()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestSummary_DataPoints(t *testing.T) { + ms := NewSummary() + assert.Equal(t, NewSummaryDataPointSlice(), ms.DataPoints()) + internal.FillTestSummaryDataPointSlice(internal.SummaryDataPointSlice(ms.DataPoints())) + assert.Equal(t, SummaryDataPointSlice(internal.GenerateTestSummaryDataPointSlice()), ms.DataPoints()) +} + +func TestNumberDataPointSlice(t *testing.T) { + es := NewNumberDataPointSlice() + assert.Equal(t, 0, es.Len()) + es = newNumberDataPointSlice(&[]*otlpmetrics.NumberDataPoint{}) + assert.Equal(t, 0, es.Len()) + + es.EnsureCapacity(7) + emptyVal := newNumberDataPoint(&otlpmetrics.NumberDataPoint{}) + testVal := NumberDataPoint(internal.GenerateTestNumberDataPoint()) + assert.Equal(t, 7, cap(*es.getOrig())) + for i := 0; i < es.Len(); i++ { + el := es.AppendEmpty() + assert.Equal(t, emptyVal, el) + internal.FillTestNumberDataPoint(internal.NumberDataPoint(el)) + assert.Equal(t, testVal, el) + } +} + +func TestNumberDataPointSlice_CopyTo(t *testing.T) { + dest := NewNumberDataPointSlice() + // Test CopyTo to empty + NewNumberDataPointSlice().CopyTo(dest) + assert.Equal(t, NewNumberDataPointSlice(), dest) + + // Test CopyTo larger slice + NumberDataPointSlice(internal.GenerateTestNumberDataPointSlice()).CopyTo(dest) + assert.Equal(t, NumberDataPointSlice(internal.GenerateTestNumberDataPointSlice()), dest) + + // Test CopyTo same size slice + NumberDataPointSlice(internal.GenerateTestNumberDataPointSlice()).CopyTo(dest) + assert.Equal(t, NumberDataPointSlice(internal.GenerateTestNumberDataPointSlice()), dest) +} + +func TestNumberDataPointSlice_EnsureCapacity(t *testing.T) { + es := NumberDataPointSlice(internal.GenerateTestNumberDataPointSlice()) + // Test ensure smaller capacity. 
+ const ensureSmallLen = 4 + expectedEs := make(map[*otlpmetrics.NumberDataPoint]bool) + for i := 0; i < es.Len(); i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, es.Len(), len(expectedEs)) + es.EnsureCapacity(ensureSmallLen) + assert.Less(t, ensureSmallLen, es.Len()) + foundEs := make(map[*otlpmetrics.NumberDataPoint]bool, es.Len()) + for i := 0; i < es.Len(); i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) + + // Test ensure larger capacity + const ensureLargeLen = 9 + oldLen := es.Len() + expectedEs = make(map[*otlpmetrics.NumberDataPoint]bool, oldLen) + for i := 0; i < oldLen; i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, oldLen, len(expectedEs)) + es.EnsureCapacity(ensureLargeLen) + assert.Equal(t, ensureLargeLen, cap(*es.getOrig())) + foundEs = make(map[*otlpmetrics.NumberDataPoint]bool, oldLen) + for i := 0; i < oldLen; i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) +} + +func TestNumberDataPointSlice_MoveAndAppendTo(t *testing.T) { + // Test MoveAndAppendTo to empty + expectedSlice := NumberDataPointSlice(internal.GenerateTestNumberDataPointSlice()) + dest := NewNumberDataPointSlice() + src := NumberDataPointSlice(internal.GenerateTestNumberDataPointSlice()) + src.MoveAndAppendTo(dest) + assert.Equal(t, NumberDataPointSlice(internal.GenerateTestNumberDataPointSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo empty slice + src.MoveAndAppendTo(dest) + assert.Equal(t, NumberDataPointSlice(internal.GenerateTestNumberDataPointSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo not empty slice + NumberDataPointSlice(internal.GenerateTestNumberDataPointSlice()).MoveAndAppendTo(dest) + assert.Equal(t, 2*expectedSlice.Len(), dest.Len()) + for i := 0; i < expectedSlice.Len(); i++ { + assert.Equal(t, expectedSlice.At(i), dest.At(i)) + assert.Equal(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) + } +} + +func TestNumberDataPointSlice_RemoveIf(t *testing.T) { + // Test RemoveIf on empty slice + emptySlice := NewNumberDataPointSlice() + emptySlice.RemoveIf(func(el NumberDataPoint) bool { + t.Fail() + return false + }) + + // Test RemoveIf + filtered := NumberDataPointSlice(internal.GenerateTestNumberDataPointSlice()) + pos := 0 + filtered.RemoveIf(func(el NumberDataPoint) bool { + pos++ + return pos%3 == 0 + }) + assert.Equal(t, 5, filtered.Len()) +} + +func TestNumberDataPoint_MoveTo(t *testing.T) { + ms := NumberDataPoint(internal.GenerateTestNumberDataPoint()) + dest := NewNumberDataPoint() + ms.MoveTo(dest) + assert.Equal(t, NewNumberDataPoint(), ms) + assert.Equal(t, NumberDataPoint(internal.GenerateTestNumberDataPoint()), dest) +} + +func TestNumberDataPoint_CopyTo(t *testing.T) { + ms := NewNumberDataPoint() + orig := NewNumberDataPoint() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = NumberDataPoint(internal.GenerateTestNumberDataPoint()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestNumberDataPoint_Attributes(t *testing.T) { + ms := NewNumberDataPoint() + assert.Equal(t, pcommon.NewMap(), ms.Attributes()) + internal.FillTestMap(internal.Map(ms.Attributes())) + assert.Equal(t, pcommon.Map(internal.GenerateTestMap()), ms.Attributes()) +} + +func TestNumberDataPoint_StartTimestamp(t *testing.T) { + ms := NewNumberDataPoint() + assert.Equal(t, pcommon.Timestamp(0), ms.StartTimestamp()) + 
testValStartTimestamp := pcommon.Timestamp(1234567890) + ms.SetStartTimestamp(testValStartTimestamp) + assert.Equal(t, testValStartTimestamp, ms.StartTimestamp()) +} + +func TestNumberDataPoint_Timestamp(t *testing.T) { + ms := NewNumberDataPoint() + assert.Equal(t, pcommon.Timestamp(0), ms.Timestamp()) + testValTimestamp := pcommon.Timestamp(1234567890) + ms.SetTimestamp(testValTimestamp) + assert.Equal(t, testValTimestamp, ms.Timestamp()) +} + +func TestNumberDataPoint_ValueType(t *testing.T) { + tv := NewNumberDataPoint() + assert.Equal(t, NumberDataPointValueTypeNone, tv.ValueType()) +} + +func TestNumberDataPoint_DoubleVal(t *testing.T) { + ms := NewNumberDataPoint() + assert.Equal(t, float64(0.0), ms.DoubleVal()) + ms.SetDoubleVal(float64(17.13)) + assert.Equal(t, float64(17.13), ms.DoubleVal()) + assert.Equal(t, NumberDataPointValueTypeDouble, ms.ValueType()) +} + +func TestNumberDataPoint_IntVal(t *testing.T) { + ms := NewNumberDataPoint() + assert.Equal(t, int64(0), ms.IntVal()) + ms.SetIntVal(int64(17)) + assert.Equal(t, int64(17), ms.IntVal()) + assert.Equal(t, NumberDataPointValueTypeInt, ms.ValueType()) +} + +func TestNumberDataPoint_Exemplars(t *testing.T) { + ms := NewNumberDataPoint() + assert.Equal(t, NewExemplarSlice(), ms.Exemplars()) + internal.FillTestExemplarSlice(internal.ExemplarSlice(ms.Exemplars())) + assert.Equal(t, ExemplarSlice(internal.GenerateTestExemplarSlice()), ms.Exemplars()) +} + +func TestNumberDataPoint_Flags(t *testing.T) { + ms := NewNumberDataPoint() + internal.FillTestMetricDataPointFlags(internal.MetricDataPointFlags(ms.Flags())) + assert.Equal(t, MetricDataPointFlags(internal.GenerateTestMetricDataPointFlags()), ms.Flags()) +} + +func TestHistogramDataPointSlice(t *testing.T) { + es := NewHistogramDataPointSlice() + assert.Equal(t, 0, es.Len()) + es = newHistogramDataPointSlice(&[]*otlpmetrics.HistogramDataPoint{}) + assert.Equal(t, 0, es.Len()) + + es.EnsureCapacity(7) + emptyVal := newHistogramDataPoint(&otlpmetrics.HistogramDataPoint{}) + testVal := HistogramDataPoint(internal.GenerateTestHistogramDataPoint()) + assert.Equal(t, 7, cap(*es.getOrig())) + for i := 0; i < es.Len(); i++ { + el := es.AppendEmpty() + assert.Equal(t, emptyVal, el) + internal.FillTestHistogramDataPoint(internal.HistogramDataPoint(el)) + assert.Equal(t, testVal, el) + } +} + +func TestHistogramDataPointSlice_CopyTo(t *testing.T) { + dest := NewHistogramDataPointSlice() + // Test CopyTo to empty + NewHistogramDataPointSlice().CopyTo(dest) + assert.Equal(t, NewHistogramDataPointSlice(), dest) + + // Test CopyTo larger slice + HistogramDataPointSlice(internal.GenerateTestHistogramDataPointSlice()).CopyTo(dest) + assert.Equal(t, HistogramDataPointSlice(internal.GenerateTestHistogramDataPointSlice()), dest) + + // Test CopyTo same size slice + HistogramDataPointSlice(internal.GenerateTestHistogramDataPointSlice()).CopyTo(dest) + assert.Equal(t, HistogramDataPointSlice(internal.GenerateTestHistogramDataPointSlice()), dest) +} + +func TestHistogramDataPointSlice_EnsureCapacity(t *testing.T) { + es := HistogramDataPointSlice(internal.GenerateTestHistogramDataPointSlice()) + // Test ensure smaller capacity. 
+ const ensureSmallLen = 4 + expectedEs := make(map[*otlpmetrics.HistogramDataPoint]bool) + for i := 0; i < es.Len(); i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, es.Len(), len(expectedEs)) + es.EnsureCapacity(ensureSmallLen) + assert.Less(t, ensureSmallLen, es.Len()) + foundEs := make(map[*otlpmetrics.HistogramDataPoint]bool, es.Len()) + for i := 0; i < es.Len(); i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) + + // Test ensure larger capacity + const ensureLargeLen = 9 + oldLen := es.Len() + expectedEs = make(map[*otlpmetrics.HistogramDataPoint]bool, oldLen) + for i := 0; i < oldLen; i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, oldLen, len(expectedEs)) + es.EnsureCapacity(ensureLargeLen) + assert.Equal(t, ensureLargeLen, cap(*es.getOrig())) + foundEs = make(map[*otlpmetrics.HistogramDataPoint]bool, oldLen) + for i := 0; i < oldLen; i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) +} + +func TestHistogramDataPointSlice_MoveAndAppendTo(t *testing.T) { + // Test MoveAndAppendTo to empty + expectedSlice := HistogramDataPointSlice(internal.GenerateTestHistogramDataPointSlice()) + dest := NewHistogramDataPointSlice() + src := HistogramDataPointSlice(internal.GenerateTestHistogramDataPointSlice()) + src.MoveAndAppendTo(dest) + assert.Equal(t, HistogramDataPointSlice(internal.GenerateTestHistogramDataPointSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo empty slice + src.MoveAndAppendTo(dest) + assert.Equal(t, HistogramDataPointSlice(internal.GenerateTestHistogramDataPointSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo not empty slice + HistogramDataPointSlice(internal.GenerateTestHistogramDataPointSlice()).MoveAndAppendTo(dest) + assert.Equal(t, 2*expectedSlice.Len(), dest.Len()) + for i := 0; i < expectedSlice.Len(); i++ { + assert.Equal(t, expectedSlice.At(i), dest.At(i)) + assert.Equal(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) + } +} + +func TestHistogramDataPointSlice_RemoveIf(t *testing.T) { + // Test RemoveIf on empty slice + emptySlice := NewHistogramDataPointSlice() + emptySlice.RemoveIf(func(el HistogramDataPoint) bool { + t.Fail() + return false + }) + + // Test RemoveIf + filtered := HistogramDataPointSlice(internal.GenerateTestHistogramDataPointSlice()) + pos := 0 + filtered.RemoveIf(func(el HistogramDataPoint) bool { + pos++ + return pos%3 == 0 + }) + assert.Equal(t, 5, filtered.Len()) +} + +func TestHistogramDataPoint_MoveTo(t *testing.T) { + ms := HistogramDataPoint(internal.GenerateTestHistogramDataPoint()) + dest := NewHistogramDataPoint() + ms.MoveTo(dest) + assert.Equal(t, NewHistogramDataPoint(), ms) + assert.Equal(t, HistogramDataPoint(internal.GenerateTestHistogramDataPoint()), dest) +} + +func TestHistogramDataPoint_CopyTo(t *testing.T) { + ms := NewHistogramDataPoint() + orig := NewHistogramDataPoint() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = HistogramDataPoint(internal.GenerateTestHistogramDataPoint()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestHistogramDataPoint_Attributes(t *testing.T) { + ms := NewHistogramDataPoint() + assert.Equal(t, pcommon.NewMap(), ms.Attributes()) + internal.FillTestMap(internal.Map(ms.Attributes())) + assert.Equal(t, pcommon.Map(internal.GenerateTestMap()), ms.Attributes()) +} + +func TestHistogramDataPoint_StartTimestamp(t 
*testing.T) { + ms := NewHistogramDataPoint() + assert.Equal(t, pcommon.Timestamp(0), ms.StartTimestamp()) + testValStartTimestamp := pcommon.Timestamp(1234567890) + ms.SetStartTimestamp(testValStartTimestamp) + assert.Equal(t, testValStartTimestamp, ms.StartTimestamp()) +} + +func TestHistogramDataPoint_Timestamp(t *testing.T) { + ms := NewHistogramDataPoint() + assert.Equal(t, pcommon.Timestamp(0), ms.Timestamp()) + testValTimestamp := pcommon.Timestamp(1234567890) + ms.SetTimestamp(testValTimestamp) + assert.Equal(t, testValTimestamp, ms.Timestamp()) +} + +func TestHistogramDataPoint_Count(t *testing.T) { + ms := NewHistogramDataPoint() + assert.Equal(t, uint64(0), ms.Count()) + ms.SetCount(uint64(17)) + assert.Equal(t, uint64(17), ms.Count()) +} + +func TestHistogramDataPoint_Sum(t *testing.T) { + ms := NewHistogramDataPoint() + assert.Equal(t, float64(0.0), ms.Sum()) + ms.SetSum(float64(17.13)) + assert.Equal(t, float64(17.13), ms.Sum()) +} + +func TestHistogramDataPoint_BucketCounts(t *testing.T) { + ms := NewHistogramDataPoint() + assert.Equal(t, pcommon.ImmutableUInt64Slice(internal.NewImmutableUInt64Slice([]uint64(nil))), ms.BucketCounts()) + testValBucketCounts := pcommon.ImmutableUInt64Slice(internal.NewImmutableUInt64Slice([]uint64{1, 2, 3})) + ms.SetBucketCounts(testValBucketCounts) + assert.Equal(t, testValBucketCounts, ms.BucketCounts()) +} + +func TestHistogramDataPoint_ExplicitBounds(t *testing.T) { + ms := NewHistogramDataPoint() + assert.Equal(t, pcommon.ImmutableFloat64Slice(internal.NewImmutableFloat64Slice([]float64(nil))), ms.ExplicitBounds()) + testValExplicitBounds := pcommon.ImmutableFloat64Slice(internal.NewImmutableFloat64Slice([]float64{1, 2, 3})) + ms.SetExplicitBounds(testValExplicitBounds) + assert.Equal(t, testValExplicitBounds, ms.ExplicitBounds()) +} + +func TestHistogramDataPoint_Exemplars(t *testing.T) { + ms := NewHistogramDataPoint() + assert.Equal(t, NewExemplarSlice(), ms.Exemplars()) + internal.FillTestExemplarSlice(internal.ExemplarSlice(ms.Exemplars())) + assert.Equal(t, ExemplarSlice(internal.GenerateTestExemplarSlice()), ms.Exemplars()) +} + +func TestHistogramDataPoint_Flags(t *testing.T) { + ms := NewHistogramDataPoint() + internal.FillTestMetricDataPointFlags(internal.MetricDataPointFlags(ms.Flags())) + assert.Equal(t, MetricDataPointFlags(internal.GenerateTestMetricDataPointFlags()), ms.Flags()) +} + +func TestHistogramDataPoint_Min(t *testing.T) { + ms := NewHistogramDataPoint() + assert.Equal(t, float64(0.0), ms.Min()) + ms.SetMin(float64(9.23)) + assert.Equal(t, float64(9.23), ms.Min()) +} + +func TestHistogramDataPoint_Max(t *testing.T) { + ms := NewHistogramDataPoint() + assert.Equal(t, float64(0.0), ms.Max()) + ms.SetMax(float64(182.55)) + assert.Equal(t, float64(182.55), ms.Max()) +} + +func TestExponentialHistogramDataPointSlice(t *testing.T) { + es := NewExponentialHistogramDataPointSlice() + assert.Equal(t, 0, es.Len()) + es = newExponentialHistogramDataPointSlice(&[]*otlpmetrics.ExponentialHistogramDataPoint{}) + assert.Equal(t, 0, es.Len()) + + es.EnsureCapacity(7) + emptyVal := newExponentialHistogramDataPoint(&otlpmetrics.ExponentialHistogramDataPoint{}) + testVal := ExponentialHistogramDataPoint(internal.GenerateTestExponentialHistogramDataPoint()) + assert.Equal(t, 7, cap(*es.getOrig())) + for i := 0; i < es.Len(); i++ { + el := es.AppendEmpty() + assert.Equal(t, emptyVal, el) + internal.FillTestExponentialHistogramDataPoint(internal.ExponentialHistogramDataPoint(el)) + assert.Equal(t, testVal, el) + } +} + +func 
TestExponentialHistogramDataPointSlice_CopyTo(t *testing.T) { + dest := NewExponentialHistogramDataPointSlice() + // Test CopyTo to empty + NewExponentialHistogramDataPointSlice().CopyTo(dest) + assert.Equal(t, NewExponentialHistogramDataPointSlice(), dest) + + // Test CopyTo larger slice + ExponentialHistogramDataPointSlice(internal.GenerateTestExponentialHistogramDataPointSlice()).CopyTo(dest) + assert.Equal(t, ExponentialHistogramDataPointSlice(internal.GenerateTestExponentialHistogramDataPointSlice()), dest) + + // Test CopyTo same size slice + ExponentialHistogramDataPointSlice(internal.GenerateTestExponentialHistogramDataPointSlice()).CopyTo(dest) + assert.Equal(t, ExponentialHistogramDataPointSlice(internal.GenerateTestExponentialHistogramDataPointSlice()), dest) +} + +func TestExponentialHistogramDataPointSlice_EnsureCapacity(t *testing.T) { + es := ExponentialHistogramDataPointSlice(internal.GenerateTestExponentialHistogramDataPointSlice()) + // Test ensure smaller capacity. + const ensureSmallLen = 4 + expectedEs := make(map[*otlpmetrics.ExponentialHistogramDataPoint]bool) + for i := 0; i < es.Len(); i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, es.Len(), len(expectedEs)) + es.EnsureCapacity(ensureSmallLen) + assert.Less(t, ensureSmallLen, es.Len()) + foundEs := make(map[*otlpmetrics.ExponentialHistogramDataPoint]bool, es.Len()) + for i := 0; i < es.Len(); i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) + + // Test ensure larger capacity + const ensureLargeLen = 9 + oldLen := es.Len() + expectedEs = make(map[*otlpmetrics.ExponentialHistogramDataPoint]bool, oldLen) + for i := 0; i < oldLen; i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, oldLen, len(expectedEs)) + es.EnsureCapacity(ensureLargeLen) + assert.Equal(t, ensureLargeLen, cap(*es.getOrig())) + foundEs = make(map[*otlpmetrics.ExponentialHistogramDataPoint]bool, oldLen) + for i := 0; i < oldLen; i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) +} + +func TestExponentialHistogramDataPointSlice_MoveAndAppendTo(t *testing.T) { + // Test MoveAndAppendTo to empty + expectedSlice := ExponentialHistogramDataPointSlice(internal.GenerateTestExponentialHistogramDataPointSlice()) + dest := NewExponentialHistogramDataPointSlice() + src := ExponentialHistogramDataPointSlice(internal.GenerateTestExponentialHistogramDataPointSlice()) + src.MoveAndAppendTo(dest) + assert.Equal(t, ExponentialHistogramDataPointSlice(internal.GenerateTestExponentialHistogramDataPointSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo empty slice + src.MoveAndAppendTo(dest) + assert.Equal(t, ExponentialHistogramDataPointSlice(internal.GenerateTestExponentialHistogramDataPointSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo not empty slice + ExponentialHistogramDataPointSlice(internal.GenerateTestExponentialHistogramDataPointSlice()).MoveAndAppendTo(dest) + assert.Equal(t, 2*expectedSlice.Len(), dest.Len()) + for i := 0; i < expectedSlice.Len(); i++ { + assert.Equal(t, expectedSlice.At(i), dest.At(i)) + assert.Equal(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) + } +} + +func TestExponentialHistogramDataPointSlice_RemoveIf(t *testing.T) { + // Test RemoveIf on empty slice + emptySlice := NewExponentialHistogramDataPointSlice() + emptySlice.RemoveIf(func(el ExponentialHistogramDataPoint) 
bool { + t.Fail() + return false + }) + + // Test RemoveIf + filtered := ExponentialHistogramDataPointSlice(internal.GenerateTestExponentialHistogramDataPointSlice()) + pos := 0 + filtered.RemoveIf(func(el ExponentialHistogramDataPoint) bool { + pos++ + return pos%3 == 0 + }) + assert.Equal(t, 5, filtered.Len()) +} + +func TestExponentialHistogramDataPoint_MoveTo(t *testing.T) { + ms := ExponentialHistogramDataPoint(internal.GenerateTestExponentialHistogramDataPoint()) + dest := NewExponentialHistogramDataPoint() + ms.MoveTo(dest) + assert.Equal(t, NewExponentialHistogramDataPoint(), ms) + assert.Equal(t, ExponentialHistogramDataPoint(internal.GenerateTestExponentialHistogramDataPoint()), dest) +} + +func TestExponentialHistogramDataPoint_CopyTo(t *testing.T) { + ms := NewExponentialHistogramDataPoint() + orig := NewExponentialHistogramDataPoint() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = ExponentialHistogramDataPoint(internal.GenerateTestExponentialHistogramDataPoint()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestExponentialHistogramDataPoint_Attributes(t *testing.T) { + ms := NewExponentialHistogramDataPoint() + assert.Equal(t, pcommon.NewMap(), ms.Attributes()) + internal.FillTestMap(internal.Map(ms.Attributes())) + assert.Equal(t, pcommon.Map(internal.GenerateTestMap()), ms.Attributes()) +} + +func TestExponentialHistogramDataPoint_StartTimestamp(t *testing.T) { + ms := NewExponentialHistogramDataPoint() + assert.Equal(t, pcommon.Timestamp(0), ms.StartTimestamp()) + testValStartTimestamp := pcommon.Timestamp(1234567890) + ms.SetStartTimestamp(testValStartTimestamp) + assert.Equal(t, testValStartTimestamp, ms.StartTimestamp()) +} + +func TestExponentialHistogramDataPoint_Timestamp(t *testing.T) { + ms := NewExponentialHistogramDataPoint() + assert.Equal(t, pcommon.Timestamp(0), ms.Timestamp()) + testValTimestamp := pcommon.Timestamp(1234567890) + ms.SetTimestamp(testValTimestamp) + assert.Equal(t, testValTimestamp, ms.Timestamp()) +} + +func TestExponentialHistogramDataPoint_Count(t *testing.T) { + ms := NewExponentialHistogramDataPoint() + assert.Equal(t, uint64(0), ms.Count()) + ms.SetCount(uint64(17)) + assert.Equal(t, uint64(17), ms.Count()) +} + +func TestExponentialHistogramDataPoint_Sum(t *testing.T) { + ms := NewExponentialHistogramDataPoint() + assert.Equal(t, float64(0.0), ms.Sum()) + ms.SetSum(float64(17.13)) + assert.Equal(t, float64(17.13), ms.Sum()) +} + +func TestExponentialHistogramDataPoint_Scale(t *testing.T) { + ms := NewExponentialHistogramDataPoint() + assert.Equal(t, int32(0), ms.Scale()) + ms.SetScale(int32(4)) + assert.Equal(t, int32(4), ms.Scale()) +} + +func TestExponentialHistogramDataPoint_ZeroCount(t *testing.T) { + ms := NewExponentialHistogramDataPoint() + assert.Equal(t, uint64(0), ms.ZeroCount()) + ms.SetZeroCount(uint64(201)) + assert.Equal(t, uint64(201), ms.ZeroCount()) +} + +func TestExponentialHistogramDataPoint_Positive(t *testing.T) { + ms := NewExponentialHistogramDataPoint() + internal.FillTestBuckets(internal.Buckets(ms.Positive())) + assert.Equal(t, Buckets(internal.GenerateTestBuckets()), ms.Positive()) +} + +func TestExponentialHistogramDataPoint_Negative(t *testing.T) { + ms := NewExponentialHistogramDataPoint() + internal.FillTestBuckets(internal.Buckets(ms.Negative())) + assert.Equal(t, Buckets(internal.GenerateTestBuckets()), ms.Negative()) +} + +func TestExponentialHistogramDataPoint_Exemplars(t *testing.T) { + ms := NewExponentialHistogramDataPoint() + assert.Equal(t, NewExemplarSlice(), ms.Exemplars()) + 
internal.FillTestExemplarSlice(internal.ExemplarSlice(ms.Exemplars())) + assert.Equal(t, ExemplarSlice(internal.GenerateTestExemplarSlice()), ms.Exemplars()) +} + +func TestExponentialHistogramDataPoint_Flags(t *testing.T) { + ms := NewExponentialHistogramDataPoint() + internal.FillTestMetricDataPointFlags(internal.MetricDataPointFlags(ms.Flags())) + assert.Equal(t, MetricDataPointFlags(internal.GenerateTestMetricDataPointFlags()), ms.Flags()) +} + +func TestExponentialHistogramDataPoint_Min(t *testing.T) { + ms := NewExponentialHistogramDataPoint() + assert.Equal(t, float64(0.0), ms.Min()) + ms.SetMin(float64(9.23)) + assert.Equal(t, float64(9.23), ms.Min()) +} + +func TestExponentialHistogramDataPoint_Max(t *testing.T) { + ms := NewExponentialHistogramDataPoint() + assert.Equal(t, float64(0.0), ms.Max()) + ms.SetMax(float64(182.55)) + assert.Equal(t, float64(182.55), ms.Max()) +} + +func TestBuckets_MoveTo(t *testing.T) { + ms := Buckets(internal.GenerateTestBuckets()) + dest := NewBuckets() + ms.MoveTo(dest) + assert.Equal(t, NewBuckets(), ms) + assert.Equal(t, Buckets(internal.GenerateTestBuckets()), dest) +} + +func TestBuckets_CopyTo(t *testing.T) { + ms := NewBuckets() + orig := NewBuckets() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = Buckets(internal.GenerateTestBuckets()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestBuckets_Offset(t *testing.T) { + ms := NewBuckets() + assert.Equal(t, int32(0), ms.Offset()) + ms.SetOffset(int32(909)) + assert.Equal(t, int32(909), ms.Offset()) +} + +func TestBuckets_BucketCounts(t *testing.T) { + ms := NewBuckets() + assert.Equal(t, pcommon.ImmutableUInt64Slice(internal.NewImmutableUInt64Slice([]uint64(nil))), ms.BucketCounts()) + testValBucketCounts := pcommon.ImmutableUInt64Slice(internal.NewImmutableUInt64Slice([]uint64{1, 2, 3})) + ms.SetBucketCounts(testValBucketCounts) + assert.Equal(t, testValBucketCounts, ms.BucketCounts()) +} + +func TestSummaryDataPointSlice(t *testing.T) { + es := NewSummaryDataPointSlice() + assert.Equal(t, 0, es.Len()) + es = newSummaryDataPointSlice(&[]*otlpmetrics.SummaryDataPoint{}) + assert.Equal(t, 0, es.Len()) + + es.EnsureCapacity(7) + emptyVal := newSummaryDataPoint(&otlpmetrics.SummaryDataPoint{}) + testVal := SummaryDataPoint(internal.GenerateTestSummaryDataPoint()) + assert.Equal(t, 7, cap(*es.getOrig())) + for i := 0; i < es.Len(); i++ { + el := es.AppendEmpty() + assert.Equal(t, emptyVal, el) + internal.FillTestSummaryDataPoint(internal.SummaryDataPoint(el)) + assert.Equal(t, testVal, el) + } +} + +func TestSummaryDataPointSlice_CopyTo(t *testing.T) { + dest := NewSummaryDataPointSlice() + // Test CopyTo to empty + NewSummaryDataPointSlice().CopyTo(dest) + assert.Equal(t, NewSummaryDataPointSlice(), dest) + + // Test CopyTo larger slice + SummaryDataPointSlice(internal.GenerateTestSummaryDataPointSlice()).CopyTo(dest) + assert.Equal(t, SummaryDataPointSlice(internal.GenerateTestSummaryDataPointSlice()), dest) + + // Test CopyTo same size slice + SummaryDataPointSlice(internal.GenerateTestSummaryDataPointSlice()).CopyTo(dest) + assert.Equal(t, SummaryDataPointSlice(internal.GenerateTestSummaryDataPointSlice()), dest) +} + +func TestSummaryDataPointSlice_EnsureCapacity(t *testing.T) { + es := SummaryDataPointSlice(internal.GenerateTestSummaryDataPointSlice()) + // Test ensure smaller capacity. 
+ const ensureSmallLen = 4 + expectedEs := make(map[*otlpmetrics.SummaryDataPoint]bool) + for i := 0; i < es.Len(); i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, es.Len(), len(expectedEs)) + es.EnsureCapacity(ensureSmallLen) + assert.Less(t, ensureSmallLen, es.Len()) + foundEs := make(map[*otlpmetrics.SummaryDataPoint]bool, es.Len()) + for i := 0; i < es.Len(); i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) + + // Test ensure larger capacity + const ensureLargeLen = 9 + oldLen := es.Len() + expectedEs = make(map[*otlpmetrics.SummaryDataPoint]bool, oldLen) + for i := 0; i < oldLen; i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, oldLen, len(expectedEs)) + es.EnsureCapacity(ensureLargeLen) + assert.Equal(t, ensureLargeLen, cap(*es.getOrig())) + foundEs = make(map[*otlpmetrics.SummaryDataPoint]bool, oldLen) + for i := 0; i < oldLen; i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) +} + +func TestSummaryDataPointSlice_MoveAndAppendTo(t *testing.T) { + // Test MoveAndAppendTo to empty + expectedSlice := SummaryDataPointSlice(internal.GenerateTestSummaryDataPointSlice()) + dest := NewSummaryDataPointSlice() + src := SummaryDataPointSlice(internal.GenerateTestSummaryDataPointSlice()) + src.MoveAndAppendTo(dest) + assert.Equal(t, SummaryDataPointSlice(internal.GenerateTestSummaryDataPointSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo empty slice + src.MoveAndAppendTo(dest) + assert.Equal(t, SummaryDataPointSlice(internal.GenerateTestSummaryDataPointSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo not empty slice + SummaryDataPointSlice(internal.GenerateTestSummaryDataPointSlice()).MoveAndAppendTo(dest) + assert.Equal(t, 2*expectedSlice.Len(), dest.Len()) + for i := 0; i < expectedSlice.Len(); i++ { + assert.Equal(t, expectedSlice.At(i), dest.At(i)) + assert.Equal(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) + } +} + +func TestSummaryDataPointSlice_RemoveIf(t *testing.T) { + // Test RemoveIf on empty slice + emptySlice := NewSummaryDataPointSlice() + emptySlice.RemoveIf(func(el SummaryDataPoint) bool { + t.Fail() + return false + }) + + // Test RemoveIf + filtered := SummaryDataPointSlice(internal.GenerateTestSummaryDataPointSlice()) + pos := 0 + filtered.RemoveIf(func(el SummaryDataPoint) bool { + pos++ + return pos%3 == 0 + }) + assert.Equal(t, 5, filtered.Len()) +} + +func TestSummaryDataPoint_MoveTo(t *testing.T) { + ms := SummaryDataPoint(internal.GenerateTestSummaryDataPoint()) + dest := NewSummaryDataPoint() + ms.MoveTo(dest) + assert.Equal(t, NewSummaryDataPoint(), ms) + assert.Equal(t, SummaryDataPoint(internal.GenerateTestSummaryDataPoint()), dest) +} + +func TestSummaryDataPoint_CopyTo(t *testing.T) { + ms := NewSummaryDataPoint() + orig := NewSummaryDataPoint() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = SummaryDataPoint(internal.GenerateTestSummaryDataPoint()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestSummaryDataPoint_Attributes(t *testing.T) { + ms := NewSummaryDataPoint() + assert.Equal(t, pcommon.NewMap(), ms.Attributes()) + internal.FillTestMap(internal.Map(ms.Attributes())) + assert.Equal(t, pcommon.Map(internal.GenerateTestMap()), ms.Attributes()) +} + +func TestSummaryDataPoint_StartTimestamp(t *testing.T) { + ms := NewSummaryDataPoint() + assert.Equal(t, 
pcommon.Timestamp(0), ms.StartTimestamp()) + testValStartTimestamp := pcommon.Timestamp(1234567890) + ms.SetStartTimestamp(testValStartTimestamp) + assert.Equal(t, testValStartTimestamp, ms.StartTimestamp()) +} + +func TestSummaryDataPoint_Timestamp(t *testing.T) { + ms := NewSummaryDataPoint() + assert.Equal(t, pcommon.Timestamp(0), ms.Timestamp()) + testValTimestamp := pcommon.Timestamp(1234567890) + ms.SetTimestamp(testValTimestamp) + assert.Equal(t, testValTimestamp, ms.Timestamp()) +} + +func TestSummaryDataPoint_Count(t *testing.T) { + ms := NewSummaryDataPoint() + assert.Equal(t, uint64(0), ms.Count()) + ms.SetCount(uint64(17)) + assert.Equal(t, uint64(17), ms.Count()) +} + +func TestSummaryDataPoint_Sum(t *testing.T) { + ms := NewSummaryDataPoint() + assert.Equal(t, float64(0.0), ms.Sum()) + ms.SetSum(float64(17.13)) + assert.Equal(t, float64(17.13), ms.Sum()) +} + +func TestSummaryDataPoint_QuantileValues(t *testing.T) { + ms := NewSummaryDataPoint() + assert.Equal(t, NewValueAtQuantileSlice(), ms.QuantileValues()) + internal.FillTestValueAtQuantileSlice(internal.ValueAtQuantileSlice(ms.QuantileValues())) + assert.Equal(t, ValueAtQuantileSlice(internal.GenerateTestValueAtQuantileSlice()), ms.QuantileValues()) +} + +func TestSummaryDataPoint_Flags(t *testing.T) { + ms := NewSummaryDataPoint() + internal.FillTestMetricDataPointFlags(internal.MetricDataPointFlags(ms.Flags())) + assert.Equal(t, MetricDataPointFlags(internal.GenerateTestMetricDataPointFlags()), ms.Flags()) +} + +func TestValueAtQuantileSlice(t *testing.T) { + es := NewValueAtQuantileSlice() + assert.Equal(t, 0, es.Len()) + es = newValueAtQuantileSlice(&[]*otlpmetrics.SummaryDataPoint_ValueAtQuantile{}) + assert.Equal(t, 0, es.Len()) + + es.EnsureCapacity(7) + emptyVal := newValueAtQuantile(&otlpmetrics.SummaryDataPoint_ValueAtQuantile{}) + testVal := ValueAtQuantile(internal.GenerateTestValueAtQuantile()) + assert.Equal(t, 7, cap(*es.getOrig())) + for i := 0; i < es.Len(); i++ { + el := es.AppendEmpty() + assert.Equal(t, emptyVal, el) + internal.FillTestValueAtQuantile(internal.ValueAtQuantile(el)) + assert.Equal(t, testVal, el) + } +} + +func TestValueAtQuantileSlice_CopyTo(t *testing.T) { + dest := NewValueAtQuantileSlice() + // Test CopyTo to empty + NewValueAtQuantileSlice().CopyTo(dest) + assert.Equal(t, NewValueAtQuantileSlice(), dest) + + // Test CopyTo larger slice + ValueAtQuantileSlice(internal.GenerateTestValueAtQuantileSlice()).CopyTo(dest) + assert.Equal(t, ValueAtQuantileSlice(internal.GenerateTestValueAtQuantileSlice()), dest) + + // Test CopyTo same size slice + ValueAtQuantileSlice(internal.GenerateTestValueAtQuantileSlice()).CopyTo(dest) + assert.Equal(t, ValueAtQuantileSlice(internal.GenerateTestValueAtQuantileSlice()), dest) +} + +func TestValueAtQuantileSlice_EnsureCapacity(t *testing.T) { + es := ValueAtQuantileSlice(internal.GenerateTestValueAtQuantileSlice()) + // Test ensure smaller capacity. 
+ const ensureSmallLen = 4 + expectedEs := make(map[*otlpmetrics.SummaryDataPoint_ValueAtQuantile]bool) + for i := 0; i < es.Len(); i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, es.Len(), len(expectedEs)) + es.EnsureCapacity(ensureSmallLen) + assert.Less(t, ensureSmallLen, es.Len()) + foundEs := make(map[*otlpmetrics.SummaryDataPoint_ValueAtQuantile]bool, es.Len()) + for i := 0; i < es.Len(); i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) + + // Test ensure larger capacity + const ensureLargeLen = 9 + oldLen := es.Len() + expectedEs = make(map[*otlpmetrics.SummaryDataPoint_ValueAtQuantile]bool, oldLen) + for i := 0; i < oldLen; i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, oldLen, len(expectedEs)) + es.EnsureCapacity(ensureLargeLen) + assert.Equal(t, ensureLargeLen, cap(*es.getOrig())) + foundEs = make(map[*otlpmetrics.SummaryDataPoint_ValueAtQuantile]bool, oldLen) + for i := 0; i < oldLen; i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) +} + +func TestValueAtQuantileSlice_MoveAndAppendTo(t *testing.T) { + // Test MoveAndAppendTo to empty + expectedSlice := ValueAtQuantileSlice(internal.GenerateTestValueAtQuantileSlice()) + dest := NewValueAtQuantileSlice() + src := ValueAtQuantileSlice(internal.GenerateTestValueAtQuantileSlice()) + src.MoveAndAppendTo(dest) + assert.Equal(t, ValueAtQuantileSlice(internal.GenerateTestValueAtQuantileSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo empty slice + src.MoveAndAppendTo(dest) + assert.Equal(t, ValueAtQuantileSlice(internal.GenerateTestValueAtQuantileSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo not empty slice + ValueAtQuantileSlice(internal.GenerateTestValueAtQuantileSlice()).MoveAndAppendTo(dest) + assert.Equal(t, 2*expectedSlice.Len(), dest.Len()) + for i := 0; i < expectedSlice.Len(); i++ { + assert.Equal(t, expectedSlice.At(i), dest.At(i)) + assert.Equal(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) + } +} + +func TestValueAtQuantileSlice_RemoveIf(t *testing.T) { + // Test RemoveIf on empty slice + emptySlice := NewValueAtQuantileSlice() + emptySlice.RemoveIf(func(el ValueAtQuantile) bool { + t.Fail() + return false + }) + + // Test RemoveIf + filtered := ValueAtQuantileSlice(internal.GenerateTestValueAtQuantileSlice()) + pos := 0 + filtered.RemoveIf(func(el ValueAtQuantile) bool { + pos++ + return pos%3 == 0 + }) + assert.Equal(t, 5, filtered.Len()) +} + +func TestValueAtQuantile_MoveTo(t *testing.T) { + ms := ValueAtQuantile(internal.GenerateTestValueAtQuantile()) + dest := NewValueAtQuantile() + ms.MoveTo(dest) + assert.Equal(t, NewValueAtQuantile(), ms) + assert.Equal(t, ValueAtQuantile(internal.GenerateTestValueAtQuantile()), dest) +} + +func TestValueAtQuantile_CopyTo(t *testing.T) { + ms := NewValueAtQuantile() + orig := NewValueAtQuantile() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = ValueAtQuantile(internal.GenerateTestValueAtQuantile()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestValueAtQuantile_Quantile(t *testing.T) { + ms := NewValueAtQuantile() + assert.Equal(t, float64(0.0), ms.Quantile()) + ms.SetQuantile(float64(17.13)) + assert.Equal(t, float64(17.13), ms.Quantile()) +} + +func TestValueAtQuantile_Value(t *testing.T) { + ms := NewValueAtQuantile() + assert.Equal(t, float64(0.0), ms.Value()) + 
ms.SetValue(float64(17.13)) + assert.Equal(t, float64(17.13), ms.Value()) +} + +func TestExemplarSlice(t *testing.T) { + es := NewExemplarSlice() + assert.Equal(t, 0, es.Len()) + es = newExemplarSlice(&[]otlpmetrics.Exemplar{}) + assert.Equal(t, 0, es.Len()) + + es.EnsureCapacity(7) + emptyVal := newExemplar(&otlpmetrics.Exemplar{}) + testVal := Exemplar(internal.GenerateTestExemplar()) + assert.Equal(t, 7, cap(*es.getOrig())) + for i := 0; i < es.Len(); i++ { + el := es.AppendEmpty() + assert.Equal(t, emptyVal, el) + internal.FillTestExemplar(internal.Exemplar(el)) + assert.Equal(t, testVal, el) + } +} + +func TestExemplarSlice_CopyTo(t *testing.T) { + dest := NewExemplarSlice() + // Test CopyTo to empty + NewExemplarSlice().CopyTo(dest) + assert.Equal(t, NewExemplarSlice(), dest) + + // Test CopyTo larger slice + ExemplarSlice(internal.GenerateTestExemplarSlice()).CopyTo(dest) + assert.Equal(t, ExemplarSlice(internal.GenerateTestExemplarSlice()), dest) + + // Test CopyTo same size slice + ExemplarSlice(internal.GenerateTestExemplarSlice()).CopyTo(dest) + assert.Equal(t, ExemplarSlice(internal.GenerateTestExemplarSlice()), dest) +} + +func TestExemplarSlice_EnsureCapacity(t *testing.T) { + es := ExemplarSlice(internal.GenerateTestExemplarSlice()) + // Test ensure smaller capacity. + const ensureSmallLen = 4 + expectedEs := make(map[*otlpmetrics.Exemplar]bool) + for i := 0; i < es.Len(); i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, es.Len(), len(expectedEs)) + es.EnsureCapacity(ensureSmallLen) + assert.Less(t, ensureSmallLen, es.Len()) + foundEs := make(map[*otlpmetrics.Exemplar]bool, es.Len()) + for i := 0; i < es.Len(); i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) + + // Test ensure larger capacity + const ensureLargeLen = 9 + oldLen := es.Len() + assert.Equal(t, oldLen, len(expectedEs)) + es.EnsureCapacity(ensureLargeLen) + assert.Equal(t, ensureLargeLen, cap(*es.getOrig())) +} + +func TestExemplarSlice_MoveAndAppendTo(t *testing.T) { + // Test MoveAndAppendTo to empty + expectedSlice := ExemplarSlice(internal.GenerateTestExemplarSlice()) + dest := NewExemplarSlice() + src := ExemplarSlice(internal.GenerateTestExemplarSlice()) + src.MoveAndAppendTo(dest) + assert.Equal(t, ExemplarSlice(internal.GenerateTestExemplarSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo empty slice + src.MoveAndAppendTo(dest) + assert.Equal(t, ExemplarSlice(internal.GenerateTestExemplarSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo not empty slice + ExemplarSlice(internal.GenerateTestExemplarSlice()).MoveAndAppendTo(dest) + assert.Equal(t, 2*expectedSlice.Len(), dest.Len()) + for i := 0; i < expectedSlice.Len(); i++ { + assert.Equal(t, expectedSlice.At(i), dest.At(i)) + assert.Equal(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) + } +} + +func TestExemplarSlice_RemoveIf(t *testing.T) { + // Test RemoveIf on empty slice + emptySlice := NewExemplarSlice() + emptySlice.RemoveIf(func(el Exemplar) bool { + t.Fail() + return false + }) + + // Test RemoveIf + filtered := ExemplarSlice(internal.GenerateTestExemplarSlice()) + pos := 0 + filtered.RemoveIf(func(el Exemplar) bool { + pos++ + return pos%3 == 0 + }) + assert.Equal(t, 5, filtered.Len()) +} + +func TestExemplar_MoveTo(t *testing.T) { + ms := Exemplar(internal.GenerateTestExemplar()) + dest := NewExemplar() + ms.MoveTo(dest) + 
assert.Equal(t, NewExemplar(), ms) + assert.Equal(t, Exemplar(internal.GenerateTestExemplar()), dest) +} + +func TestExemplar_CopyTo(t *testing.T) { + ms := NewExemplar() + orig := NewExemplar() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = Exemplar(internal.GenerateTestExemplar()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestExemplar_Timestamp(t *testing.T) { + ms := NewExemplar() + assert.Equal(t, pcommon.Timestamp(0), ms.Timestamp()) + testValTimestamp := pcommon.Timestamp(1234567890) + ms.SetTimestamp(testValTimestamp) + assert.Equal(t, testValTimestamp, ms.Timestamp()) +} + +func TestExemplar_ValueType(t *testing.T) { + tv := NewExemplar() + assert.Equal(t, ExemplarValueTypeNone, tv.ValueType()) +} + +func TestExemplar_DoubleVal(t *testing.T) { + ms := NewExemplar() + assert.Equal(t, float64(0.0), ms.DoubleVal()) + ms.SetDoubleVal(float64(17.13)) + assert.Equal(t, float64(17.13), ms.DoubleVal()) + assert.Equal(t, ExemplarValueTypeDouble, ms.ValueType()) +} + +func TestExemplar_IntVal(t *testing.T) { + ms := NewExemplar() + assert.Equal(t, int64(0), ms.IntVal()) + ms.SetIntVal(int64(17)) + assert.Equal(t, int64(17), ms.IntVal()) + assert.Equal(t, ExemplarValueTypeInt, ms.ValueType()) +} + +func TestExemplar_FilteredAttributes(t *testing.T) { + ms := NewExemplar() + assert.Equal(t, pcommon.NewMap(), ms.FilteredAttributes()) + internal.FillTestMap(internal.Map(ms.FilteredAttributes())) + assert.Equal(t, pcommon.Map(internal.GenerateTestMap()), ms.FilteredAttributes()) +} + +func TestExemplar_TraceID(t *testing.T) { + ms := NewExemplar() + assert.Equal(t, pcommon.TraceID(internal.NewTraceID(data.NewTraceID([16]byte{}))), ms.TraceID()) + testValTraceID := pcommon.TraceID(internal.NewTraceID(data.NewTraceID([16]byte{1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1}))) + ms.SetTraceID(testValTraceID) + assert.Equal(t, testValTraceID, ms.TraceID()) +} + +func TestExemplar_SpanID(t *testing.T) { + ms := NewExemplar() + assert.Equal(t, pcommon.SpanID(internal.NewSpanID(data.NewSpanID([8]byte{}))), ms.SpanID()) + testValSpanID := pcommon.SpanID(internal.NewSpanID(data.NewSpanID([8]byte{1, 2, 3, 4, 5, 6, 7, 8}))) + ms.SetSpanID(testValSpanID) + assert.Equal(t, testValSpanID, ms.SpanID()) +} diff --git a/pdata/pmetric/json.go b/pdata/pmetric/json.go index f8aeb6919f0..396f135daf6 100644 --- a/pdata/pmetric/json.go +++ b/pdata/pmetric/json.go @@ -42,7 +42,8 @@ func newJSONMarshaler() *jsonMarshaler { func (e *jsonMarshaler) MarshalMetrics(md Metrics) ([]byte, error) { buf := bytes.Buffer{} - err := e.delegate.Marshal(&buf, internal.MetricsToOtlp(md)) + pb := internal.MetricsToProto(internal.Metrics(md)) + err := e.delegate.Marshal(&buf, &pb) return buf.Bytes(), err } @@ -62,7 +63,7 @@ func (d *jsonUnmarshaler) UnmarshalMetrics(buf []byte) (Metrics, error) { return Metrics{}, iter.Error } otlp.MigrateMetrics(md.ResourceMetrics) - return internal.MetricsFromProto(md), nil + return Metrics(internal.MetricsFromProto(md)), nil } func (d *jsonUnmarshaler) readMetricsData(iter *jsoniter.Iterator) otlpmetrics.MetricsData { diff --git a/pdata/pmetric/json_test.go b/pdata/pmetric/json_test.go index 74f4106cd8e..23457c921f4 100644 --- a/pdata/pmetric/json_test.go +++ b/pdata/pmetric/json_test.go @@ -22,8 +22,8 @@ import ( jsoniter "github.com/json-iterator/go" "github.com/stretchr/testify/assert" - "go.opentelemetry.io/collector/pdata/internal" otlpmetrics "go.opentelemetry.io/collector/pdata/internal/data/protogen/metrics/v1" + 
"go.opentelemetry.io/collector/pdata/pcommon" ) var metricsOTLP = func() Metrics { @@ -77,27 +77,27 @@ var metricsSumOTLPFull = func() Metrics { sumData := m.Metrics().AppendEmpty() sumData.SetName("test sum") sumData.SetDescription("test sum") - sumData.SetDataType(internal.MetricDataTypeSum) + sumData.SetDataType(MetricDataTypeSum) sumData.SetUnit("unit") - sumData.Sum().SetAggregationTemporality(internal.MetricAggregationTemporalityCumulative) + sumData.Sum().SetAggregationTemporality(MetricAggregationTemporalityCumulative) sumData.Sum().SetIsMonotonic(true) datapoint := sumData.Sum().DataPoints().AppendEmpty() - datapoint.SetStartTimestamp(internal.NewTimestampFromTime(time.Now())) + datapoint.SetStartTimestamp(pcommon.NewTimestampFromTime(time.Now())) datapoint.SetIntVal(100) datapoint.Attributes().UpsertString("string", "value") datapoint.Attributes().UpsertBool("bool", true) datapoint.Attributes().UpsertInt("int", 1) datapoint.Attributes().UpsertDouble("double", 1.1) - datapoint.Attributes().UpsertBytes("bytes", internal.NewImmutableByteSlice([]byte("foo"))) + datapoint.Attributes().UpsertBytes("bytes", pcommon.NewImmutableByteSlice([]byte("foo"))) exemplar := datapoint.Exemplars().AppendEmpty() exemplar.SetDoubleVal(99.3) - exemplar.SetTimestamp(internal.NewTimestampFromTime(time.Now())) - traceID := internal.NewTraceID([16]byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10}) - spanID := internal.NewSpanID([8]byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18}) + exemplar.SetTimestamp(pcommon.NewTimestampFromTime(time.Now())) + traceID := pcommon.NewTraceID([16]byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10}) + spanID := pcommon.NewSpanID([8]byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18}) exemplar.SetSpanID(spanID) exemplar.SetTraceID(traceID) exemplar.FilteredAttributes().UpsertString("service.name", "testService") - datapoint.SetTimestamp(internal.NewTimestampFromTime(time.Now())) + datapoint.SetTimestamp(pcommon.NewTimestampFromTime(time.Now())) return metric } @@ -118,25 +118,25 @@ var metricsGaugeOTLPFull = func() Metrics { gaugeData := m.Metrics().AppendEmpty() gaugeData.SetName("test gauge") gaugeData.SetDescription("test gauge") - gaugeData.SetDataType(internal.MetricDataTypeGauge) + gaugeData.SetDataType(MetricDataTypeGauge) gaugeData.SetUnit("unit") datapoint := gaugeData.Gauge().DataPoints().AppendEmpty() - datapoint.SetStartTimestamp(internal.NewTimestampFromTime(time.Now())) + datapoint.SetStartTimestamp(pcommon.NewTimestampFromTime(time.Now())) datapoint.SetDoubleVal(10.2) datapoint.Attributes().UpsertString("string", "value") datapoint.Attributes().UpsertBool("bool", true) datapoint.Attributes().UpsertInt("int", 1) datapoint.Attributes().UpsertDouble("double", 1.1) - datapoint.Attributes().UpsertBytes("bytes", internal.NewImmutableByteSlice([]byte("foo"))) + datapoint.Attributes().UpsertBytes("bytes", pcommon.NewImmutableByteSlice([]byte("foo"))) exemplar := datapoint.Exemplars().AppendEmpty() exemplar.SetDoubleVal(99.3) - exemplar.SetTimestamp(internal.NewTimestampFromTime(time.Now())) - traceID := internal.NewTraceID([16]byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10}) - spanID := internal.NewSpanID([8]byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18}) + exemplar.SetTimestamp(pcommon.NewTimestampFromTime(time.Now())) + traceID := pcommon.NewTraceID([16]byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 
0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10}) + spanID := pcommon.NewSpanID([8]byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18}) exemplar.SetSpanID(spanID) exemplar.SetTraceID(traceID) exemplar.FilteredAttributes().UpsertString("service.name", "testService") - datapoint.SetTimestamp(internal.NewTimestampFromTime(time.Now())) + datapoint.SetTimestamp(pcommon.NewTimestampFromTime(time.Now())) return metric } @@ -157,31 +157,31 @@ var metricsHistogramOTLPFull = func() Metrics { histogramData := m.Metrics().AppendEmpty() histogramData.SetName("test Histogram") histogramData.SetDescription("test Histogram") - histogramData.SetDataType(internal.MetricDataTypeHistogram) + histogramData.SetDataType(MetricDataTypeHistogram) histogramData.SetUnit("unit") histogramData.Histogram().SetAggregationTemporality(MetricAggregationTemporalityCumulative) datapoint := histogramData.Histogram().DataPoints().AppendEmpty() - datapoint.SetStartTimestamp(internal.NewTimestampFromTime(time.Now())) + datapoint.SetStartTimestamp(pcommon.NewTimestampFromTime(time.Now())) datapoint.Attributes().UpsertString("string", "value") datapoint.Attributes().UpsertBool("bool", true) datapoint.Attributes().UpsertInt("int", 1) datapoint.Attributes().UpsertDouble("double", 1.1) - datapoint.Attributes().UpsertBytes("bytes", internal.NewImmutableByteSlice([]byte("foo"))) + datapoint.Attributes().UpsertBytes("bytes", pcommon.NewImmutableByteSlice([]byte("foo"))) datapoint.SetCount(4) datapoint.SetSum(345) - datapoint.SetBucketCounts(internal.NewImmutableUInt64Slice([]uint64{1, 1, 2})) - datapoint.SetExplicitBounds(internal.NewImmutableFloat64Slice([]float64{10, 100})) + datapoint.SetBucketCounts(pcommon.NewImmutableUInt64Slice([]uint64{1, 1, 2})) + datapoint.SetExplicitBounds(pcommon.NewImmutableFloat64Slice([]float64{10, 100})) exemplar := datapoint.Exemplars().AppendEmpty() exemplar.SetDoubleVal(99.3) - exemplar.SetTimestamp(internal.NewTimestampFromTime(time.Now())) + exemplar.SetTimestamp(pcommon.NewTimestampFromTime(time.Now())) datapoint.SetMin(float64(time.Now().Unix())) - traceID := internal.NewTraceID([16]byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10}) - spanID := internal.NewSpanID([8]byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18}) + traceID := pcommon.NewTraceID([16]byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10}) + spanID := pcommon.NewSpanID([8]byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18}) exemplar.SetSpanID(spanID) exemplar.SetTraceID(traceID) exemplar.FilteredAttributes().UpsertString("service.name", "testService") datapoint.SetMax(float64(time.Now().Unix())) - datapoint.SetTimestamp(internal.NewTimestampFromTime(time.Now())) + datapoint.SetTimestamp(pcommon.NewTimestampFromTime(time.Now())) return metric } @@ -202,35 +202,35 @@ var metricsExponentialHistogramOTLPFull = func() Metrics { histogramData := m.Metrics().AppendEmpty() histogramData.SetName("test ExponentialHistogram") histogramData.SetDescription("test ExponentialHistogram") - histogramData.SetDataType(internal.MetricDataTypeExponentialHistogram) + histogramData.SetDataType(MetricDataTypeExponentialHistogram) histogramData.SetUnit("unit") histogramData.ExponentialHistogram().SetAggregationTemporality(MetricAggregationTemporalityCumulative) datapoint := histogramData.ExponentialHistogram().DataPoints().AppendEmpty() datapoint.SetScale(1) - datapoint.SetStartTimestamp(internal.NewTimestampFromTime(time.Now())) + 
datapoint.SetStartTimestamp(pcommon.NewTimestampFromTime(time.Now())) datapoint.Attributes().UpsertString("string", "value") datapoint.Attributes().UpsertBool("bool", true) datapoint.Attributes().UpsertInt("int", 1) datapoint.Attributes().UpsertDouble("double", 1.1) - datapoint.Attributes().UpsertBytes("bytes", internal.NewImmutableByteSlice([]byte("foo"))) + datapoint.Attributes().UpsertBytes("bytes", pcommon.NewImmutableByteSlice([]byte("foo"))) datapoint.SetCount(4) datapoint.SetSum(345) - datapoint.Positive().SetBucketCounts(internal.NewImmutableUInt64Slice([]uint64{1, 1, 2})) + datapoint.Positive().SetBucketCounts(pcommon.NewImmutableUInt64Slice([]uint64{1, 1, 2})) datapoint.Positive().SetOffset(2) exemplar := datapoint.Exemplars().AppendEmpty() exemplar.SetDoubleVal(99.3) - exemplar.SetTimestamp(internal.NewTimestampFromTime(time.Now())) + exemplar.SetTimestamp(pcommon.NewTimestampFromTime(time.Now())) datapoint.SetMin(float64(time.Now().Unix())) - traceID := internal.NewTraceID([16]byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10}) - spanID := internal.NewSpanID([8]byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18}) + traceID := pcommon.NewTraceID([16]byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10}) + spanID := pcommon.NewSpanID([8]byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18}) exemplar.SetSpanID(spanID) exemplar.SetTraceID(traceID) exemplar.FilteredAttributes().UpsertString("service.name", "testService") datapoint.SetMax(float64(time.Now().Unix())) - datapoint.Negative().SetBucketCounts(internal.NewImmutableUInt64Slice([]uint64{1, 1, 2})) + datapoint.Negative().SetBucketCounts(pcommon.NewImmutableUInt64Slice([]uint64{1, 1, 2})) datapoint.Negative().SetOffset(2) datapoint.SetZeroCount(5) - datapoint.SetTimestamp(internal.NewTimestampFromTime(time.Now())) + datapoint.SetTimestamp(pcommon.NewTimestampFromTime(time.Now())) return metric } @@ -251,10 +251,10 @@ var metricsSummaryOTLPFull = func() Metrics { sumData := m.Metrics().AppendEmpty() sumData.SetName("test summary") sumData.SetDescription("test summary") - sumData.SetDataType(internal.MetricDataTypeSummary) + sumData.SetDataType(MetricDataTypeSummary) sumData.SetUnit("unit") datapoint := sumData.Summary().DataPoints().AppendEmpty() - datapoint.SetStartTimestamp(internal.NewTimestampFromTime(time.Now())) + datapoint.SetStartTimestamp(pcommon.NewTimestampFromTime(time.Now())) datapoint.SetCount(100) datapoint.SetSum(100) quantile := datapoint.QuantileValues().AppendEmpty() @@ -264,8 +264,8 @@ var metricsSummaryOTLPFull = func() Metrics { datapoint.Attributes().UpsertBool("bool", true) datapoint.Attributes().UpsertInt("int", 1) datapoint.Attributes().UpsertDouble("double", 1.1) - datapoint.Attributes().UpsertBytes("bytes", internal.NewImmutableByteSlice([]byte("foo"))) - datapoint.SetTimestamp(internal.NewTimestampFromTime(time.Now())) + datapoint.Attributes().UpsertBytes("bytes", pcommon.NewImmutableByteSlice([]byte("foo"))) + datapoint.SetTimestamp(pcommon.NewTimestampFromTime(time.Now())) return metric } @@ -343,7 +343,7 @@ func TestReadMetricsDataUnknownField(t *testing.T) { assert.EqualValues(t, otlpmetrics.MetricsData{}, value) } -func TestExemplar_IntVal(t *testing.T) { +func TestExemplarIntVal(t *testing.T) { tests := []struct { name string jsonStr string @@ -411,6 +411,7 @@ func TestReadResourceMetricsResourceUnknown(t *testing.T) { assert.NoError(t, iter.Error) assert.EqualValues(t, 
&otlpmetrics.ResourceMetrics{}, value) } + func TestReadResourceMetricsUnknownField(t *testing.T) { jsonStr := `{"exists":"true"}` iter := jsoniter.ConfigFastest.BorrowIterator([]byte(jsonStr)) diff --git a/pdata/internal/metrics.go b/pdata/pmetric/metrics.go similarity index 76% rename from pdata/internal/metrics.go rename to pdata/pmetric/metrics.go index 8300fd71c54..2e7f1e8744f 100644 --- a/pdata/internal/metrics.go +++ b/pdata/pmetric/metrics.go @@ -12,71 +12,54 @@ // See the License for the specific language governing permissions and // limitations under the License. -package internal // import "go.opentelemetry.io/collector/pdata/internal" +package pmetric // import "go.opentelemetry.io/collector/pdata/pmetric" import ( + "go.opentelemetry.io/collector/pdata/internal" otlpcollectormetrics "go.opentelemetry.io/collector/pdata/internal/data/protogen/collector/metrics/v1" otlpmetrics "go.opentelemetry.io/collector/pdata/internal/data/protogen/metrics/v1" ) -// MetricsToOtlp internal helper to convert Metrics to otlp request representation. -func MetricsToOtlp(mw Metrics) *otlpcollectormetrics.ExportMetricsServiceRequest { - return mw.orig -} - -// MetricsFromOtlp internal helper to convert otlp request representation to Metrics. -func MetricsFromOtlp(orig *otlpcollectormetrics.ExportMetricsServiceRequest) Metrics { - return Metrics{orig: orig} -} - -// MetricsToProto internal helper to convert Metrics to protobuf representation. -func MetricsToProto(l Metrics) otlpmetrics.MetricsData { - return otlpmetrics.MetricsData{ - ResourceMetrics: l.orig.ResourceMetrics, - } -} +// Metrics is the top-level struct that is propagated through the metrics pipeline. +// Use NewMetrics to create new instance, zero-initialized instance is not valid for use. +type Metrics internal.Metrics -// MetricsFromProto internal helper to convert protobuf representation to Metrics. -func MetricsFromProto(orig otlpmetrics.MetricsData) Metrics { - return Metrics{orig: &otlpcollectormetrics.ExportMetricsServiceRequest{ - ResourceMetrics: orig.ResourceMetrics, - }} +func newMetrics(orig *otlpcollectormetrics.ExportMetricsServiceRequest) Metrics { + return Metrics(internal.NewMetrics(orig)) } -// Metrics is the top-level struct that is propagated through the metrics pipeline. -// Use NewMetrics to create new instance, zero-initialized instance is not valid for use. -type Metrics struct { - orig *otlpcollectormetrics.ExportMetricsServiceRequest +func (ms Metrics) getOrig() *otlpcollectormetrics.ExportMetricsServiceRequest { + return internal.GetOrigMetrics(internal.Metrics(ms)) } // NewMetrics creates a new Metrics struct. func NewMetrics() Metrics { - return Metrics{orig: &otlpcollectormetrics.ExportMetricsServiceRequest{}} + return newMetrics(&otlpcollectormetrics.ExportMetricsServiceRequest{}) } // Clone returns a copy of MetricData. -func (md Metrics) Clone() Metrics { +func (ms Metrics) Clone() Metrics { cloneMd := NewMetrics() - md.ResourceMetrics().CopyTo(cloneMd.ResourceMetrics()) + ms.ResourceMetrics().CopyTo(cloneMd.ResourceMetrics()) return cloneMd } // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value. 
-func (md Metrics) MoveTo(dest Metrics) { - *dest.orig = *md.orig - *md.orig = otlpcollectormetrics.ExportMetricsServiceRequest{} +func (ms Metrics) MoveTo(dest Metrics) { + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlpcollectormetrics.ExportMetricsServiceRequest{} } // ResourceMetrics returns the ResourceMetricsSlice associated with this Metrics. -func (md Metrics) ResourceMetrics() ResourceMetricsSlice { - return newResourceMetricsSlice(&md.orig.ResourceMetrics) +func (ms Metrics) ResourceMetrics() ResourceMetricsSlice { + return newResourceMetricsSlice(&ms.getOrig().ResourceMetrics) } // MetricCount calculates the total number of metrics. -func (md Metrics) MetricCount() int { +func (ms Metrics) MetricCount() int { metricCount := 0 - rms := md.ResourceMetrics() + rms := ms.ResourceMetrics() for i := 0; i < rms.Len(); i++ { rm := rms.At(i) ilms := rm.ScopeMetrics() @@ -89,8 +72,8 @@ func (md Metrics) MetricCount() int { } // DataPointCount calculates the total number of data points. -func (md Metrics) DataPointCount() (dataPointCount int) { - rms := md.ResourceMetrics() +func (ms Metrics) DataPointCount() (dataPointCount int) { + rms := ms.ResourceMetrics() for i := 0; i < rms.Len(); i++ { rm := rms.At(i) ilms := rm.ScopeMetrics() @@ -153,15 +136,15 @@ func (mdt MetricDataType) String() string { func (ms Metric) SetDataType(ty MetricDataType) { switch ty { case MetricDataTypeGauge: - ms.orig.Data = &otlpmetrics.Metric_Gauge{Gauge: &otlpmetrics.Gauge{}} + ms.getOrig().Data = &otlpmetrics.Metric_Gauge{Gauge: &otlpmetrics.Gauge{}} case MetricDataTypeSum: - ms.orig.Data = &otlpmetrics.Metric_Sum{Sum: &otlpmetrics.Sum{}} + ms.getOrig().Data = &otlpmetrics.Metric_Sum{Sum: &otlpmetrics.Sum{}} case MetricDataTypeHistogram: - ms.orig.Data = &otlpmetrics.Metric_Histogram{Histogram: &otlpmetrics.Histogram{}} + ms.getOrig().Data = &otlpmetrics.Metric_Histogram{Histogram: &otlpmetrics.Histogram{}} case MetricDataTypeExponentialHistogram: - ms.orig.Data = &otlpmetrics.Metric_ExponentialHistogram{ExponentialHistogram: &otlpmetrics.ExponentialHistogram{}} + ms.getOrig().Data = &otlpmetrics.Metric_ExponentialHistogram{ExponentialHistogram: &otlpmetrics.ExponentialHistogram{}} case MetricDataTypeSummary: - ms.orig.Data = &otlpmetrics.Metric_Summary{Summary: &otlpmetrics.Summary{}} + ms.getOrig().Data = &otlpmetrics.Metric_Summary{Summary: &otlpmetrics.Summary{}} } } @@ -191,12 +174,14 @@ func (at MetricAggregationTemporality) String() string { // // Must use NewMetricDataPointFlagsStruct function to create new instances. // Important: zero-initialized instance is not valid for use. -type MetricDataPointFlags struct { - orig *uint32 -} +type MetricDataPointFlags internal.MetricDataPointFlags func newMetricDataPointFlags(orig *uint32) MetricDataPointFlags { - return MetricDataPointFlags{orig: orig} + return MetricDataPointFlags(internal.NewMetricDataPointFlags(orig)) +} + +func (ms MetricDataPointFlags) getOrig() *uint32 { + return internal.GetOrigMetricDataPointFlags(internal.MetricDataPointFlags(ms)) } // NewMetricDataPointFlags creates a new empty MetricDataPointFlags. 
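The hunks above turn `pmetric.Metrics` and `pmetric.MetricDataPointFlags` into type definitions over their `internal` wrappers, with `getOrig()` going through `internal.GetOrigMetrics` / `internal.GetOrigMetricDataPointFlags`. A minimal sketch of the public surface these wrappers sit behind (illustrative only, not part of the patch; it uses only constructors and accessors that appear in the hunks of this change):

```golang
package main

import (
	"fmt"

	"go.opentelemetry.io/collector/pdata/pmetric"
)

func main() {
	// Everyday pmetric usage is expressed purely in terms of the public
	// wrapper types; the protobuf origin stays hidden behind the internal
	// package whether the public types are aliases or type definitions.
	md := pmetric.NewMetrics()
	rm := md.ResourceMetrics().AppendEmpty()
	sm := rm.ScopeMetrics().AppendEmpty()
	m := sm.Metrics().AppendEmpty()
	m.SetName("example.sum")
	m.SetDataType(pmetric.MetricDataTypeSum)
	m.Sum().DataPoints().AppendEmpty().SetIntVal(1)

	// MetricCount and DataPointCount walk the same wrapped protobuf tree.
	fmt.Println(md.MetricCount(), md.DataPointCount()) // 1 1
}
```

Because `Metrics` is now a distinct named type rather than an alias, the conversions to and from `internal.Metrics` in the hunks above have to be explicit, but they only copy the small wrapper struct and add no runtime cost.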
@@ -210,33 +195,33 @@ func NewMetricDataPointFlags() MetricDataPointFlags { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms MetricDataPointFlags) MoveTo(dest MetricDataPointFlags) { - *dest.orig = *ms.orig - *ms.orig = uint32(otlpmetrics.DataPointFlags_FLAG_NONE) + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = uint32(otlpmetrics.DataPointFlags_FLAG_NONE) } // CopyTo copies all properties from the current struct to the dest. func (ms MetricDataPointFlags) CopyTo(dest MetricDataPointFlags) { - *dest.orig = *ms.orig + *dest.getOrig() = *ms.getOrig() } // NoRecordedValue returns true if the MetricDataPointFlags contains the NO_RECORDED_VALUE flag. func (ms MetricDataPointFlags) NoRecordedValue() bool { - return *ms.orig&uint32(otlpmetrics.DataPointFlags_FLAG_NO_RECORDED_VALUE) != 0 + return *ms.getOrig()&uint32(otlpmetrics.DataPointFlags_FLAG_NO_RECORDED_VALUE) != 0 } // SetNoRecordedValue sets the FLAG_NO_RECORDED_VALUE flag if true and removes it if false. // Setting this Flag when it is already set will change nothing. func (ms MetricDataPointFlags) SetNoRecordedValue(b bool) { if b { - *ms.orig |= uint32(otlpmetrics.DataPointFlags_FLAG_NO_RECORDED_VALUE) + *ms.getOrig() |= uint32(otlpmetrics.DataPointFlags_FLAG_NO_RECORDED_VALUE) } else { - *ms.orig &^= uint32(otlpmetrics.DataPointFlags_FLAG_NO_RECORDED_VALUE) + *ms.getOrig() &^= uint32(otlpmetrics.DataPointFlags_FLAG_NO_RECORDED_VALUE) } } // AsRaw converts MetricDataPointFlags to the OTLP uint32 representation. func (ms MetricDataPointFlags) AsRaw() uint32 { - return *ms.orig + return *ms.getOrig() } // NumberDataPointValueType specifies the type of NumberDataPoint value. diff --git a/pdata/internal/metrics_test.go b/pdata/pmetric/metrics_test.go similarity index 89% rename from pdata/internal/metrics_test.go rename to pdata/pmetric/metrics_test.go index 7cd3ca9b0a5..cfb902d134e 100644 --- a/pdata/internal/metrics_test.go +++ b/pdata/pmetric/metrics_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package internal +package pmetric import ( "testing" @@ -22,10 +22,12 @@ import ( goproto "google.golang.org/protobuf/proto" "google.golang.org/protobuf/types/known/emptypb" + "go.opentelemetry.io/collector/pdata/internal" otlpcollectormetrics "go.opentelemetry.io/collector/pdata/internal/data/protogen/collector/metrics/v1" otlpcommon "go.opentelemetry.io/collector/pdata/internal/data/protogen/common/v1" otlpmetrics "go.opentelemetry.io/collector/pdata/internal/data/protogen/metrics/v1" otlpresource "go.opentelemetry.io/collector/pdata/internal/data/protogen/resource/v1" + "go.opentelemetry.io/collector/pdata/pcommon" ) const ( @@ -63,10 +65,11 @@ func TestResourceMetricsWireCompatibility(t *testing.T) { // this repository are wire compatible. // Generate ResourceMetrics as pdata struct. - metrics := generateTestResourceMetrics() + metrics := NewMetrics() + internal.FillTestResourceMetricsSlice(internal.ResourceMetricsSlice(metrics.ResourceMetrics())) // Marshal its underlying ProtoBuf to wire. - wire1, err := gogoproto.Marshal(metrics.orig) + wire1, err := gogoproto.Marshal(metrics.getOrig()) assert.NoError(t, err) assert.NotNil(t, wire1) @@ -81,13 +84,13 @@ func TestResourceMetricsWireCompatibility(t *testing.T) { assert.NotNil(t, wire2) // Unmarshal from the wire into gogoproto's representation. 
- var gogoprotoRM otlpmetrics.ResourceMetrics + var gogoprotoRM otlpcollectormetrics.ExportMetricsServiceRequest err = gogoproto.Unmarshal(wire2, &gogoprotoRM) assert.NoError(t, err) // Now compare that the original and final ProtoBuf messages are the same. // This proves that goproto and gogoproto marshaling/unmarshaling are wire compatible. - assert.True(t, assert.EqualValues(t, metrics.orig, &gogoprotoRM)) + assert.True(t, assert.EqualValues(t, metrics.getOrig(), &gogoprotoRM)) } func TestMetricCount(t *testing.T) { @@ -226,15 +229,15 @@ func TestHistogramWithValidSum(t *testing.T) { func TestMetricsMoveTo(t *testing.T) { metrics := NewMetrics() - fillTestResourceMetricsSlice(metrics.ResourceMetrics()) + internal.FillTestResourceMetricsSlice(internal.ResourceMetricsSlice(metrics.ResourceMetrics())) dest := NewMetrics() metrics.MoveTo(dest) assert.EqualValues(t, NewMetrics(), metrics) - assert.EqualValues(t, generateTestResourceMetricsSlice(), dest.ResourceMetrics()) + assert.EqualValues(t, ResourceMetricsSlice(internal.GenerateTestResourceMetricsSlice()), dest.ResourceMetrics()) } func TestOtlpToInternalReadOnly(t *testing.T) { - md := Metrics{orig: &otlpcollectormetrics.ExportMetricsServiceRequest{ + md := newMetrics(&otlpcollectormetrics.ExportMetricsServiceRequest{ ResourceMetrics: []*otlpmetrics.ResourceMetrics{ { Resource: generateTestProtoResource(), @@ -246,12 +249,12 @@ func TestOtlpToInternalReadOnly(t *testing.T) { }, }, }, - }} + }) resourceMetrics := md.ResourceMetrics() assert.EqualValues(t, 1, resourceMetrics.Len()) resourceMetric := resourceMetrics.At(0) - assert.EqualValues(t, NewMapFromRaw(map[string]interface{}{ + assert.EqualValues(t, pcommon.NewMapFromRaw(map[string]interface{}{ "string": "string-resource", }), resourceMetric.Resource().Attributes()) metrics := resourceMetric.ScopeMetrics().At(0).Metrics() @@ -269,12 +272,12 @@ func TestOtlpToInternalReadOnly(t *testing.T) { assert.EqualValues(t, startTime, gaugeDataPoints.At(0).StartTimestamp()) assert.EqualValues(t, endTime, gaugeDataPoints.At(0).Timestamp()) assert.EqualValues(t, 123.1, gaugeDataPoints.At(0).DoubleVal()) - assert.EqualValues(t, NewMapFromRaw(map[string]interface{}{"key0": "value0"}), gaugeDataPoints.At(0).Attributes()) + assert.EqualValues(t, pcommon.NewMapFromRaw(map[string]interface{}{"key0": "value0"}), gaugeDataPoints.At(0).Attributes()) // Second point assert.EqualValues(t, startTime, gaugeDataPoints.At(1).StartTimestamp()) assert.EqualValues(t, endTime, gaugeDataPoints.At(1).Timestamp()) assert.EqualValues(t, 456.1, gaugeDataPoints.At(1).DoubleVal()) - assert.EqualValues(t, NewMapFromRaw(map[string]interface{}{"key1": "value1"}), gaugeDataPoints.At(1).Attributes()) + assert.EqualValues(t, pcommon.NewMapFromRaw(map[string]interface{}{"key1": "value1"}), gaugeDataPoints.At(1).Attributes()) // Check double metric metricDouble := metrics.At(1) @@ -290,12 +293,12 @@ func TestOtlpToInternalReadOnly(t *testing.T) { assert.EqualValues(t, startTime, sumDataPoints.At(0).StartTimestamp()) assert.EqualValues(t, endTime, sumDataPoints.At(0).Timestamp()) assert.EqualValues(t, 123.1, sumDataPoints.At(0).DoubleVal()) - assert.EqualValues(t, NewMapFromRaw(map[string]interface{}{"key0": "value0"}), sumDataPoints.At(0).Attributes()) + assert.EqualValues(t, pcommon.NewMapFromRaw(map[string]interface{}{"key0": "value0"}), sumDataPoints.At(0).Attributes()) // Second point assert.EqualValues(t, startTime, sumDataPoints.At(1).StartTimestamp()) assert.EqualValues(t, endTime, sumDataPoints.At(1).Timestamp()) 
assert.EqualValues(t, 456.1, sumDataPoints.At(1).DoubleVal()) - assert.EqualValues(t, NewMapFromRaw(map[string]interface{}{"key1": "value1"}), sumDataPoints.At(1).Attributes()) + assert.EqualValues(t, pcommon.NewMapFromRaw(map[string]interface{}{"key1": "value1"}), sumDataPoints.At(1).Attributes()) // Check histogram metric metricHistogram := metrics.At(2) @@ -311,18 +314,18 @@ func TestOtlpToInternalReadOnly(t *testing.T) { assert.EqualValues(t, startTime, histogramDataPoints.At(0).StartTimestamp()) assert.EqualValues(t, endTime, histogramDataPoints.At(0).Timestamp()) assert.EqualValues(t, []float64{1, 2}, histogramDataPoints.At(0).ExplicitBounds().AsRaw()) - assert.EqualValues(t, NewMapFromRaw(map[string]interface{}{"key0": "value0"}), histogramDataPoints.At(0).Attributes()) + assert.EqualValues(t, pcommon.NewMapFromRaw(map[string]interface{}{"key0": "value0"}), histogramDataPoints.At(0).Attributes()) assert.EqualValues(t, []uint64{10, 15, 1}, histogramDataPoints.At(0).BucketCounts().AsRaw()) // Second point assert.EqualValues(t, startTime, histogramDataPoints.At(1).StartTimestamp()) assert.EqualValues(t, endTime, histogramDataPoints.At(1).Timestamp()) assert.EqualValues(t, []float64{1}, histogramDataPoints.At(1).ExplicitBounds().AsRaw()) - assert.EqualValues(t, NewMapFromRaw(map[string]interface{}{"key1": "value1"}), histogramDataPoints.At(1).Attributes()) + assert.EqualValues(t, pcommon.NewMapFromRaw(map[string]interface{}{"key1": "value1"}), histogramDataPoints.At(1).Attributes()) assert.EqualValues(t, []uint64{10, 1}, histogramDataPoints.At(1).BucketCounts().AsRaw()) } func TestOtlpToFromInternalReadOnly(t *testing.T) { - md := MetricsFromOtlp(&otlpcollectormetrics.ExportMetricsServiceRequest{ + md := newMetrics(&otlpcollectormetrics.ExportMetricsServiceRequest{ ResourceMetrics: []*otlpmetrics.ResourceMetrics{ { Resource: generateTestProtoResource(), @@ -348,13 +351,13 @@ func TestOtlpToFromInternalReadOnly(t *testing.T) { }, }, }, - }, MetricsToOtlp(md)) + }, md.getOrig()) } func TestOtlpToFromInternalGaugeMutating(t *testing.T) { - newAttributes := NewMapFromRaw(map[string]interface{}{"k": "v"}) + newAttributes := pcommon.NewMapFromRaw(map[string]interface{}{"k": "v"}) - md := MetricsFromOtlp(&otlpcollectormetrics.ExportMetricsServiceRequest{ + md := newMetrics(&otlpcollectormetrics.ExportMetricsServiceRequest{ ResourceMetrics: []*otlpmetrics.ResourceMetrics{ { Resource: generateTestProtoResource(), @@ -383,9 +386,9 @@ func TestOtlpToFromInternalGaugeMutating(t *testing.T) { gaugeDataPoints := metric.Gauge().DataPoints() gaugeDataPoints.AppendEmpty() assert.EqualValues(t, 1, gaugeDataPoints.Len()) - gaugeDataPoints.At(0).SetStartTimestamp(Timestamp(startTime + 1)) + gaugeDataPoints.At(0).SetStartTimestamp(pcommon.Timestamp(startTime + 1)) assert.EqualValues(t, startTime+1, gaugeDataPoints.At(0).StartTimestamp()) - gaugeDataPoints.At(0).SetTimestamp(Timestamp(endTime + 1)) + gaugeDataPoints.At(0).SetTimestamp(pcommon.Timestamp(endTime + 1)) assert.EqualValues(t, endTime+1, gaugeDataPoints.At(0).Timestamp()) gaugeDataPoints.At(0).SetDoubleVal(124.1) assert.EqualValues(t, 124.1, gaugeDataPoints.At(0).DoubleVal()) @@ -431,13 +434,13 @@ func TestOtlpToFromInternalGaugeMutating(t *testing.T) { }, }, }, - }, MetricsToOtlp(md)) + }, md.getOrig()) } func TestOtlpToFromInternalSumMutating(t *testing.T) { - newAttributes := NewMapFromRaw(map[string]interface{}{"k": "v"}) + newAttributes := pcommon.NewMapFromRaw(map[string]interface{}{"k": "v"}) - md := 
MetricsFromOtlp(&otlpcollectormetrics.ExportMetricsServiceRequest{ + md := newMetrics(&otlpcollectormetrics.ExportMetricsServiceRequest{ ResourceMetrics: []*otlpmetrics.ResourceMetrics{ { Resource: generateTestProtoResource(), @@ -467,9 +470,9 @@ func TestOtlpToFromInternalSumMutating(t *testing.T) { metric.Sum().SetAggregationTemporality(MetricAggregationTemporalityCumulative) doubleDataPoints.AppendEmpty() assert.EqualValues(t, 1, doubleDataPoints.Len()) - doubleDataPoints.At(0).SetStartTimestamp(Timestamp(startTime + 1)) + doubleDataPoints.At(0).SetStartTimestamp(pcommon.Timestamp(startTime + 1)) assert.EqualValues(t, startTime+1, doubleDataPoints.At(0).StartTimestamp()) - doubleDataPoints.At(0).SetTimestamp(Timestamp(endTime + 1)) + doubleDataPoints.At(0).SetTimestamp(pcommon.Timestamp(endTime + 1)) assert.EqualValues(t, endTime+1, doubleDataPoints.At(0).Timestamp()) doubleDataPoints.At(0).SetDoubleVal(124.1) assert.EqualValues(t, 124.1, doubleDataPoints.At(0).DoubleVal()) @@ -516,13 +519,13 @@ func TestOtlpToFromInternalSumMutating(t *testing.T) { }, }, }, - }, MetricsToOtlp(md)) + }, md.getOrig()) } func TestOtlpToFromInternalHistogramMutating(t *testing.T) { - newAttributes := NewMapFromRaw(map[string]interface{}{"k": "v"}) + newAttributes := pcommon.NewMapFromRaw(map[string]interface{}{"k": "v"}) - md := MetricsFromOtlp(&otlpcollectormetrics.ExportMetricsServiceRequest{ + md := newMetrics(&otlpcollectormetrics.ExportMetricsServiceRequest{ ResourceMetrics: []*otlpmetrics.ResourceMetrics{ { Resource: generateTestProtoResource(), @@ -552,16 +555,16 @@ func TestOtlpToFromInternalHistogramMutating(t *testing.T) { histogramDataPoints := metric.Histogram().DataPoints() histogramDataPoints.AppendEmpty() assert.EqualValues(t, 1, histogramDataPoints.Len()) - histogramDataPoints.At(0).SetStartTimestamp(Timestamp(startTime + 1)) + histogramDataPoints.At(0).SetStartTimestamp(pcommon.Timestamp(startTime + 1)) assert.EqualValues(t, startTime+1, histogramDataPoints.At(0).StartTimestamp()) - histogramDataPoints.At(0).SetTimestamp(Timestamp(endTime + 1)) + histogramDataPoints.At(0).SetTimestamp(pcommon.Timestamp(endTime + 1)) assert.EqualValues(t, endTime+1, histogramDataPoints.At(0).Timestamp()) histogramDataPoints.At(0).Attributes().Remove("key0") histogramDataPoints.At(0).Attributes().UpsertString("k", "v") assert.EqualValues(t, newAttributes, histogramDataPoints.At(0).Attributes()) - histogramDataPoints.At(0).SetExplicitBounds(NewImmutableFloat64Slice(([]float64{1}))) + histogramDataPoints.At(0).SetExplicitBounds(pcommon.NewImmutableFloat64Slice([]float64{1})) assert.EqualValues(t, []float64{1}, histogramDataPoints.At(0).ExplicitBounds().AsRaw()) - histogramDataPoints.At(0).SetBucketCounts(NewImmutableUInt64Slice(([]uint64{21, 32}))) + histogramDataPoints.At(0).SetBucketCounts(pcommon.NewImmutableUInt64Slice([]uint64{21, 32})) // Test that everything is updated. 
assert.EqualValues(t, &otlpmetrics.MetricsData{ ResourceMetrics: []*otlpmetrics.ResourceMetrics{ @@ -600,13 +603,13 @@ func TestOtlpToFromInternalHistogramMutating(t *testing.T) { }, }, }, - }, MetricsToOtlp(md)) + }, md.getOrig()) } func TestOtlpToFromInternalExponentialHistogramMutating(t *testing.T) { - newAttributes := NewMapFromRaw(map[string]interface{}{"k": "v"}) + newAttributes := pcommon.NewMapFromRaw(map[string]interface{}{"k": "v"}) - md := MetricsFromOtlp(&otlpcollectormetrics.ExportMetricsServiceRequest{ + md := newMetrics(&otlpcollectormetrics.ExportMetricsServiceRequest{ ResourceMetrics: []*otlpmetrics.ResourceMetrics{ { Resource: generateTestProtoResource(), @@ -636,9 +639,9 @@ func TestOtlpToFromInternalExponentialHistogramMutating(t *testing.T) { histogramDataPoints := metric.ExponentialHistogram().DataPoints() histogramDataPoints.AppendEmpty() assert.EqualValues(t, 1, histogramDataPoints.Len()) - histogramDataPoints.At(0).SetStartTimestamp(Timestamp(startTime + 1)) + histogramDataPoints.At(0).SetStartTimestamp(pcommon.Timestamp(startTime + 1)) assert.EqualValues(t, startTime+1, histogramDataPoints.At(0).StartTimestamp()) - histogramDataPoints.At(0).SetTimestamp(Timestamp(endTime + 1)) + histogramDataPoints.At(0).SetTimestamp(pcommon.Timestamp(endTime + 1)) assert.EqualValues(t, endTime+1, histogramDataPoints.At(0).Timestamp()) histogramDataPoints.At(0).Attributes().Remove("key0") histogramDataPoints.At(0).Attributes().UpsertString("k", "v") @@ -679,17 +682,17 @@ func TestOtlpToFromInternalExponentialHistogramMutating(t *testing.T) { }, }, }, - }, MetricsToOtlp(md)) + }, md.getOrig()) } func TestMetricsClone(t *testing.T) { metrics := NewMetrics() - fillTestResourceMetricsSlice(metrics.ResourceMetrics()) + internal.FillTestResourceMetricsSlice(internal.ResourceMetricsSlice(metrics.ResourceMetrics())) assert.EqualValues(t, metrics, metrics.Clone()) } func TestMetricDataPointFlags(t *testing.T) { - flags := generateTestMetricDataPointFlags() + flags := MetricDataPointFlags(internal.GenerateTestMetricDataPointFlags()) assert.False(t, flags.NoRecordedValue()) assert.Equal(t, uint32(0), flags.AsRaw()) flags.SetNoRecordedValue(true) @@ -720,7 +723,7 @@ func TestMetricDataPointFlags(t *testing.T) { func BenchmarkMetricsClone(b *testing.B) { metrics := NewMetrics() - fillTestResourceMetricsSlice(metrics.ResourceMetrics()) + internal.FillTestResourceMetricsSlice(internal.ResourceMetricsSlice(metrics.ResourceMetrics())) b.ResetTimer() for n := 0; n < b.N; n++ { clone := metrics.Clone() @@ -747,8 +750,8 @@ func BenchmarkOtlpToFromInternal_PassThrough(b *testing.B) { b.ResetTimer() for n := 0; n < b.N; n++ { - md := MetricsFromOtlp(req) - newReq := MetricsToOtlp(md) + md := newMetrics(req) + newReq := md.getOrig() if len(req.ResourceMetrics) != len(newReq.ResourceMetrics) { b.Fail() } @@ -772,9 +775,9 @@ func BenchmarkOtlpToFromInternal_Gauge_MutateOneLabel(b *testing.B) { b.ResetTimer() for n := 0; n < b.N; n++ { - md := MetricsFromOtlp(req) + md := newMetrics(req) md.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(0).Gauge().DataPoints().At(0).Attributes().UpsertString("key0", "value2") - newReq := MetricsToOtlp(md) + newReq := md.getOrig() if len(req.ResourceMetrics) != len(newReq.ResourceMetrics) { b.Fail() } @@ -798,9 +801,9 @@ func BenchmarkOtlpToFromInternal_Sum_MutateOneLabel(b *testing.B) { b.ResetTimer() for n := 0; n < b.N; n++ { - md := MetricsFromOtlp(req) + md := newMetrics(req) 
md.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(0).Sum().DataPoints().At(0).Attributes().UpsertString("key0", "value2") - newReq := MetricsToOtlp(md) + newReq := md.getOrig() if len(req.ResourceMetrics) != len(newReq.ResourceMetrics) { b.Fail() } @@ -824,9 +827,9 @@ func BenchmarkOtlpToFromInternal_HistogramPoints_MutateOneLabel(b *testing.B) { b.ResetTimer() for n := 0; n < b.N; n++ { - md := MetricsFromOtlp(req) + md := newMetrics(req) md.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(0).Histogram().DataPoints().At(0).Attributes().UpsertString("key0", "value2") - newReq := MetricsToOtlp(md) + newReq := md.getOrig() if len(req.ResourceMetrics) != len(newReq.ResourceMetrics) { b.Fail() } @@ -971,23 +974,23 @@ func generateTestProtoHistogramMetric() *otlpmetrics.Metric { } func generateMetricsEmptyResource() Metrics { - return Metrics{orig: &otlpcollectormetrics.ExportMetricsServiceRequest{ + return newMetrics(&otlpcollectormetrics.ExportMetricsServiceRequest{ ResourceMetrics: []*otlpmetrics.ResourceMetrics{{}}, - }} + }) } func generateMetricsEmptyInstrumentation() Metrics { - return Metrics{orig: &otlpcollectormetrics.ExportMetricsServiceRequest{ + return newMetrics(&otlpcollectormetrics.ExportMetricsServiceRequest{ ResourceMetrics: []*otlpmetrics.ResourceMetrics{ { ScopeMetrics: []*otlpmetrics.ScopeMetrics{{}}, }, }, - }} + }) } func generateMetricsEmptyMetrics() Metrics { - return Metrics{orig: &otlpcollectormetrics.ExportMetricsServiceRequest{ + return newMetrics(&otlpcollectormetrics.ExportMetricsServiceRequest{ ResourceMetrics: []*otlpmetrics.ResourceMetrics{ { ScopeMetrics: []*otlpmetrics.ScopeMetrics{ @@ -997,11 +1000,11 @@ func generateMetricsEmptyMetrics() Metrics { }, }, }, - }} + }) } func generateMetricsEmptyDataPoints() Metrics { - return Metrics{orig: &otlpcollectormetrics.ExportMetricsServiceRequest{ + return newMetrics(&otlpcollectormetrics.ExportMetricsServiceRequest{ ResourceMetrics: []*otlpmetrics.ResourceMetrics{ { ScopeMetrics: []*otlpmetrics.ScopeMetrics{ @@ -1021,15 +1024,5 @@ func generateMetricsEmptyDataPoints() Metrics { }, }, }, - }} -} - -func fillTestMetricDataPointFlags(tv MetricDataPointFlags) { - *tv.orig = uint32(otlpmetrics.DataPointFlags_FLAG_NONE) -} - -func generateTestMetricDataPointFlags() MetricDataPointFlags { - tv := NewMetricDataPointFlags() - fillTestMetricDataPointFlags(tv) - return tv + }) } diff --git a/pdata/pmetric/pb.go b/pdata/pmetric/pb.go index db05b35c253..be3ec443ca5 100644 --- a/pdata/pmetric/pb.go +++ b/pdata/pmetric/pb.go @@ -34,12 +34,12 @@ func newPbMarshaler() *pbMarshaler { var _ Sizer = (*pbMarshaler)(nil) func (e *pbMarshaler) MarshalMetrics(md Metrics) ([]byte, error) { - pb := internal.MetricsToProto(md) + pb := internal.MetricsToProto(internal.Metrics(md)) return pb.Marshal() } func (e *pbMarshaler) MetricsSize(md Metrics) int { - pb := internal.MetricsToProto(md) + pb := internal.MetricsToProto(internal.Metrics(md)) return pb.Size() } @@ -57,5 +57,5 @@ func newPbUnmarshaler() *pbUnmarshaler { func (d *pbUnmarshaler) UnmarshalMetrics(buf []byte) (Metrics, error) { pb := otlpmetrics.MetricsData{} err := pb.Unmarshal(buf) - return internal.MetricsFromProto(pb), err + return Metrics(internal.MetricsFromProto(pb)), err } diff --git a/pdata/pmetric/pmetricotlp/metrics.go b/pdata/pmetric/pmetricotlp/metrics.go index 01fe2bafa3e..c2f9ffcd907 100644 --- a/pdata/pmetric/pmetricotlp/metrics.go +++ b/pdata/pmetric/pmetricotlp/metrics.go @@ -78,8 +78,8 @@ func NewRequest() Request { // NewRequestFromMetrics 
returns a Request from pmetric.Metrics. // Because Request is a wrapper for pmetric.Metrics, // any changes to the provided Metrics struct will be reflected in the Request and vice versa. -func NewRequestFromMetrics(m pmetric.Metrics) Request { - return Request{orig: internal.MetricsToOtlp(m)} +func NewRequestFromMetrics(md pmetric.Metrics) Request { + return Request{orig: internal.GetOrigMetrics(internal.Metrics(md))} } // MarshalProto marshals Request into proto bytes. @@ -111,7 +111,7 @@ func (mr Request) UnmarshalJSON(data []byte) error { } func (mr Request) Metrics() pmetric.Metrics { - return internal.MetricsFromOtlp(mr.orig) + return pmetric.Metrics(internal.NewMetrics(mr.orig)) } // Client is the client API for OTLP-GRPC Metrics service. diff --git a/pdata/ptrace/alias.go b/pdata/ptrace/alias.go deleted file mode 100644 index 331f52c5e77..00000000000 --- a/pdata/ptrace/alias.go +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright The OpenTelemetry Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package ptrace // import "go.opentelemetry.io/collector/pdata/ptrace" - -import "go.opentelemetry.io/collector/pdata/internal" - -// This file contains aliases for trace data structures. - -// Traces is the top-level struct that is propagated through the traces pipeline. -// Use NewTraces to create new instance, zero-initialized instance is not valid for use. -type Traces = internal.Traces - -// NewTraces creates a new Traces struct. -var NewTraces = internal.NewTraces - -// TraceState is a string representing the tracestate in w3c-trace-context format: https://www.w3.org/TR/trace-context/#tracestate-header -type TraceState = internal.TraceState - -const ( - // TraceStateEmpty represents the empty TraceState. - TraceStateEmpty = internal.TraceStateEmpty -) - -// SpanKind is the type of span. Can be used to specify additional relationships between spans -// in addition to a parent/child relationship. -type SpanKind = internal.SpanKind - -const ( - // SpanKindUnspecified represents that the SpanKind is unspecified, it MUST NOT be used. - SpanKindUnspecified = internal.SpanKindUnspecified - - // SpanKindInternal indicates that the span represents an internal operation within an application, - // as opposed to an operation happening at the boundaries. Default value. - SpanKindInternal = internal.SpanKindInternal - - // SpanKindServer indicates that the span covers server-side handling of an RPC or other - // remote network request. - SpanKindServer = internal.SpanKindServer - - // SpanKindProducer indicates that the span describes a producer sending a message to a broker. - // Unlike CLIENT and SERVER, there is often no direct critical path latency relationship - // between producer and consumer spans. - // A PRODUCER span ends when the message was accepted by the broker while the logical processing of - // the message might span a much longer time. 
- SpanKindClient = internal.SpanKindClient - - // SpanKindProducer indicates that the span describes a producer sending a message to a broker. - // Unlike CLIENT and SERVER, there is often no direct critical path latency relationship - // between producer and consumer spans. - // A PRODUCER span ends when the message was accepted by the broker while the logical processing of - // the message might span a much longer time. - SpanKindProducer = internal.SpanKindProducer - - // SpanKindConsumer indicates that the span describes consumer receiving a message from a broker. - // Like the PRODUCER kind, there is often no direct critical path latency relationship between - // producer and consumer spans. - SpanKindConsumer = internal.SpanKindConsumer -) - -// StatusCode mirrors the codes defined at -// https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/api.md#set-status -type StatusCode = internal.StatusCode - -const ( - StatusCodeUnset = internal.StatusCodeUnset - StatusCodeOk = internal.StatusCodeOk - StatusCodeError = internal.StatusCodeError -) diff --git a/pdata/ptrace/generated_alias.go b/pdata/ptrace/generated_alias.go deleted file mode 100644 index e932ad1e0e0..00000000000 --- a/pdata/ptrace/generated_alias.go +++ /dev/null @@ -1,162 +0,0 @@ -// Copyright The OpenTelemetry Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. -// To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". - -package ptrace - -import "go.opentelemetry.io/collector/pdata/internal" - -// ResourceSpansSlice logically represents a slice of ResourceSpans. -// -// This is a reference type. If passed by value and callee modifies it, the -// caller will see the modification. -// -// Must use NewResourceSpansSlice function to create new instances. -// Important: zero-initialized instance is not valid for use. -type ResourceSpansSlice = internal.ResourceSpansSlice - -// NewResourceSpansSlice creates a ResourceSpansSlice with 0 elements. -// Can use "EnsureCapacity" to initialize with a given capacity. -var NewResourceSpansSlice = internal.NewResourceSpansSlice - -// ResourceSpans is a collection of spans from a Resource. -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewResourceSpans function to create new instances. -// Important: zero-initialized instance is not valid for use. -type ResourceSpans = internal.ResourceSpans - -// NewResourceSpans is an alias for a function to create a new empty ResourceSpans. -var NewResourceSpans = internal.NewResourceSpans - -// ScopeSpansSlice logically represents a slice of ScopeSpans. -// -// This is a reference type. If passed by value and callee modifies it, the -// caller will see the modification. -// -// Must use NewScopeSpansSlice function to create new instances. -// Important: zero-initialized instance is not valid for use. 
-type ScopeSpansSlice = internal.ScopeSpansSlice - -// NewScopeSpansSlice creates a ScopeSpansSlice with 0 elements. -// Can use "EnsureCapacity" to initialize with a given capacity. -var NewScopeSpansSlice = internal.NewScopeSpansSlice - -// ScopeSpans is a collection of spans from a LibraryInstrumentation. -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewScopeSpans function to create new instances. -// Important: zero-initialized instance is not valid for use. -type ScopeSpans = internal.ScopeSpans - -// NewScopeSpans is an alias for a function to create a new empty ScopeSpans. -var NewScopeSpans = internal.NewScopeSpans - -// SpanSlice logically represents a slice of Span. -// -// This is a reference type. If passed by value and callee modifies it, the -// caller will see the modification. -// -// Must use NewSpanSlice function to create new instances. -// Important: zero-initialized instance is not valid for use. -type SpanSlice = internal.SpanSlice - -// NewSpanSlice creates a SpanSlice with 0 elements. -// Can use "EnsureCapacity" to initialize with a given capacity. -var NewSpanSlice = internal.NewSpanSlice - -// Span represents a single operation within a trace. -// See Span definition in OTLP: https://github.com/open-telemetry/opentelemetry-proto/blob/main/opentelemetry/proto/trace/v1/trace.proto -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewSpan function to create new instances. -// Important: zero-initialized instance is not valid for use. -type Span = internal.Span - -// NewSpan is an alias for a function to create a new empty Span. -var NewSpan = internal.NewSpan - -// SpanEventSlice logically represents a slice of SpanEvent. -// -// This is a reference type. If passed by value and callee modifies it, the -// caller will see the modification. -// -// Must use NewSpanEventSlice function to create new instances. -// Important: zero-initialized instance is not valid for use. -type SpanEventSlice = internal.SpanEventSlice - -// NewSpanEventSlice creates a SpanEventSlice with 0 elements. -// Can use "EnsureCapacity" to initialize with a given capacity. -var NewSpanEventSlice = internal.NewSpanEventSlice - -// SpanEvent is a time-stamped annotation of the span, consisting of user-supplied -// text description and key-value pairs. See OTLP for event definition. -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewSpanEvent function to create new instances. -// Important: zero-initialized instance is not valid for use. -type SpanEvent = internal.SpanEvent - -// NewSpanEvent is an alias for a function to create a new empty SpanEvent. -var NewSpanEvent = internal.NewSpanEvent - -// SpanLinkSlice logically represents a slice of SpanLink. -// -// This is a reference type. If passed by value and callee modifies it, the -// caller will see the modification. -// -// Must use NewSpanLinkSlice function to create new instances. -// Important: zero-initialized instance is not valid for use. -type SpanLinkSlice = internal.SpanLinkSlice - -// NewSpanLinkSlice creates a SpanLinkSlice with 0 elements. -// Can use "EnsureCapacity" to initialize with a given capacity. -var NewSpanLinkSlice = internal.NewSpanLinkSlice - -// SpanLink is a pointer from the current span to another span in the same trace or in a -// different trace. 
-// See Link definition in OTLP: https://github.com/open-telemetry/opentelemetry-proto/blob/main/opentelemetry/proto/trace/v1/trace.proto -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewSpanLink function to create new instances. -// Important: zero-initialized instance is not valid for use. -type SpanLink = internal.SpanLink - -// NewSpanLink is an alias for a function to create a new empty SpanLink. -var NewSpanLink = internal.NewSpanLink - -// SpanStatus is an optional final status for this span. Semantically, when Status was not -// set, that means the span ended without errors and to assume Status.Ok (code = 0). -// -// This is a reference type, if passed by value and callee modifies it the -// caller will see the modification. -// -// Must use NewSpanStatus function to create new instances. -// Important: zero-initialized instance is not valid for use. -type SpanStatus = internal.SpanStatus - -// NewSpanStatus is an alias for a function to create a new empty SpanStatus. -var NewSpanStatus = internal.NewSpanStatus diff --git a/pdata/internal/generated_ptrace.go b/pdata/ptrace/generated_traces.go similarity index 71% rename from pdata/internal/generated_ptrace.go rename to pdata/ptrace/generated_traces.go index 5f17d6f362b..e243bcaaafa 100644 --- a/pdata/internal/generated_ptrace.go +++ b/pdata/ptrace/generated_traces.go @@ -15,12 +15,14 @@ // Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. // To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". -package internal +package ptrace import ( "sort" + "go.opentelemetry.io/collector/pdata/internal" otlptrace "go.opentelemetry.io/collector/pdata/internal/data/protogen/trace/v1" + "go.opentelemetry.io/collector/pdata/pcommon" ) // ResourceSpansSlice logically represents a slice of ResourceSpans. @@ -30,28 +32,28 @@ import ( // // Must use NewResourceSpansSlice function to create new instances. // Important: zero-initialized instance is not valid for use. -type ResourceSpansSlice struct { - // orig points to the slice otlptrace.ResourceSpans field contained somewhere else. - // We use pointer-to-slice to be able to modify it in functions like EnsureCapacity. - orig *[]*otlptrace.ResourceSpans -} +type ResourceSpansSlice internal.ResourceSpansSlice func newResourceSpansSlice(orig *[]*otlptrace.ResourceSpans) ResourceSpansSlice { - return ResourceSpansSlice{orig} + return ResourceSpansSlice(internal.NewResourceSpansSlice(orig)) +} + +func (ms ResourceSpansSlice) getOrig() *[]*otlptrace.ResourceSpans { + return internal.GetOrigResourceSpansSlice(internal.ResourceSpansSlice(ms)) } // NewResourceSpansSlice creates a ResourceSpansSlice with 0 elements. // Can use "EnsureCapacity" to initialize with a given capacity. func NewResourceSpansSlice() ResourceSpansSlice { orig := []*otlptrace.ResourceSpans(nil) - return ResourceSpansSlice{&orig} + return newResourceSpansSlice(&orig) } // Len returns the number of elements in the slice. // // Returns "0" for a newly instance created with "NewResourceSpansSlice()". func (es ResourceSpansSlice) Len() int { - return len(*es.orig) + return len(*es.getOrig()) } // At returns the element at the given index. @@ -63,27 +65,27 @@ func (es ResourceSpansSlice) Len() int { // ... 
// Do something with the element // } func (es ResourceSpansSlice) At(ix int) ResourceSpans { - return newResourceSpans((*es.orig)[ix]) + return newResourceSpans((*es.getOrig())[ix]) } // CopyTo copies all elements from the current slice to the dest. func (es ResourceSpansSlice) CopyTo(dest ResourceSpansSlice) { srcLen := es.Len() - destCap := cap(*dest.orig) + destCap := cap(*dest.getOrig()) if srcLen <= destCap { - (*dest.orig) = (*dest.orig)[:srcLen:destCap] - for i := range *es.orig { - newResourceSpans((*es.orig)[i]).CopyTo(newResourceSpans((*dest.orig)[i])) + (*dest.getOrig()) = (*dest.getOrig())[:srcLen:destCap] + for i := range *es.getOrig() { + newResourceSpans((*es.getOrig())[i]).CopyTo(newResourceSpans((*dest.getOrig())[i])) } return } origs := make([]otlptrace.ResourceSpans, srcLen) wrappers := make([]*otlptrace.ResourceSpans, srcLen) - for i := range *es.orig { + for i := range *es.getOrig() { wrappers[i] = &origs[i] - newResourceSpans((*es.orig)[i]).CopyTo(newResourceSpans(wrappers[i])) + newResourceSpans((*es.getOrig())[i]).CopyTo(newResourceSpans(wrappers[i])) } - *dest.orig = wrappers + *dest.getOrig() = wrappers } // EnsureCapacity is an operation that ensures the slice has at least the specified capacity. @@ -99,20 +101,20 @@ func (es ResourceSpansSlice) CopyTo(dest ResourceSpansSlice) { // // Here should set all the values for e. // } func (es ResourceSpansSlice) EnsureCapacity(newCap int) { - oldCap := cap(*es.orig) + oldCap := cap(*es.getOrig()) if newCap <= oldCap { return } - newOrig := make([]*otlptrace.ResourceSpans, len(*es.orig), newCap) - copy(newOrig, *es.orig) - *es.orig = newOrig + newOrig := make([]*otlptrace.ResourceSpans, len(*es.getOrig()), newCap) + copy(newOrig, *es.getOrig()) + *es.getOrig() = newOrig } // AppendEmpty will append to the end of the slice an empty ResourceSpans. // It returns the newly added ResourceSpans. func (es ResourceSpansSlice) AppendEmpty() ResourceSpans { - *es.orig = append(*es.orig, &otlptrace.ResourceSpans{}) + *es.getOrig() = append(*es.getOrig(), &otlptrace.ResourceSpans{}) return es.At(es.Len() - 1) } @@ -125,29 +127,29 @@ func (es ResourceSpansSlice) AppendEmpty() ResourceSpans { // lessFunc := func(a, b ResourceSpans) bool { // return a.Name() < b.Name() // choose any comparison here // } -// assert.EqualValues(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) +// assert.Equal(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) func (es ResourceSpansSlice) Sort(less func(a, b ResourceSpans) bool) ResourceSpansSlice { - sort.SliceStable(*es.orig, func(i, j int) bool { return less(es.At(i), es.At(j)) }) + sort.SliceStable(*es.getOrig(), func(i, j int) bool { return less(es.At(i), es.At(j)) }) return es } // MoveAndAppendTo moves all elements from the current slice and appends them to the dest. // The current slice will be cleared. func (es ResourceSpansSlice) MoveAndAppendTo(dest ResourceSpansSlice) { - if *dest.orig == nil { + if *dest.getOrig() == nil { // We can simply move the entire vector and avoid any allocations. - *dest.orig = *es.orig + *dest.getOrig() = *es.getOrig() } else { - *dest.orig = append(*dest.orig, *es.orig...) + *dest.getOrig() = append(*dest.getOrig(), *es.getOrig()...) } - *es.orig = nil + *es.getOrig() = nil } // RemoveIf calls f sequentially for each element present in the slice. // If f returns true, the element is removed from the slice. 
func (es ResourceSpansSlice) RemoveIf(f func(ResourceSpans) bool) { newLen := 0 - for i := 0; i < len(*es.orig); i++ { + for i := 0; i < len(*es.getOrig()); i++ { if f(es.At(i)) { continue } @@ -156,11 +158,11 @@ func (es ResourceSpansSlice) RemoveIf(f func(ResourceSpans) bool) { newLen++ continue } - (*es.orig)[newLen] = (*es.orig)[i] + (*es.getOrig())[newLen] = (*es.getOrig())[i] newLen++ } // TODO: Prevent memory leak by erasing truncated values. - *es.orig = (*es.orig)[:newLen] + *es.getOrig() = (*es.getOrig())[:newLen] } // ResourceSpans is a collection of spans from a Resource. @@ -170,12 +172,15 @@ func (es ResourceSpansSlice) RemoveIf(f func(ResourceSpans) bool) { // // Must use NewResourceSpans function to create new instances. // Important: zero-initialized instance is not valid for use. -type ResourceSpans struct { - orig *otlptrace.ResourceSpans -} + +type ResourceSpans internal.ResourceSpans func newResourceSpans(orig *otlptrace.ResourceSpans) ResourceSpans { - return ResourceSpans{orig: orig} + return ResourceSpans(internal.NewResourceSpans(orig)) +} + +func (ms ResourceSpans) getOrig() *otlptrace.ResourceSpans { + return internal.GetOrigResourceSpans(internal.ResourceSpans(ms)) } // NewResourceSpans creates a new empty ResourceSpans. @@ -189,28 +194,28 @@ func NewResourceSpans() ResourceSpans { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms ResourceSpans) MoveTo(dest ResourceSpans) { - *dest.orig = *ms.orig - *ms.orig = otlptrace.ResourceSpans{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlptrace.ResourceSpans{} } // Resource returns the resource associated with this ResourceSpans. -func (ms ResourceSpans) Resource() Resource { - return newResource(&ms.orig.Resource) +func (ms ResourceSpans) Resource() pcommon.Resource { + return pcommon.Resource(internal.NewResource(&ms.getOrig().Resource)) } // SchemaUrl returns the schemaurl associated with this ResourceSpans. func (ms ResourceSpans) SchemaUrl() string { - return ms.orig.SchemaUrl + return ms.getOrig().SchemaUrl } // SetSchemaUrl replaces the schemaurl associated with this ResourceSpans. func (ms ResourceSpans) SetSchemaUrl(v string) { - ms.orig.SchemaUrl = v + ms.getOrig().SchemaUrl = v } // ScopeSpans returns the ScopeSpans associated with this ResourceSpans. func (ms ResourceSpans) ScopeSpans() ScopeSpansSlice { - return newScopeSpansSlice(&ms.orig.ScopeSpans) + return ScopeSpansSlice(internal.NewScopeSpansSlice(&ms.getOrig().ScopeSpans)) } // CopyTo copies all properties from the current struct to the dest. @@ -227,28 +232,28 @@ func (ms ResourceSpans) CopyTo(dest ResourceSpans) { // // Must use NewScopeSpansSlice function to create new instances. // Important: zero-initialized instance is not valid for use. -type ScopeSpansSlice struct { - // orig points to the slice otlptrace.ScopeSpans field contained somewhere else. - // We use pointer-to-slice to be able to modify it in functions like EnsureCapacity. - orig *[]*otlptrace.ScopeSpans -} +type ScopeSpansSlice internal.ScopeSpansSlice func newScopeSpansSlice(orig *[]*otlptrace.ScopeSpans) ScopeSpansSlice { - return ScopeSpansSlice{orig} + return ScopeSpansSlice(internal.NewScopeSpansSlice(orig)) +} + +func (ms ScopeSpansSlice) getOrig() *[]*otlptrace.ScopeSpans { + return internal.GetOrigScopeSpansSlice(internal.ScopeSpansSlice(ms)) } // NewScopeSpansSlice creates a ScopeSpansSlice with 0 elements. // Can use "EnsureCapacity" to initialize with a given capacity. 
func NewScopeSpansSlice() ScopeSpansSlice { orig := []*otlptrace.ScopeSpans(nil) - return ScopeSpansSlice{&orig} + return newScopeSpansSlice(&orig) } // Len returns the number of elements in the slice. // // Returns "0" for a newly instance created with "NewScopeSpansSlice()". func (es ScopeSpansSlice) Len() int { - return len(*es.orig) + return len(*es.getOrig()) } // At returns the element at the given index. @@ -260,27 +265,27 @@ func (es ScopeSpansSlice) Len() int { // ... // Do something with the element // } func (es ScopeSpansSlice) At(ix int) ScopeSpans { - return newScopeSpans((*es.orig)[ix]) + return newScopeSpans((*es.getOrig())[ix]) } // CopyTo copies all elements from the current slice to the dest. func (es ScopeSpansSlice) CopyTo(dest ScopeSpansSlice) { srcLen := es.Len() - destCap := cap(*dest.orig) + destCap := cap(*dest.getOrig()) if srcLen <= destCap { - (*dest.orig) = (*dest.orig)[:srcLen:destCap] - for i := range *es.orig { - newScopeSpans((*es.orig)[i]).CopyTo(newScopeSpans((*dest.orig)[i])) + (*dest.getOrig()) = (*dest.getOrig())[:srcLen:destCap] + for i := range *es.getOrig() { + newScopeSpans((*es.getOrig())[i]).CopyTo(newScopeSpans((*dest.getOrig())[i])) } return } origs := make([]otlptrace.ScopeSpans, srcLen) wrappers := make([]*otlptrace.ScopeSpans, srcLen) - for i := range *es.orig { + for i := range *es.getOrig() { wrappers[i] = &origs[i] - newScopeSpans((*es.orig)[i]).CopyTo(newScopeSpans(wrappers[i])) + newScopeSpans((*es.getOrig())[i]).CopyTo(newScopeSpans(wrappers[i])) } - *dest.orig = wrappers + *dest.getOrig() = wrappers } // EnsureCapacity is an operation that ensures the slice has at least the specified capacity. @@ -296,20 +301,20 @@ func (es ScopeSpansSlice) CopyTo(dest ScopeSpansSlice) { // // Here should set all the values for e. // } func (es ScopeSpansSlice) EnsureCapacity(newCap int) { - oldCap := cap(*es.orig) + oldCap := cap(*es.getOrig()) if newCap <= oldCap { return } - newOrig := make([]*otlptrace.ScopeSpans, len(*es.orig), newCap) - copy(newOrig, *es.orig) - *es.orig = newOrig + newOrig := make([]*otlptrace.ScopeSpans, len(*es.getOrig()), newCap) + copy(newOrig, *es.getOrig()) + *es.getOrig() = newOrig } // AppendEmpty will append to the end of the slice an empty ScopeSpans. // It returns the newly added ScopeSpans. func (es ScopeSpansSlice) AppendEmpty() ScopeSpans { - *es.orig = append(*es.orig, &otlptrace.ScopeSpans{}) + *es.getOrig() = append(*es.getOrig(), &otlptrace.ScopeSpans{}) return es.At(es.Len() - 1) } @@ -322,29 +327,29 @@ func (es ScopeSpansSlice) AppendEmpty() ScopeSpans { // lessFunc := func(a, b ScopeSpans) bool { // return a.Name() < b.Name() // choose any comparison here // } -// assert.EqualValues(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) +// assert.Equal(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) func (es ScopeSpansSlice) Sort(less func(a, b ScopeSpans) bool) ScopeSpansSlice { - sort.SliceStable(*es.orig, func(i, j int) bool { return less(es.At(i), es.At(j)) }) + sort.SliceStable(*es.getOrig(), func(i, j int) bool { return less(es.At(i), es.At(j)) }) return es } // MoveAndAppendTo moves all elements from the current slice and appends them to the dest. // The current slice will be cleared. func (es ScopeSpansSlice) MoveAndAppendTo(dest ScopeSpansSlice) { - if *dest.orig == nil { + if *dest.getOrig() == nil { // We can simply move the entire vector and avoid any allocations. - *dest.orig = *es.orig + *dest.getOrig() = *es.getOrig() } else { - *dest.orig = append(*dest.orig, *es.orig...) 
+ *dest.getOrig() = append(*dest.getOrig(), *es.getOrig()...) } - *es.orig = nil + *es.getOrig() = nil } // RemoveIf calls f sequentially for each element present in the slice. // If f returns true, the element is removed from the slice. func (es ScopeSpansSlice) RemoveIf(f func(ScopeSpans) bool) { newLen := 0 - for i := 0; i < len(*es.orig); i++ { + for i := 0; i < len(*es.getOrig()); i++ { if f(es.At(i)) { continue } @@ -353,11 +358,11 @@ func (es ScopeSpansSlice) RemoveIf(f func(ScopeSpans) bool) { newLen++ continue } - (*es.orig)[newLen] = (*es.orig)[i] + (*es.getOrig())[newLen] = (*es.getOrig())[i] newLen++ } // TODO: Prevent memory leak by erasing truncated values. - *es.orig = (*es.orig)[:newLen] + *es.getOrig() = (*es.getOrig())[:newLen] } // ScopeSpans is a collection of spans from a LibraryInstrumentation. @@ -367,12 +372,15 @@ func (es ScopeSpansSlice) RemoveIf(f func(ScopeSpans) bool) { // // Must use NewScopeSpans function to create new instances. // Important: zero-initialized instance is not valid for use. -type ScopeSpans struct { - orig *otlptrace.ScopeSpans -} + +type ScopeSpans internal.ScopeSpans func newScopeSpans(orig *otlptrace.ScopeSpans) ScopeSpans { - return ScopeSpans{orig: orig} + return ScopeSpans(internal.NewScopeSpans(orig)) +} + +func (ms ScopeSpans) getOrig() *otlptrace.ScopeSpans { + return internal.GetOrigScopeSpans(internal.ScopeSpans(ms)) } // NewScopeSpans creates a new empty ScopeSpans. @@ -386,28 +394,28 @@ func NewScopeSpans() ScopeSpans { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms ScopeSpans) MoveTo(dest ScopeSpans) { - *dest.orig = *ms.orig - *ms.orig = otlptrace.ScopeSpans{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlptrace.ScopeSpans{} } // Scope returns the scope associated with this ScopeSpans. -func (ms ScopeSpans) Scope() InstrumentationScope { - return newInstrumentationScope(&ms.orig.Scope) +func (ms ScopeSpans) Scope() pcommon.InstrumentationScope { + return pcommon.InstrumentationScope(internal.NewInstrumentationScope(&ms.getOrig().Scope)) } // SchemaUrl returns the schemaurl associated with this ScopeSpans. func (ms ScopeSpans) SchemaUrl() string { - return ms.orig.SchemaUrl + return ms.getOrig().SchemaUrl } // SetSchemaUrl replaces the schemaurl associated with this ScopeSpans. func (ms ScopeSpans) SetSchemaUrl(v string) { - ms.orig.SchemaUrl = v + ms.getOrig().SchemaUrl = v } // Spans returns the Spans associated with this ScopeSpans. func (ms ScopeSpans) Spans() SpanSlice { - return newSpanSlice(&ms.orig.Spans) + return SpanSlice(internal.NewSpanSlice(&ms.getOrig().Spans)) } // CopyTo copies all properties from the current struct to the dest. @@ -424,28 +432,28 @@ func (ms ScopeSpans) CopyTo(dest ScopeSpans) { // // Must use NewSpanSlice function to create new instances. // Important: zero-initialized instance is not valid for use. -type SpanSlice struct { - // orig points to the slice otlptrace.Span field contained somewhere else. - // We use pointer-to-slice to be able to modify it in functions like EnsureCapacity. - orig *[]*otlptrace.Span -} +type SpanSlice internal.SpanSlice func newSpanSlice(orig *[]*otlptrace.Span) SpanSlice { - return SpanSlice{orig} + return SpanSlice(internal.NewSpanSlice(orig)) +} + +func (ms SpanSlice) getOrig() *[]*otlptrace.Span { + return internal.GetOrigSpanSlice(internal.SpanSlice(ms)) } // NewSpanSlice creates a SpanSlice with 0 elements. // Can use "EnsureCapacity" to initialize with a given capacity. 
func NewSpanSlice() SpanSlice { orig := []*otlptrace.Span(nil) - return SpanSlice{&orig} + return newSpanSlice(&orig) } // Len returns the number of elements in the slice. // // Returns "0" for a newly instance created with "NewSpanSlice()". func (es SpanSlice) Len() int { - return len(*es.orig) + return len(*es.getOrig()) } // At returns the element at the given index. @@ -457,27 +465,27 @@ func (es SpanSlice) Len() int { // ... // Do something with the element // } func (es SpanSlice) At(ix int) Span { - return newSpan((*es.orig)[ix]) + return newSpan((*es.getOrig())[ix]) } // CopyTo copies all elements from the current slice to the dest. func (es SpanSlice) CopyTo(dest SpanSlice) { srcLen := es.Len() - destCap := cap(*dest.orig) + destCap := cap(*dest.getOrig()) if srcLen <= destCap { - (*dest.orig) = (*dest.orig)[:srcLen:destCap] - for i := range *es.orig { - newSpan((*es.orig)[i]).CopyTo(newSpan((*dest.orig)[i])) + (*dest.getOrig()) = (*dest.getOrig())[:srcLen:destCap] + for i := range *es.getOrig() { + newSpan((*es.getOrig())[i]).CopyTo(newSpan((*dest.getOrig())[i])) } return } origs := make([]otlptrace.Span, srcLen) wrappers := make([]*otlptrace.Span, srcLen) - for i := range *es.orig { + for i := range *es.getOrig() { wrappers[i] = &origs[i] - newSpan((*es.orig)[i]).CopyTo(newSpan(wrappers[i])) + newSpan((*es.getOrig())[i]).CopyTo(newSpan(wrappers[i])) } - *dest.orig = wrappers + *dest.getOrig() = wrappers } // EnsureCapacity is an operation that ensures the slice has at least the specified capacity. @@ -493,20 +501,20 @@ func (es SpanSlice) CopyTo(dest SpanSlice) { // // Here should set all the values for e. // } func (es SpanSlice) EnsureCapacity(newCap int) { - oldCap := cap(*es.orig) + oldCap := cap(*es.getOrig()) if newCap <= oldCap { return } - newOrig := make([]*otlptrace.Span, len(*es.orig), newCap) - copy(newOrig, *es.orig) - *es.orig = newOrig + newOrig := make([]*otlptrace.Span, len(*es.getOrig()), newCap) + copy(newOrig, *es.getOrig()) + *es.getOrig() = newOrig } // AppendEmpty will append to the end of the slice an empty Span. // It returns the newly added Span. func (es SpanSlice) AppendEmpty() Span { - *es.orig = append(*es.orig, &otlptrace.Span{}) + *es.getOrig() = append(*es.getOrig(), &otlptrace.Span{}) return es.At(es.Len() - 1) } @@ -519,29 +527,29 @@ func (es SpanSlice) AppendEmpty() Span { // lessFunc := func(a, b Span) bool { // return a.Name() < b.Name() // choose any comparison here // } -// assert.EqualValues(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) +// assert.Equal(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) func (es SpanSlice) Sort(less func(a, b Span) bool) SpanSlice { - sort.SliceStable(*es.orig, func(i, j int) bool { return less(es.At(i), es.At(j)) }) + sort.SliceStable(*es.getOrig(), func(i, j int) bool { return less(es.At(i), es.At(j)) }) return es } // MoveAndAppendTo moves all elements from the current slice and appends them to the dest. // The current slice will be cleared. func (es SpanSlice) MoveAndAppendTo(dest SpanSlice) { - if *dest.orig == nil { + if *dest.getOrig() == nil { // We can simply move the entire vector and avoid any allocations. - *dest.orig = *es.orig + *dest.getOrig() = *es.getOrig() } else { - *dest.orig = append(*dest.orig, *es.orig...) + *dest.getOrig() = append(*dest.getOrig(), *es.getOrig()...) } - *es.orig = nil + *es.getOrig() = nil } // RemoveIf calls f sequentially for each element present in the slice. // If f returns true, the element is removed from the slice. 
func (es SpanSlice) RemoveIf(f func(Span) bool) { newLen := 0 - for i := 0; i < len(*es.orig); i++ { + for i := 0; i < len(*es.getOrig()); i++ { if f(es.At(i)) { continue } @@ -550,11 +558,11 @@ func (es SpanSlice) RemoveIf(f func(Span) bool) { newLen++ continue } - (*es.orig)[newLen] = (*es.orig)[i] + (*es.getOrig())[newLen] = (*es.getOrig())[i] newLen++ } // TODO: Prevent memory leak by erasing truncated values. - *es.orig = (*es.orig)[:newLen] + *es.getOrig() = (*es.getOrig())[:newLen] } // Span represents a single operation within a trace. @@ -565,12 +573,15 @@ func (es SpanSlice) RemoveIf(f func(Span) bool) { // // Must use NewSpan function to create new instances. // Important: zero-initialized instance is not valid for use. -type Span struct { - orig *otlptrace.Span -} + +type Span internal.Span func newSpan(orig *otlptrace.Span) Span { - return Span{orig: orig} + return Span(internal.NewSpan(orig)) +} + +func (ms Span) getOrig() *otlptrace.Span { + return internal.GetOrigSpan(internal.Span(ms)) } // NewSpan creates a new empty Span. @@ -584,138 +595,138 @@ func NewSpan() Span { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms Span) MoveTo(dest Span) { - *dest.orig = *ms.orig - *ms.orig = otlptrace.Span{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlptrace.Span{} } // TraceID returns the traceid associated with this Span. -func (ms Span) TraceID() TraceID { - return TraceID{orig: (ms.orig.TraceId)} +func (ms Span) TraceID() pcommon.TraceID { + return pcommon.TraceID(internal.NewTraceID(ms.getOrig().TraceId)) } // SetTraceID replaces the traceid associated with this Span. -func (ms Span) SetTraceID(v TraceID) { - ms.orig.TraceId = v.orig +func (ms Span) SetTraceID(v pcommon.TraceID) { + ms.getOrig().TraceId = internal.GetOrigTraceID(internal.TraceID(v)) } // SpanID returns the spanid associated with this Span. -func (ms Span) SpanID() SpanID { - return SpanID{orig: (ms.orig.SpanId)} +func (ms Span) SpanID() pcommon.SpanID { + return pcommon.SpanID(internal.NewSpanID(ms.getOrig().SpanId)) } // SetSpanID replaces the spanid associated with this Span. -func (ms Span) SetSpanID(v SpanID) { - ms.orig.SpanId = v.orig +func (ms Span) SetSpanID(v pcommon.SpanID) { + ms.getOrig().SpanId = internal.GetOrigSpanID(internal.SpanID(v)) } // TraceState returns the tracestate associated with this Span. func (ms Span) TraceState() TraceState { - return TraceState(ms.orig.TraceState) + return TraceState(ms.getOrig().TraceState) } // SetTraceState replaces the tracestate associated with this Span. func (ms Span) SetTraceState(v TraceState) { - ms.orig.TraceState = string(v) + ms.getOrig().TraceState = string(v) } // ParentSpanID returns the parentspanid associated with this Span. -func (ms Span) ParentSpanID() SpanID { - return SpanID{orig: (ms.orig.ParentSpanId)} +func (ms Span) ParentSpanID() pcommon.SpanID { + return pcommon.SpanID(internal.NewSpanID(ms.getOrig().ParentSpanId)) } // SetParentSpanID replaces the parentspanid associated with this Span. -func (ms Span) SetParentSpanID(v SpanID) { - ms.orig.ParentSpanId = v.orig +func (ms Span) SetParentSpanID(v pcommon.SpanID) { + ms.getOrig().ParentSpanId = internal.GetOrigSpanID(internal.SpanID(v)) } // Name returns the name associated with this Span. func (ms Span) Name() string { - return ms.orig.Name + return ms.getOrig().Name } // SetName replaces the name associated with this Span. 
func (ms Span) SetName(v string) { - ms.orig.Name = v + ms.getOrig().Name = v } // Kind returns the kind associated with this Span. func (ms Span) Kind() SpanKind { - return SpanKind(ms.orig.Kind) + return SpanKind(ms.getOrig().Kind) } // SetKind replaces the kind associated with this Span. func (ms Span) SetKind(v SpanKind) { - ms.orig.Kind = otlptrace.Span_SpanKind(v) + ms.getOrig().Kind = otlptrace.Span_SpanKind(v) } // StartTimestamp returns the starttimestamp associated with this Span. -func (ms Span) StartTimestamp() Timestamp { - return Timestamp(ms.orig.StartTimeUnixNano) +func (ms Span) StartTimestamp() pcommon.Timestamp { + return pcommon.Timestamp(ms.getOrig().StartTimeUnixNano) } // SetStartTimestamp replaces the starttimestamp associated with this Span. -func (ms Span) SetStartTimestamp(v Timestamp) { - ms.orig.StartTimeUnixNano = uint64(v) +func (ms Span) SetStartTimestamp(v pcommon.Timestamp) { + ms.getOrig().StartTimeUnixNano = uint64(v) } // EndTimestamp returns the endtimestamp associated with this Span. -func (ms Span) EndTimestamp() Timestamp { - return Timestamp(ms.orig.EndTimeUnixNano) +func (ms Span) EndTimestamp() pcommon.Timestamp { + return pcommon.Timestamp(ms.getOrig().EndTimeUnixNano) } // SetEndTimestamp replaces the endtimestamp associated with this Span. -func (ms Span) SetEndTimestamp(v Timestamp) { - ms.orig.EndTimeUnixNano = uint64(v) +func (ms Span) SetEndTimestamp(v pcommon.Timestamp) { + ms.getOrig().EndTimeUnixNano = uint64(v) } // Attributes returns the Attributes associated with this Span. -func (ms Span) Attributes() Map { - return newMap(&ms.orig.Attributes) +func (ms Span) Attributes() pcommon.Map { + return pcommon.Map(internal.NewMap(&ms.getOrig().Attributes)) } // DroppedAttributesCount returns the droppedattributescount associated with this Span. func (ms Span) DroppedAttributesCount() uint32 { - return ms.orig.DroppedAttributesCount + return ms.getOrig().DroppedAttributesCount } // SetDroppedAttributesCount replaces the droppedattributescount associated with this Span. func (ms Span) SetDroppedAttributesCount(v uint32) { - ms.orig.DroppedAttributesCount = v + ms.getOrig().DroppedAttributesCount = v } // Events returns the Events associated with this Span. func (ms Span) Events() SpanEventSlice { - return newSpanEventSlice(&ms.orig.Events) + return SpanEventSlice(internal.NewSpanEventSlice(&ms.getOrig().Events)) } // DroppedEventsCount returns the droppedeventscount associated with this Span. func (ms Span) DroppedEventsCount() uint32 { - return ms.orig.DroppedEventsCount + return ms.getOrig().DroppedEventsCount } // SetDroppedEventsCount replaces the droppedeventscount associated with this Span. func (ms Span) SetDroppedEventsCount(v uint32) { - ms.orig.DroppedEventsCount = v + ms.getOrig().DroppedEventsCount = v } // Links returns the Links associated with this Span. func (ms Span) Links() SpanLinkSlice { - return newSpanLinkSlice(&ms.orig.Links) + return SpanLinkSlice(internal.NewSpanLinkSlice(&ms.getOrig().Links)) } // DroppedLinksCount returns the droppedlinkscount associated with this Span. func (ms Span) DroppedLinksCount() uint32 { - return ms.orig.DroppedLinksCount + return ms.getOrig().DroppedLinksCount } // SetDroppedLinksCount replaces the droppedlinkscount associated with this Span. func (ms Span) SetDroppedLinksCount(v uint32) { - ms.orig.DroppedLinksCount = v + ms.getOrig().DroppedLinksCount = v } // Status returns the status associated with this Span. 
func (ms Span) Status() SpanStatus { - return newSpanStatus(&ms.orig.Status) + return SpanStatus(internal.NewSpanStatus(&ms.getOrig().Status)) } // CopyTo copies all properties from the current struct to the dest. @@ -744,28 +755,28 @@ func (ms Span) CopyTo(dest Span) { // // Must use NewSpanEventSlice function to create new instances. // Important: zero-initialized instance is not valid for use. -type SpanEventSlice struct { - // orig points to the slice otlptrace.Span_Event field contained somewhere else. - // We use pointer-to-slice to be able to modify it in functions like EnsureCapacity. - orig *[]*otlptrace.Span_Event -} +type SpanEventSlice internal.SpanEventSlice func newSpanEventSlice(orig *[]*otlptrace.Span_Event) SpanEventSlice { - return SpanEventSlice{orig} + return SpanEventSlice(internal.NewSpanEventSlice(orig)) +} + +func (ms SpanEventSlice) getOrig() *[]*otlptrace.Span_Event { + return internal.GetOrigSpanEventSlice(internal.SpanEventSlice(ms)) } // NewSpanEventSlice creates a SpanEventSlice with 0 elements. // Can use "EnsureCapacity" to initialize with a given capacity. func NewSpanEventSlice() SpanEventSlice { orig := []*otlptrace.Span_Event(nil) - return SpanEventSlice{&orig} + return newSpanEventSlice(&orig) } // Len returns the number of elements in the slice. // // Returns "0" for a newly instance created with "NewSpanEventSlice()". func (es SpanEventSlice) Len() int { - return len(*es.orig) + return len(*es.getOrig()) } // At returns the element at the given index. @@ -777,27 +788,27 @@ func (es SpanEventSlice) Len() int { // ... // Do something with the element // } func (es SpanEventSlice) At(ix int) SpanEvent { - return newSpanEvent((*es.orig)[ix]) + return newSpanEvent((*es.getOrig())[ix]) } // CopyTo copies all elements from the current slice to the dest. func (es SpanEventSlice) CopyTo(dest SpanEventSlice) { srcLen := es.Len() - destCap := cap(*dest.orig) + destCap := cap(*dest.getOrig()) if srcLen <= destCap { - (*dest.orig) = (*dest.orig)[:srcLen:destCap] - for i := range *es.orig { - newSpanEvent((*es.orig)[i]).CopyTo(newSpanEvent((*dest.orig)[i])) + (*dest.getOrig()) = (*dest.getOrig())[:srcLen:destCap] + for i := range *es.getOrig() { + newSpanEvent((*es.getOrig())[i]).CopyTo(newSpanEvent((*dest.getOrig())[i])) } return } origs := make([]otlptrace.Span_Event, srcLen) wrappers := make([]*otlptrace.Span_Event, srcLen) - for i := range *es.orig { + for i := range *es.getOrig() { wrappers[i] = &origs[i] - newSpanEvent((*es.orig)[i]).CopyTo(newSpanEvent(wrappers[i])) + newSpanEvent((*es.getOrig())[i]).CopyTo(newSpanEvent(wrappers[i])) } - *dest.orig = wrappers + *dest.getOrig() = wrappers } // EnsureCapacity is an operation that ensures the slice has at least the specified capacity. @@ -813,20 +824,20 @@ func (es SpanEventSlice) CopyTo(dest SpanEventSlice) { // // Here should set all the values for e. // } func (es SpanEventSlice) EnsureCapacity(newCap int) { - oldCap := cap(*es.orig) + oldCap := cap(*es.getOrig()) if newCap <= oldCap { return } - newOrig := make([]*otlptrace.Span_Event, len(*es.orig), newCap) - copy(newOrig, *es.orig) - *es.orig = newOrig + newOrig := make([]*otlptrace.Span_Event, len(*es.getOrig()), newCap) + copy(newOrig, *es.getOrig()) + *es.getOrig() = newOrig } // AppendEmpty will append to the end of the slice an empty SpanEvent. // It returns the newly added SpanEvent. 
func (es SpanEventSlice) AppendEmpty() SpanEvent { - *es.orig = append(*es.orig, &otlptrace.Span_Event{}) + *es.getOrig() = append(*es.getOrig(), &otlptrace.Span_Event{}) return es.At(es.Len() - 1) } @@ -839,29 +850,29 @@ func (es SpanEventSlice) AppendEmpty() SpanEvent { // lessFunc := func(a, b SpanEvent) bool { // return a.Name() < b.Name() // choose any comparison here // } -// assert.EqualValues(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) +// assert.Equal(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) func (es SpanEventSlice) Sort(less func(a, b SpanEvent) bool) SpanEventSlice { - sort.SliceStable(*es.orig, func(i, j int) bool { return less(es.At(i), es.At(j)) }) + sort.SliceStable(*es.getOrig(), func(i, j int) bool { return less(es.At(i), es.At(j)) }) return es } // MoveAndAppendTo moves all elements from the current slice and appends them to the dest. // The current slice will be cleared. func (es SpanEventSlice) MoveAndAppendTo(dest SpanEventSlice) { - if *dest.orig == nil { + if *dest.getOrig() == nil { // We can simply move the entire vector and avoid any allocations. - *dest.orig = *es.orig + *dest.getOrig() = *es.getOrig() } else { - *dest.orig = append(*dest.orig, *es.orig...) + *dest.getOrig() = append(*dest.getOrig(), *es.getOrig()...) } - *es.orig = nil + *es.getOrig() = nil } // RemoveIf calls f sequentially for each element present in the slice. // If f returns true, the element is removed from the slice. func (es SpanEventSlice) RemoveIf(f func(SpanEvent) bool) { newLen := 0 - for i := 0; i < len(*es.orig); i++ { + for i := 0; i < len(*es.getOrig()); i++ { if f(es.At(i)) { continue } @@ -870,11 +881,11 @@ func (es SpanEventSlice) RemoveIf(f func(SpanEvent) bool) { newLen++ continue } - (*es.orig)[newLen] = (*es.orig)[i] + (*es.getOrig())[newLen] = (*es.getOrig())[i] newLen++ } // TODO: Prevent memory leak by erasing truncated values. - *es.orig = (*es.orig)[:newLen] + *es.getOrig() = (*es.getOrig())[:newLen] } // SpanEvent is a time-stamped annotation of the span, consisting of user-supplied @@ -885,12 +896,15 @@ func (es SpanEventSlice) RemoveIf(f func(SpanEvent) bool) { // // Must use NewSpanEvent function to create new instances. // Important: zero-initialized instance is not valid for use. -type SpanEvent struct { - orig *otlptrace.Span_Event -} + +type SpanEvent internal.SpanEvent func newSpanEvent(orig *otlptrace.Span_Event) SpanEvent { - return SpanEvent{orig: orig} + return SpanEvent(internal.NewSpanEvent(orig)) +} + +func (ms SpanEvent) getOrig() *otlptrace.Span_Event { + return internal.GetOrigSpanEvent(internal.SpanEvent(ms)) } // NewSpanEvent creates a new empty SpanEvent. @@ -904,43 +918,43 @@ func NewSpanEvent() SpanEvent { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms SpanEvent) MoveTo(dest SpanEvent) { - *dest.orig = *ms.orig - *ms.orig = otlptrace.Span_Event{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlptrace.Span_Event{} } // Timestamp returns the timestamp associated with this SpanEvent. -func (ms SpanEvent) Timestamp() Timestamp { - return Timestamp(ms.orig.TimeUnixNano) +func (ms SpanEvent) Timestamp() pcommon.Timestamp { + return pcommon.Timestamp(ms.getOrig().TimeUnixNano) } // SetTimestamp replaces the timestamp associated with this SpanEvent. 
-func (ms SpanEvent) SetTimestamp(v Timestamp) { - ms.orig.TimeUnixNano = uint64(v) +func (ms SpanEvent) SetTimestamp(v pcommon.Timestamp) { + ms.getOrig().TimeUnixNano = uint64(v) } // Name returns the name associated with this SpanEvent. func (ms SpanEvent) Name() string { - return ms.orig.Name + return ms.getOrig().Name } // SetName replaces the name associated with this SpanEvent. func (ms SpanEvent) SetName(v string) { - ms.orig.Name = v + ms.getOrig().Name = v } // Attributes returns the Attributes associated with this SpanEvent. -func (ms SpanEvent) Attributes() Map { - return newMap(&ms.orig.Attributes) +func (ms SpanEvent) Attributes() pcommon.Map { + return pcommon.Map(internal.NewMap(&ms.getOrig().Attributes)) } // DroppedAttributesCount returns the droppedattributescount associated with this SpanEvent. func (ms SpanEvent) DroppedAttributesCount() uint32 { - return ms.orig.DroppedAttributesCount + return ms.getOrig().DroppedAttributesCount } // SetDroppedAttributesCount replaces the droppedattributescount associated with this SpanEvent. func (ms SpanEvent) SetDroppedAttributesCount(v uint32) { - ms.orig.DroppedAttributesCount = v + ms.getOrig().DroppedAttributesCount = v } // CopyTo copies all properties from the current struct to the dest. @@ -958,28 +972,28 @@ func (ms SpanEvent) CopyTo(dest SpanEvent) { // // Must use NewSpanLinkSlice function to create new instances. // Important: zero-initialized instance is not valid for use. -type SpanLinkSlice struct { - // orig points to the slice otlptrace.Span_Link field contained somewhere else. - // We use pointer-to-slice to be able to modify it in functions like EnsureCapacity. - orig *[]*otlptrace.Span_Link -} +type SpanLinkSlice internal.SpanLinkSlice func newSpanLinkSlice(orig *[]*otlptrace.Span_Link) SpanLinkSlice { - return SpanLinkSlice{orig} + return SpanLinkSlice(internal.NewSpanLinkSlice(orig)) +} + +func (ms SpanLinkSlice) getOrig() *[]*otlptrace.Span_Link { + return internal.GetOrigSpanLinkSlice(internal.SpanLinkSlice(ms)) } // NewSpanLinkSlice creates a SpanLinkSlice with 0 elements. // Can use "EnsureCapacity" to initialize with a given capacity. func NewSpanLinkSlice() SpanLinkSlice { orig := []*otlptrace.Span_Link(nil) - return SpanLinkSlice{&orig} + return newSpanLinkSlice(&orig) } // Len returns the number of elements in the slice. // // Returns "0" for a newly instance created with "NewSpanLinkSlice()". func (es SpanLinkSlice) Len() int { - return len(*es.orig) + return len(*es.getOrig()) } // At returns the element at the given index. @@ -991,27 +1005,27 @@ func (es SpanLinkSlice) Len() int { // ... // Do something with the element // } func (es SpanLinkSlice) At(ix int) SpanLink { - return newSpanLink((*es.orig)[ix]) + return newSpanLink((*es.getOrig())[ix]) } // CopyTo copies all elements from the current slice to the dest. 
func (es SpanLinkSlice) CopyTo(dest SpanLinkSlice) { srcLen := es.Len() - destCap := cap(*dest.orig) + destCap := cap(*dest.getOrig()) if srcLen <= destCap { - (*dest.orig) = (*dest.orig)[:srcLen:destCap] - for i := range *es.orig { - newSpanLink((*es.orig)[i]).CopyTo(newSpanLink((*dest.orig)[i])) + (*dest.getOrig()) = (*dest.getOrig())[:srcLen:destCap] + for i := range *es.getOrig() { + newSpanLink((*es.getOrig())[i]).CopyTo(newSpanLink((*dest.getOrig())[i])) } return } origs := make([]otlptrace.Span_Link, srcLen) wrappers := make([]*otlptrace.Span_Link, srcLen) - for i := range *es.orig { + for i := range *es.getOrig() { wrappers[i] = &origs[i] - newSpanLink((*es.orig)[i]).CopyTo(newSpanLink(wrappers[i])) + newSpanLink((*es.getOrig())[i]).CopyTo(newSpanLink(wrappers[i])) } - *dest.orig = wrappers + *dest.getOrig() = wrappers } // EnsureCapacity is an operation that ensures the slice has at least the specified capacity. @@ -1027,20 +1041,20 @@ func (es SpanLinkSlice) CopyTo(dest SpanLinkSlice) { // // Here should set all the values for e. // } func (es SpanLinkSlice) EnsureCapacity(newCap int) { - oldCap := cap(*es.orig) + oldCap := cap(*es.getOrig()) if newCap <= oldCap { return } - newOrig := make([]*otlptrace.Span_Link, len(*es.orig), newCap) - copy(newOrig, *es.orig) - *es.orig = newOrig + newOrig := make([]*otlptrace.Span_Link, len(*es.getOrig()), newCap) + copy(newOrig, *es.getOrig()) + *es.getOrig() = newOrig } // AppendEmpty will append to the end of the slice an empty SpanLink. // It returns the newly added SpanLink. func (es SpanLinkSlice) AppendEmpty() SpanLink { - *es.orig = append(*es.orig, &otlptrace.Span_Link{}) + *es.getOrig() = append(*es.getOrig(), &otlptrace.Span_Link{}) return es.At(es.Len() - 1) } @@ -1053,29 +1067,29 @@ func (es SpanLinkSlice) AppendEmpty() SpanLink { // lessFunc := func(a, b SpanLink) bool { // return a.Name() < b.Name() // choose any comparison here // } -// assert.EqualValues(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) +// assert.Equal(t, expected.Sort(lessFunc), actual.Sort(lessFunc)) func (es SpanLinkSlice) Sort(less func(a, b SpanLink) bool) SpanLinkSlice { - sort.SliceStable(*es.orig, func(i, j int) bool { return less(es.At(i), es.At(j)) }) + sort.SliceStable(*es.getOrig(), func(i, j int) bool { return less(es.At(i), es.At(j)) }) return es } // MoveAndAppendTo moves all elements from the current slice and appends them to the dest. // The current slice will be cleared. func (es SpanLinkSlice) MoveAndAppendTo(dest SpanLinkSlice) { - if *dest.orig == nil { + if *dest.getOrig() == nil { // We can simply move the entire vector and avoid any allocations. - *dest.orig = *es.orig + *dest.getOrig() = *es.getOrig() } else { - *dest.orig = append(*dest.orig, *es.orig...) + *dest.getOrig() = append(*dest.getOrig(), *es.getOrig()...) } - *es.orig = nil + *es.getOrig() = nil } // RemoveIf calls f sequentially for each element present in the slice. // If f returns true, the element is removed from the slice. func (es SpanLinkSlice) RemoveIf(f func(SpanLink) bool) { newLen := 0 - for i := 0; i < len(*es.orig); i++ { + for i := 0; i < len(*es.getOrig()); i++ { if f(es.At(i)) { continue } @@ -1084,11 +1098,11 @@ func (es SpanLinkSlice) RemoveIf(f func(SpanLink) bool) { newLen++ continue } - (*es.orig)[newLen] = (*es.orig)[i] + (*es.getOrig())[newLen] = (*es.getOrig())[i] newLen++ } // TODO: Prevent memory leak by erasing truncated values. 
- *es.orig = (*es.orig)[:newLen] + *es.getOrig() = (*es.getOrig())[:newLen] } // SpanLink is a pointer from the current span to another span in the same trace or in a @@ -1100,12 +1114,15 @@ func (es SpanLinkSlice) RemoveIf(f func(SpanLink) bool) { // // Must use NewSpanLink function to create new instances. // Important: zero-initialized instance is not valid for use. -type SpanLink struct { - orig *otlptrace.Span_Link -} + +type SpanLink internal.SpanLink func newSpanLink(orig *otlptrace.Span_Link) SpanLink { - return SpanLink{orig: orig} + return SpanLink(internal.NewSpanLink(orig)) +} + +func (ms SpanLink) getOrig() *otlptrace.Span_Link { + return internal.GetOrigSpanLink(internal.SpanLink(ms)) } // NewSpanLink creates a new empty SpanLink. @@ -1119,53 +1136,53 @@ func NewSpanLink() SpanLink { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms SpanLink) MoveTo(dest SpanLink) { - *dest.orig = *ms.orig - *ms.orig = otlptrace.Span_Link{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlptrace.Span_Link{} } // TraceID returns the traceid associated with this SpanLink. -func (ms SpanLink) TraceID() TraceID { - return TraceID{orig: (ms.orig.TraceId)} +func (ms SpanLink) TraceID() pcommon.TraceID { + return pcommon.TraceID(internal.NewTraceID(ms.getOrig().TraceId)) } // SetTraceID replaces the traceid associated with this SpanLink. -func (ms SpanLink) SetTraceID(v TraceID) { - ms.orig.TraceId = v.orig +func (ms SpanLink) SetTraceID(v pcommon.TraceID) { + ms.getOrig().TraceId = internal.GetOrigTraceID(internal.TraceID(v)) } // SpanID returns the spanid associated with this SpanLink. -func (ms SpanLink) SpanID() SpanID { - return SpanID{orig: (ms.orig.SpanId)} +func (ms SpanLink) SpanID() pcommon.SpanID { + return pcommon.SpanID(internal.NewSpanID(ms.getOrig().SpanId)) } // SetSpanID replaces the spanid associated with this SpanLink. -func (ms SpanLink) SetSpanID(v SpanID) { - ms.orig.SpanId = v.orig +func (ms SpanLink) SetSpanID(v pcommon.SpanID) { + ms.getOrig().SpanId = internal.GetOrigSpanID(internal.SpanID(v)) } // TraceState returns the tracestate associated with this SpanLink. func (ms SpanLink) TraceState() TraceState { - return TraceState(ms.orig.TraceState) + return TraceState(ms.getOrig().TraceState) } // SetTraceState replaces the tracestate associated with this SpanLink. func (ms SpanLink) SetTraceState(v TraceState) { - ms.orig.TraceState = string(v) + ms.getOrig().TraceState = string(v) } // Attributes returns the Attributes associated with this SpanLink. -func (ms SpanLink) Attributes() Map { - return newMap(&ms.orig.Attributes) +func (ms SpanLink) Attributes() pcommon.Map { + return pcommon.Map(internal.NewMap(&ms.getOrig().Attributes)) } // DroppedAttributesCount returns the droppedattributescount associated with this SpanLink. func (ms SpanLink) DroppedAttributesCount() uint32 { - return ms.orig.DroppedAttributesCount + return ms.getOrig().DroppedAttributesCount } // SetDroppedAttributesCount replaces the droppedattributescount associated with this SpanLink. func (ms SpanLink) SetDroppedAttributesCount(v uint32) { - ms.orig.DroppedAttributesCount = v + ms.getOrig().DroppedAttributesCount = v } // CopyTo copies all properties from the current struct to the dest. @@ -1185,12 +1202,15 @@ func (ms SpanLink) CopyTo(dest SpanLink) { // // Must use NewSpanStatus function to create new instances. // Important: zero-initialized instance is not valid for use. 
-type SpanStatus struct { - orig *otlptrace.Status -} + +type SpanStatus internal.SpanStatus func newSpanStatus(orig *otlptrace.Status) SpanStatus { - return SpanStatus{orig: orig} + return SpanStatus(internal.NewSpanStatus(orig)) +} + +func (ms SpanStatus) getOrig() *otlptrace.Status { + return internal.GetOrigSpanStatus(internal.SpanStatus(ms)) } // NewSpanStatus creates a new empty SpanStatus. @@ -1204,28 +1224,28 @@ func NewSpanStatus() SpanStatus { // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value func (ms SpanStatus) MoveTo(dest SpanStatus) { - *dest.orig = *ms.orig - *ms.orig = otlptrace.Status{} + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlptrace.Status{} } // Code returns the code associated with this SpanStatus. func (ms SpanStatus) Code() StatusCode { - return StatusCode(ms.orig.Code) + return StatusCode(ms.getOrig().Code) } // SetCode replaces the code associated with this SpanStatus. func (ms SpanStatus) SetCode(v StatusCode) { - ms.orig.Code = otlptrace.Status_StatusCode(v) + ms.getOrig().Code = otlptrace.Status_StatusCode(v) } // Message returns the message associated with this SpanStatus. func (ms SpanStatus) Message() string { - return ms.orig.Message + return ms.getOrig().Message } // SetMessage replaces the message associated with this SpanStatus. func (ms SpanStatus) SetMessage(v string) { - ms.orig.Message = v + ms.getOrig().Message = v } // CopyTo copies all properties from the current struct to the dest. diff --git a/pdata/ptrace/generated_traces_test.go b/pdata/ptrace/generated_traces_test.go new file mode 100644 index 00000000000..b25513200ca --- /dev/null +++ b/pdata/ptrace/generated_traces_test.go @@ -0,0 +1,920 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by "model/internal/cmd/pdatagen/main.go". DO NOT EDIT. +// To regenerate this file run "go run model/internal/cmd/pdatagen/main.go". 
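The regenerated tests below lean on explicit conversions between the public ptrace types and the internal wrappers, because the test generators (the `GenerateTest*`/`FillTest*` helpers) stay in the internal package. A minimal sketch of that conversion pattern, built only from helpers visible in this file; the `exampleConversions` name is illustrative and not part of the patch:

```golang
package ptrace

import "go.opentelemetry.io/collector/pdata/internal"

// exampleConversions shows the two conversion directions the generated tests use.
func exampleConversions() ResourceSpans {
	// Public -> internal: pass a public value to an internal-only test helper.
	rs := NewResourceSpans()
	internal.FillTestResourceSpans(internal.ResourceSpans(rs))

	// Internal -> public: wrap a value produced by an internal generator helper.
	expected := ResourceSpans(internal.GenerateTestResourceSpans())
	_ = expected
	return rs
}
```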
+ +package ptrace + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "go.opentelemetry.io/collector/pdata/internal" + "go.opentelemetry.io/collector/pdata/internal/data" + otlptrace "go.opentelemetry.io/collector/pdata/internal/data/protogen/trace/v1" + "go.opentelemetry.io/collector/pdata/pcommon" +) + +func TestResourceSpansSlice(t *testing.T) { + es := NewResourceSpansSlice() + assert.Equal(t, 0, es.Len()) + es = newResourceSpansSlice(&[]*otlptrace.ResourceSpans{}) + assert.Equal(t, 0, es.Len()) + + es.EnsureCapacity(7) + emptyVal := newResourceSpans(&otlptrace.ResourceSpans{}) + testVal := ResourceSpans(internal.GenerateTestResourceSpans()) + assert.Equal(t, 7, cap(*es.getOrig())) + for i := 0; i < es.Len(); i++ { + el := es.AppendEmpty() + assert.Equal(t, emptyVal, el) + internal.FillTestResourceSpans(internal.ResourceSpans(el)) + assert.Equal(t, testVal, el) + } +} + +func TestResourceSpansSlice_CopyTo(t *testing.T) { + dest := NewResourceSpansSlice() + // Test CopyTo to empty + NewResourceSpansSlice().CopyTo(dest) + assert.Equal(t, NewResourceSpansSlice(), dest) + + // Test CopyTo larger slice + ResourceSpansSlice(internal.GenerateTestResourceSpansSlice()).CopyTo(dest) + assert.Equal(t, ResourceSpansSlice(internal.GenerateTestResourceSpansSlice()), dest) + + // Test CopyTo same size slice + ResourceSpansSlice(internal.GenerateTestResourceSpansSlice()).CopyTo(dest) + assert.Equal(t, ResourceSpansSlice(internal.GenerateTestResourceSpansSlice()), dest) +} + +func TestResourceSpansSlice_EnsureCapacity(t *testing.T) { + es := ResourceSpansSlice(internal.GenerateTestResourceSpansSlice()) + // Test ensure smaller capacity. + const ensureSmallLen = 4 + expectedEs := make(map[*otlptrace.ResourceSpans]bool) + for i := 0; i < es.Len(); i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, es.Len(), len(expectedEs)) + es.EnsureCapacity(ensureSmallLen) + assert.Less(t, ensureSmallLen, es.Len()) + foundEs := make(map[*otlptrace.ResourceSpans]bool, es.Len()) + for i := 0; i < es.Len(); i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) + + // Test ensure larger capacity + const ensureLargeLen = 9 + oldLen := es.Len() + expectedEs = make(map[*otlptrace.ResourceSpans]bool, oldLen) + for i := 0; i < oldLen; i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, oldLen, len(expectedEs)) + es.EnsureCapacity(ensureLargeLen) + assert.Equal(t, ensureLargeLen, cap(*es.getOrig())) + foundEs = make(map[*otlptrace.ResourceSpans]bool, oldLen) + for i := 0; i < oldLen; i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) +} + +func TestResourceSpansSlice_MoveAndAppendTo(t *testing.T) { + // Test MoveAndAppendTo to empty + expectedSlice := ResourceSpansSlice(internal.GenerateTestResourceSpansSlice()) + dest := NewResourceSpansSlice() + src := ResourceSpansSlice(internal.GenerateTestResourceSpansSlice()) + src.MoveAndAppendTo(dest) + assert.Equal(t, ResourceSpansSlice(internal.GenerateTestResourceSpansSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo empty slice + src.MoveAndAppendTo(dest) + assert.Equal(t, ResourceSpansSlice(internal.GenerateTestResourceSpansSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo not empty slice + ResourceSpansSlice(internal.GenerateTestResourceSpansSlice()).MoveAndAppendTo(dest) + assert.Equal(t, 
2*expectedSlice.Len(), dest.Len()) + for i := 0; i < expectedSlice.Len(); i++ { + assert.Equal(t, expectedSlice.At(i), dest.At(i)) + assert.Equal(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) + } +} + +func TestResourceSpansSlice_RemoveIf(t *testing.T) { + // Test RemoveIf on empty slice + emptySlice := NewResourceSpansSlice() + emptySlice.RemoveIf(func(el ResourceSpans) bool { + t.Fail() + return false + }) + + // Test RemoveIf + filtered := ResourceSpansSlice(internal.GenerateTestResourceSpansSlice()) + pos := 0 + filtered.RemoveIf(func(el ResourceSpans) bool { + pos++ + return pos%3 == 0 + }) + assert.Equal(t, 5, filtered.Len()) +} + +func TestResourceSpans_MoveTo(t *testing.T) { + ms := ResourceSpans(internal.GenerateTestResourceSpans()) + dest := NewResourceSpans() + ms.MoveTo(dest) + assert.Equal(t, NewResourceSpans(), ms) + assert.Equal(t, ResourceSpans(internal.GenerateTestResourceSpans()), dest) +} + +func TestResourceSpans_CopyTo(t *testing.T) { + ms := NewResourceSpans() + orig := NewResourceSpans() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = ResourceSpans(internal.GenerateTestResourceSpans()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestResourceSpans_Resource(t *testing.T) { + ms := NewResourceSpans() + internal.FillTestResource(internal.Resource(ms.Resource())) + assert.Equal(t, pcommon.Resource(internal.GenerateTestResource()), ms.Resource()) +} + +func TestResourceSpans_SchemaUrl(t *testing.T) { + ms := NewResourceSpans() + assert.Equal(t, "", ms.SchemaUrl()) + ms.SetSchemaUrl("https://opentelemetry.io/schemas/1.5.0") + assert.Equal(t, "https://opentelemetry.io/schemas/1.5.0", ms.SchemaUrl()) +} + +func TestResourceSpans_ScopeSpans(t *testing.T) { + ms := NewResourceSpans() + assert.Equal(t, NewScopeSpansSlice(), ms.ScopeSpans()) + internal.FillTestScopeSpansSlice(internal.ScopeSpansSlice(ms.ScopeSpans())) + assert.Equal(t, ScopeSpansSlice(internal.GenerateTestScopeSpansSlice()), ms.ScopeSpans()) +} + +func TestScopeSpansSlice(t *testing.T) { + es := NewScopeSpansSlice() + assert.Equal(t, 0, es.Len()) + es = newScopeSpansSlice(&[]*otlptrace.ScopeSpans{}) + assert.Equal(t, 0, es.Len()) + + es.EnsureCapacity(7) + emptyVal := newScopeSpans(&otlptrace.ScopeSpans{}) + testVal := ScopeSpans(internal.GenerateTestScopeSpans()) + assert.Equal(t, 7, cap(*es.getOrig())) + for i := 0; i < es.Len(); i++ { + el := es.AppendEmpty() + assert.Equal(t, emptyVal, el) + internal.FillTestScopeSpans(internal.ScopeSpans(el)) + assert.Equal(t, testVal, el) + } +} + +func TestScopeSpansSlice_CopyTo(t *testing.T) { + dest := NewScopeSpansSlice() + // Test CopyTo to empty + NewScopeSpansSlice().CopyTo(dest) + assert.Equal(t, NewScopeSpansSlice(), dest) + + // Test CopyTo larger slice + ScopeSpansSlice(internal.GenerateTestScopeSpansSlice()).CopyTo(dest) + assert.Equal(t, ScopeSpansSlice(internal.GenerateTestScopeSpansSlice()), dest) + + // Test CopyTo same size slice + ScopeSpansSlice(internal.GenerateTestScopeSpansSlice()).CopyTo(dest) + assert.Equal(t, ScopeSpansSlice(internal.GenerateTestScopeSpansSlice()), dest) +} + +func TestScopeSpansSlice_EnsureCapacity(t *testing.T) { + es := ScopeSpansSlice(internal.GenerateTestScopeSpansSlice()) + // Test ensure smaller capacity. 
+ const ensureSmallLen = 4 + expectedEs := make(map[*otlptrace.ScopeSpans]bool) + for i := 0; i < es.Len(); i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, es.Len(), len(expectedEs)) + es.EnsureCapacity(ensureSmallLen) + assert.Less(t, ensureSmallLen, es.Len()) + foundEs := make(map[*otlptrace.ScopeSpans]bool, es.Len()) + for i := 0; i < es.Len(); i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) + + // Test ensure larger capacity + const ensureLargeLen = 9 + oldLen := es.Len() + expectedEs = make(map[*otlptrace.ScopeSpans]bool, oldLen) + for i := 0; i < oldLen; i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, oldLen, len(expectedEs)) + es.EnsureCapacity(ensureLargeLen) + assert.Equal(t, ensureLargeLen, cap(*es.getOrig())) + foundEs = make(map[*otlptrace.ScopeSpans]bool, oldLen) + for i := 0; i < oldLen; i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) +} + +func TestScopeSpansSlice_MoveAndAppendTo(t *testing.T) { + // Test MoveAndAppendTo to empty + expectedSlice := ScopeSpansSlice(internal.GenerateTestScopeSpansSlice()) + dest := NewScopeSpansSlice() + src := ScopeSpansSlice(internal.GenerateTestScopeSpansSlice()) + src.MoveAndAppendTo(dest) + assert.Equal(t, ScopeSpansSlice(internal.GenerateTestScopeSpansSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo empty slice + src.MoveAndAppendTo(dest) + assert.Equal(t, ScopeSpansSlice(internal.GenerateTestScopeSpansSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo not empty slice + ScopeSpansSlice(internal.GenerateTestScopeSpansSlice()).MoveAndAppendTo(dest) + assert.Equal(t, 2*expectedSlice.Len(), dest.Len()) + for i := 0; i < expectedSlice.Len(); i++ { + assert.Equal(t, expectedSlice.At(i), dest.At(i)) + assert.Equal(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) + } +} + +func TestScopeSpansSlice_RemoveIf(t *testing.T) { + // Test RemoveIf on empty slice + emptySlice := NewScopeSpansSlice() + emptySlice.RemoveIf(func(el ScopeSpans) bool { + t.Fail() + return false + }) + + // Test RemoveIf + filtered := ScopeSpansSlice(internal.GenerateTestScopeSpansSlice()) + pos := 0 + filtered.RemoveIf(func(el ScopeSpans) bool { + pos++ + return pos%3 == 0 + }) + assert.Equal(t, 5, filtered.Len()) +} + +func TestScopeSpans_MoveTo(t *testing.T) { + ms := ScopeSpans(internal.GenerateTestScopeSpans()) + dest := NewScopeSpans() + ms.MoveTo(dest) + assert.Equal(t, NewScopeSpans(), ms) + assert.Equal(t, ScopeSpans(internal.GenerateTestScopeSpans()), dest) +} + +func TestScopeSpans_CopyTo(t *testing.T) { + ms := NewScopeSpans() + orig := NewScopeSpans() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = ScopeSpans(internal.GenerateTestScopeSpans()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestScopeSpans_Scope(t *testing.T) { + ms := NewScopeSpans() + internal.FillTestInstrumentationScope(internal.InstrumentationScope(ms.Scope())) + assert.Equal(t, pcommon.InstrumentationScope(internal.GenerateTestInstrumentationScope()), ms.Scope()) +} + +func TestScopeSpans_SchemaUrl(t *testing.T) { + ms := NewScopeSpans() + assert.Equal(t, "", ms.SchemaUrl()) + ms.SetSchemaUrl("https://opentelemetry.io/schemas/1.5.0") + assert.Equal(t, "https://opentelemetry.io/schemas/1.5.0", ms.SchemaUrl()) +} + +func TestScopeSpans_Spans(t *testing.T) { + ms := NewScopeSpans() + assert.Equal(t, 
NewSpanSlice(), ms.Spans()) + internal.FillTestSpanSlice(internal.SpanSlice(ms.Spans())) + assert.Equal(t, SpanSlice(internal.GenerateTestSpanSlice()), ms.Spans()) +} + +func TestSpanSlice(t *testing.T) { + es := NewSpanSlice() + assert.Equal(t, 0, es.Len()) + es = newSpanSlice(&[]*otlptrace.Span{}) + assert.Equal(t, 0, es.Len()) + + es.EnsureCapacity(7) + emptyVal := newSpan(&otlptrace.Span{}) + testVal := Span(internal.GenerateTestSpan()) + assert.Equal(t, 7, cap(*es.getOrig())) + for i := 0; i < es.Len(); i++ { + el := es.AppendEmpty() + assert.Equal(t, emptyVal, el) + internal.FillTestSpan(internal.Span(el)) + assert.Equal(t, testVal, el) + } +} + +func TestSpanSlice_CopyTo(t *testing.T) { + dest := NewSpanSlice() + // Test CopyTo to empty + NewSpanSlice().CopyTo(dest) + assert.Equal(t, NewSpanSlice(), dest) + + // Test CopyTo larger slice + SpanSlice(internal.GenerateTestSpanSlice()).CopyTo(dest) + assert.Equal(t, SpanSlice(internal.GenerateTestSpanSlice()), dest) + + // Test CopyTo same size slice + SpanSlice(internal.GenerateTestSpanSlice()).CopyTo(dest) + assert.Equal(t, SpanSlice(internal.GenerateTestSpanSlice()), dest) +} + +func TestSpanSlice_EnsureCapacity(t *testing.T) { + es := SpanSlice(internal.GenerateTestSpanSlice()) + // Test ensure smaller capacity. + const ensureSmallLen = 4 + expectedEs := make(map[*otlptrace.Span]bool) + for i := 0; i < es.Len(); i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, es.Len(), len(expectedEs)) + es.EnsureCapacity(ensureSmallLen) + assert.Less(t, ensureSmallLen, es.Len()) + foundEs := make(map[*otlptrace.Span]bool, es.Len()) + for i := 0; i < es.Len(); i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) + + // Test ensure larger capacity + const ensureLargeLen = 9 + oldLen := es.Len() + expectedEs = make(map[*otlptrace.Span]bool, oldLen) + for i := 0; i < oldLen; i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, oldLen, len(expectedEs)) + es.EnsureCapacity(ensureLargeLen) + assert.Equal(t, ensureLargeLen, cap(*es.getOrig())) + foundEs = make(map[*otlptrace.Span]bool, oldLen) + for i := 0; i < oldLen; i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) +} + +func TestSpanSlice_MoveAndAppendTo(t *testing.T) { + // Test MoveAndAppendTo to empty + expectedSlice := SpanSlice(internal.GenerateTestSpanSlice()) + dest := NewSpanSlice() + src := SpanSlice(internal.GenerateTestSpanSlice()) + src.MoveAndAppendTo(dest) + assert.Equal(t, SpanSlice(internal.GenerateTestSpanSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo empty slice + src.MoveAndAppendTo(dest) + assert.Equal(t, SpanSlice(internal.GenerateTestSpanSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo not empty slice + SpanSlice(internal.GenerateTestSpanSlice()).MoveAndAppendTo(dest) + assert.Equal(t, 2*expectedSlice.Len(), dest.Len()) + for i := 0; i < expectedSlice.Len(); i++ { + assert.Equal(t, expectedSlice.At(i), dest.At(i)) + assert.Equal(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) + } +} + +func TestSpanSlice_RemoveIf(t *testing.T) { + // Test RemoveIf on empty slice + emptySlice := NewSpanSlice() + emptySlice.RemoveIf(func(el Span) bool { + t.Fail() + return false + }) + + // Test RemoveIf + filtered := SpanSlice(internal.GenerateTestSpanSlice()) + pos := 0 + filtered.RemoveIf(func(el Span) bool { + pos++ + return pos%3 
== 0 + }) + assert.Equal(t, 5, filtered.Len()) +} + +func TestSpan_MoveTo(t *testing.T) { + ms := Span(internal.GenerateTestSpan()) + dest := NewSpan() + ms.MoveTo(dest) + assert.Equal(t, NewSpan(), ms) + assert.Equal(t, Span(internal.GenerateTestSpan()), dest) +} + +func TestSpan_CopyTo(t *testing.T) { + ms := NewSpan() + orig := NewSpan() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = Span(internal.GenerateTestSpan()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestSpan_TraceID(t *testing.T) { + ms := NewSpan() + assert.Equal(t, pcommon.TraceID(internal.NewTraceID(data.NewTraceID([16]byte{}))), ms.TraceID()) + testValTraceID := pcommon.TraceID(internal.NewTraceID(data.NewTraceID([16]byte{1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1}))) + ms.SetTraceID(testValTraceID) + assert.Equal(t, testValTraceID, ms.TraceID()) +} + +func TestSpan_SpanID(t *testing.T) { + ms := NewSpan() + assert.Equal(t, pcommon.SpanID(internal.NewSpanID(data.NewSpanID([8]byte{}))), ms.SpanID()) + testValSpanID := pcommon.SpanID(internal.NewSpanID(data.NewSpanID([8]byte{1, 2, 3, 4, 5, 6, 7, 8}))) + ms.SetSpanID(testValSpanID) + assert.Equal(t, testValSpanID, ms.SpanID()) +} + +func TestSpan_TraceState(t *testing.T) { + ms := NewSpan() + assert.Equal(t, TraceState(""), ms.TraceState()) + testValTraceState := TraceState("congo=congos") + ms.SetTraceState(testValTraceState) + assert.Equal(t, testValTraceState, ms.TraceState()) +} + +func TestSpan_ParentSpanID(t *testing.T) { + ms := NewSpan() + assert.Equal(t, pcommon.SpanID(internal.NewSpanID(data.NewSpanID([8]byte{}))), ms.ParentSpanID()) + testValParentSpanID := pcommon.SpanID(internal.NewSpanID(data.NewSpanID([8]byte{8, 7, 6, 5, 4, 3, 2, 1}))) + ms.SetParentSpanID(testValParentSpanID) + assert.Equal(t, testValParentSpanID, ms.ParentSpanID()) +} + +func TestSpan_Name(t *testing.T) { + ms := NewSpan() + assert.Equal(t, "", ms.Name()) + ms.SetName("test_name") + assert.Equal(t, "test_name", ms.Name()) +} + +func TestSpan_Kind(t *testing.T) { + ms := NewSpan() + assert.Equal(t, SpanKind(otlptrace.Span_SpanKind(0)), ms.Kind()) + testValKind := SpanKind(otlptrace.Span_SpanKind(3)) + ms.SetKind(testValKind) + assert.Equal(t, testValKind, ms.Kind()) +} + +func TestSpan_StartTimestamp(t *testing.T) { + ms := NewSpan() + assert.Equal(t, pcommon.Timestamp(0), ms.StartTimestamp()) + testValStartTimestamp := pcommon.Timestamp(1234567890) + ms.SetStartTimestamp(testValStartTimestamp) + assert.Equal(t, testValStartTimestamp, ms.StartTimestamp()) +} + +func TestSpan_EndTimestamp(t *testing.T) { + ms := NewSpan() + assert.Equal(t, pcommon.Timestamp(0), ms.EndTimestamp()) + testValEndTimestamp := pcommon.Timestamp(1234567890) + ms.SetEndTimestamp(testValEndTimestamp) + assert.Equal(t, testValEndTimestamp, ms.EndTimestamp()) +} + +func TestSpan_Attributes(t *testing.T) { + ms := NewSpan() + assert.Equal(t, pcommon.NewMap(), ms.Attributes()) + internal.FillTestMap(internal.Map(ms.Attributes())) + assert.Equal(t, pcommon.Map(internal.GenerateTestMap()), ms.Attributes()) +} + +func TestSpan_DroppedAttributesCount(t *testing.T) { + ms := NewSpan() + assert.Equal(t, uint32(0), ms.DroppedAttributesCount()) + ms.SetDroppedAttributesCount(uint32(17)) + assert.Equal(t, uint32(17), ms.DroppedAttributesCount()) +} + +func TestSpan_Events(t *testing.T) { + ms := NewSpan() + assert.Equal(t, NewSpanEventSlice(), ms.Events()) + internal.FillTestSpanEventSlice(internal.SpanEventSlice(ms.Events())) + assert.Equal(t, SpanEventSlice(internal.GenerateTestSpanEventSlice()), 
ms.Events()) +} + +func TestSpan_DroppedEventsCount(t *testing.T) { + ms := NewSpan() + assert.Equal(t, uint32(0), ms.DroppedEventsCount()) + ms.SetDroppedEventsCount(uint32(17)) + assert.Equal(t, uint32(17), ms.DroppedEventsCount()) +} + +func TestSpan_Links(t *testing.T) { + ms := NewSpan() + assert.Equal(t, NewSpanLinkSlice(), ms.Links()) + internal.FillTestSpanLinkSlice(internal.SpanLinkSlice(ms.Links())) + assert.Equal(t, SpanLinkSlice(internal.GenerateTestSpanLinkSlice()), ms.Links()) +} + +func TestSpan_DroppedLinksCount(t *testing.T) { + ms := NewSpan() + assert.Equal(t, uint32(0), ms.DroppedLinksCount()) + ms.SetDroppedLinksCount(uint32(17)) + assert.Equal(t, uint32(17), ms.DroppedLinksCount()) +} + +func TestSpan_Status(t *testing.T) { + ms := NewSpan() + internal.FillTestSpanStatus(internal.SpanStatus(ms.Status())) + assert.Equal(t, SpanStatus(internal.GenerateTestSpanStatus()), ms.Status()) +} + +func TestSpanEventSlice(t *testing.T) { + es := NewSpanEventSlice() + assert.Equal(t, 0, es.Len()) + es = newSpanEventSlice(&[]*otlptrace.Span_Event{}) + assert.Equal(t, 0, es.Len()) + + es.EnsureCapacity(7) + emptyVal := newSpanEvent(&otlptrace.Span_Event{}) + testVal := SpanEvent(internal.GenerateTestSpanEvent()) + assert.Equal(t, 7, cap(*es.getOrig())) + for i := 0; i < es.Len(); i++ { + el := es.AppendEmpty() + assert.Equal(t, emptyVal, el) + internal.FillTestSpanEvent(internal.SpanEvent(el)) + assert.Equal(t, testVal, el) + } +} + +func TestSpanEventSlice_CopyTo(t *testing.T) { + dest := NewSpanEventSlice() + // Test CopyTo to empty + NewSpanEventSlice().CopyTo(dest) + assert.Equal(t, NewSpanEventSlice(), dest) + + // Test CopyTo larger slice + SpanEventSlice(internal.GenerateTestSpanEventSlice()).CopyTo(dest) + assert.Equal(t, SpanEventSlice(internal.GenerateTestSpanEventSlice()), dest) + + // Test CopyTo same size slice + SpanEventSlice(internal.GenerateTestSpanEventSlice()).CopyTo(dest) + assert.Equal(t, SpanEventSlice(internal.GenerateTestSpanEventSlice()), dest) +} + +func TestSpanEventSlice_EnsureCapacity(t *testing.T) { + es := SpanEventSlice(internal.GenerateTestSpanEventSlice()) + // Test ensure smaller capacity. 
+ const ensureSmallLen = 4 + expectedEs := make(map[*otlptrace.Span_Event]bool) + for i := 0; i < es.Len(); i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, es.Len(), len(expectedEs)) + es.EnsureCapacity(ensureSmallLen) + assert.Less(t, ensureSmallLen, es.Len()) + foundEs := make(map[*otlptrace.Span_Event]bool, es.Len()) + for i := 0; i < es.Len(); i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) + + // Test ensure larger capacity + const ensureLargeLen = 9 + oldLen := es.Len() + expectedEs = make(map[*otlptrace.Span_Event]bool, oldLen) + for i := 0; i < oldLen; i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, oldLen, len(expectedEs)) + es.EnsureCapacity(ensureLargeLen) + assert.Equal(t, ensureLargeLen, cap(*es.getOrig())) + foundEs = make(map[*otlptrace.Span_Event]bool, oldLen) + for i := 0; i < oldLen; i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) +} + +func TestSpanEventSlice_MoveAndAppendTo(t *testing.T) { + // Test MoveAndAppendTo to empty + expectedSlice := SpanEventSlice(internal.GenerateTestSpanEventSlice()) + dest := NewSpanEventSlice() + src := SpanEventSlice(internal.GenerateTestSpanEventSlice()) + src.MoveAndAppendTo(dest) + assert.Equal(t, SpanEventSlice(internal.GenerateTestSpanEventSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo empty slice + src.MoveAndAppendTo(dest) + assert.Equal(t, SpanEventSlice(internal.GenerateTestSpanEventSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo not empty slice + SpanEventSlice(internal.GenerateTestSpanEventSlice()).MoveAndAppendTo(dest) + assert.Equal(t, 2*expectedSlice.Len(), dest.Len()) + for i := 0; i < expectedSlice.Len(); i++ { + assert.Equal(t, expectedSlice.At(i), dest.At(i)) + assert.Equal(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) + } +} + +func TestSpanEventSlice_RemoveIf(t *testing.T) { + // Test RemoveIf on empty slice + emptySlice := NewSpanEventSlice() + emptySlice.RemoveIf(func(el SpanEvent) bool { + t.Fail() + return false + }) + + // Test RemoveIf + filtered := SpanEventSlice(internal.GenerateTestSpanEventSlice()) + pos := 0 + filtered.RemoveIf(func(el SpanEvent) bool { + pos++ + return pos%3 == 0 + }) + assert.Equal(t, 5, filtered.Len()) +} + +func TestSpanEvent_MoveTo(t *testing.T) { + ms := SpanEvent(internal.GenerateTestSpanEvent()) + dest := NewSpanEvent() + ms.MoveTo(dest) + assert.Equal(t, NewSpanEvent(), ms) + assert.Equal(t, SpanEvent(internal.GenerateTestSpanEvent()), dest) +} + +func TestSpanEvent_CopyTo(t *testing.T) { + ms := NewSpanEvent() + orig := NewSpanEvent() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = SpanEvent(internal.GenerateTestSpanEvent()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestSpanEvent_Timestamp(t *testing.T) { + ms := NewSpanEvent() + assert.Equal(t, pcommon.Timestamp(0), ms.Timestamp()) + testValTimestamp := pcommon.Timestamp(1234567890) + ms.SetTimestamp(testValTimestamp) + assert.Equal(t, testValTimestamp, ms.Timestamp()) +} + +func TestSpanEvent_Name(t *testing.T) { + ms := NewSpanEvent() + assert.Equal(t, "", ms.Name()) + ms.SetName("test_name") + assert.Equal(t, "test_name", ms.Name()) +} + +func TestSpanEvent_Attributes(t *testing.T) { + ms := NewSpanEvent() + assert.Equal(t, pcommon.NewMap(), ms.Attributes()) + internal.FillTestMap(internal.Map(ms.Attributes())) + 
assert.Equal(t, pcommon.Map(internal.GenerateTestMap()), ms.Attributes()) +} + +func TestSpanEvent_DroppedAttributesCount(t *testing.T) { + ms := NewSpanEvent() + assert.Equal(t, uint32(0), ms.DroppedAttributesCount()) + ms.SetDroppedAttributesCount(uint32(17)) + assert.Equal(t, uint32(17), ms.DroppedAttributesCount()) +} + +func TestSpanLinkSlice(t *testing.T) { + es := NewSpanLinkSlice() + assert.Equal(t, 0, es.Len()) + es = newSpanLinkSlice(&[]*otlptrace.Span_Link{}) + assert.Equal(t, 0, es.Len()) + + es.EnsureCapacity(7) + emptyVal := newSpanLink(&otlptrace.Span_Link{}) + testVal := SpanLink(internal.GenerateTestSpanLink()) + assert.Equal(t, 7, cap(*es.getOrig())) + for i := 0; i < es.Len(); i++ { + el := es.AppendEmpty() + assert.Equal(t, emptyVal, el) + internal.FillTestSpanLink(internal.SpanLink(el)) + assert.Equal(t, testVal, el) + } +} + +func TestSpanLinkSlice_CopyTo(t *testing.T) { + dest := NewSpanLinkSlice() + // Test CopyTo to empty + NewSpanLinkSlice().CopyTo(dest) + assert.Equal(t, NewSpanLinkSlice(), dest) + + // Test CopyTo larger slice + SpanLinkSlice(internal.GenerateTestSpanLinkSlice()).CopyTo(dest) + assert.Equal(t, SpanLinkSlice(internal.GenerateTestSpanLinkSlice()), dest) + + // Test CopyTo same size slice + SpanLinkSlice(internal.GenerateTestSpanLinkSlice()).CopyTo(dest) + assert.Equal(t, SpanLinkSlice(internal.GenerateTestSpanLinkSlice()), dest) +} + +func TestSpanLinkSlice_EnsureCapacity(t *testing.T) { + es := SpanLinkSlice(internal.GenerateTestSpanLinkSlice()) + // Test ensure smaller capacity. + const ensureSmallLen = 4 + expectedEs := make(map[*otlptrace.Span_Link]bool) + for i := 0; i < es.Len(); i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, es.Len(), len(expectedEs)) + es.EnsureCapacity(ensureSmallLen) + assert.Less(t, ensureSmallLen, es.Len()) + foundEs := make(map[*otlptrace.Span_Link]bool, es.Len()) + for i := 0; i < es.Len(); i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) + + // Test ensure larger capacity + const ensureLargeLen = 9 + oldLen := es.Len() + expectedEs = make(map[*otlptrace.Span_Link]bool, oldLen) + for i := 0; i < oldLen; i++ { + expectedEs[es.At(i).getOrig()] = true + } + assert.Equal(t, oldLen, len(expectedEs)) + es.EnsureCapacity(ensureLargeLen) + assert.Equal(t, ensureLargeLen, cap(*es.getOrig())) + foundEs = make(map[*otlptrace.Span_Link]bool, oldLen) + for i := 0; i < oldLen; i++ { + foundEs[es.At(i).getOrig()] = true + } + assert.Equal(t, expectedEs, foundEs) +} + +func TestSpanLinkSlice_MoveAndAppendTo(t *testing.T) { + // Test MoveAndAppendTo to empty + expectedSlice := SpanLinkSlice(internal.GenerateTestSpanLinkSlice()) + dest := NewSpanLinkSlice() + src := SpanLinkSlice(internal.GenerateTestSpanLinkSlice()) + src.MoveAndAppendTo(dest) + assert.Equal(t, SpanLinkSlice(internal.GenerateTestSpanLinkSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo empty slice + src.MoveAndAppendTo(dest) + assert.Equal(t, SpanLinkSlice(internal.GenerateTestSpanLinkSlice()), dest) + assert.Equal(t, 0, src.Len()) + assert.Equal(t, expectedSlice.Len(), dest.Len()) + + // Test MoveAndAppendTo not empty slice + SpanLinkSlice(internal.GenerateTestSpanLinkSlice()).MoveAndAppendTo(dest) + assert.Equal(t, 2*expectedSlice.Len(), dest.Len()) + for i := 0; i < expectedSlice.Len(); i++ { + assert.Equal(t, expectedSlice.At(i), dest.At(i)) + assert.Equal(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) + } +} + +func 
TestSpanLinkSlice_RemoveIf(t *testing.T) { + // Test RemoveIf on empty slice + emptySlice := NewSpanLinkSlice() + emptySlice.RemoveIf(func(el SpanLink) bool { + t.Fail() + return false + }) + + // Test RemoveIf + filtered := SpanLinkSlice(internal.GenerateTestSpanLinkSlice()) + pos := 0 + filtered.RemoveIf(func(el SpanLink) bool { + pos++ + return pos%3 == 0 + }) + assert.Equal(t, 5, filtered.Len()) +} + +func TestSpanLink_MoveTo(t *testing.T) { + ms := SpanLink(internal.GenerateTestSpanLink()) + dest := NewSpanLink() + ms.MoveTo(dest) + assert.Equal(t, NewSpanLink(), ms) + assert.Equal(t, SpanLink(internal.GenerateTestSpanLink()), dest) +} + +func TestSpanLink_CopyTo(t *testing.T) { + ms := NewSpanLink() + orig := NewSpanLink() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = SpanLink(internal.GenerateTestSpanLink()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestSpanLink_TraceID(t *testing.T) { + ms := NewSpanLink() + assert.Equal(t, pcommon.TraceID(internal.NewTraceID(data.NewTraceID([16]byte{}))), ms.TraceID()) + testValTraceID := pcommon.TraceID(internal.NewTraceID(data.NewTraceID([16]byte{1, 2, 3, 4, 5, 6, 7, 8, 8, 7, 6, 5, 4, 3, 2, 1}))) + ms.SetTraceID(testValTraceID) + assert.Equal(t, testValTraceID, ms.TraceID()) +} + +func TestSpanLink_SpanID(t *testing.T) { + ms := NewSpanLink() + assert.Equal(t, pcommon.SpanID(internal.NewSpanID(data.NewSpanID([8]byte{}))), ms.SpanID()) + testValSpanID := pcommon.SpanID(internal.NewSpanID(data.NewSpanID([8]byte{1, 2, 3, 4, 5, 6, 7, 8}))) + ms.SetSpanID(testValSpanID) + assert.Equal(t, testValSpanID, ms.SpanID()) +} + +func TestSpanLink_TraceState(t *testing.T) { + ms := NewSpanLink() + assert.Equal(t, TraceState(""), ms.TraceState()) + testValTraceState := TraceState("congo=congos") + ms.SetTraceState(testValTraceState) + assert.Equal(t, testValTraceState, ms.TraceState()) +} + +func TestSpanLink_Attributes(t *testing.T) { + ms := NewSpanLink() + assert.Equal(t, pcommon.NewMap(), ms.Attributes()) + internal.FillTestMap(internal.Map(ms.Attributes())) + assert.Equal(t, pcommon.Map(internal.GenerateTestMap()), ms.Attributes()) +} + +func TestSpanLink_DroppedAttributesCount(t *testing.T) { + ms := NewSpanLink() + assert.Equal(t, uint32(0), ms.DroppedAttributesCount()) + ms.SetDroppedAttributesCount(uint32(17)) + assert.Equal(t, uint32(17), ms.DroppedAttributesCount()) +} + +func TestSpanStatus_MoveTo(t *testing.T) { + ms := SpanStatus(internal.GenerateTestSpanStatus()) + dest := NewSpanStatus() + ms.MoveTo(dest) + assert.Equal(t, NewSpanStatus(), ms) + assert.Equal(t, SpanStatus(internal.GenerateTestSpanStatus()), dest) +} + +func TestSpanStatus_CopyTo(t *testing.T) { + ms := NewSpanStatus() + orig := NewSpanStatus() + orig.CopyTo(ms) + assert.Equal(t, orig, ms) + orig = SpanStatus(internal.GenerateTestSpanStatus()) + orig.CopyTo(ms) + assert.Equal(t, orig, ms) +} + +func TestSpanStatus_Code(t *testing.T) { + ms := NewSpanStatus() + assert.Equal(t, StatusCode(0), ms.Code()) + testValCode := StatusCode(1) + ms.SetCode(testValCode) + assert.Equal(t, testValCode, ms.Code()) +} + +func TestSpanStatus_Message(t *testing.T) { + ms := NewSpanStatus() + assert.Equal(t, "", ms.Message()) + ms.SetMessage("cancelled") + assert.Equal(t, "cancelled", ms.Message()) +} diff --git a/pdata/ptrace/json.go b/pdata/ptrace/json.go index e3068321f39..c09d2c54506 100644 --- a/pdata/ptrace/json.go +++ b/pdata/ptrace/json.go @@ -41,7 +41,7 @@ func newJSONMarshaler() *jsonMarshaler { func (e *jsonMarshaler) MarshalTraces(td Traces) ([]byte, error) { buf 
:= bytes.Buffer{} - pb := internal.TracesToProto(td) + pb := internal.TracesToProto(internal.Traces(td)) err := e.delegate.Marshal(&buf, &pb) return buf.Bytes(), err } @@ -59,7 +59,7 @@ func (d *jsonUnmarshaler) UnmarshalTraces(buf []byte) (Traces, error) { defer jsoniter.ConfigFastest.ReturnIterator(iter) td := readTraceData(iter) err := iter.Error - return internal.TracesFromProto(td), err + return Traces(internal.TracesFromProto(td)), err } func readTraceData(iter *jsoniter.Iterator) otlptrace.TracesData { diff --git a/pdata/ptrace/json_test.go b/pdata/ptrace/json_test.go index c558abaed7b..f4a44b4aa0e 100644 --- a/pdata/ptrace/json_test.go +++ b/pdata/ptrace/json_test.go @@ -21,8 +21,8 @@ import ( jsoniter "github.com/json-iterator/go" "github.com/stretchr/testify/assert" - "go.opentelemetry.io/collector/pdata/internal" otlptrace "go.opentelemetry.io/collector/pdata/internal/data/protogen/trace/v1" + "go.opentelemetry.io/collector/pdata/pcommon" ) var tracesOTLP = func() Traces { @@ -59,8 +59,8 @@ func TestTracesJSON_Marshal(t *testing.T) { } var tracesOTLPFull = func() Traces { - traceID := internal.NewTraceID([16]byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10}) - spanID := internal.NewSpanID([8]byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18}) + traceID := pcommon.NewTraceID([16]byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10}) + spanID := pcommon.NewSpanID([8]byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18}) td := NewTraces() // Add ResourceSpans. rs := td.ResourceSpans().AppendEmpty() @@ -77,42 +77,42 @@ var tracesOTLPFull = func() Traces { // Add spans. sp := il.Spans().AppendEmpty() sp.SetName("testSpan") - sp.SetKind(internal.SpanKindClient) + sp.SetKind(SpanKindClient) sp.SetDroppedAttributesCount(1) - sp.SetStartTimestamp(internal.NewTimestampFromTime(time.Now())) + sp.SetStartTimestamp(pcommon.NewTimestampFromTime(time.Now())) sp.SetTraceID(traceID) sp.SetSpanID(spanID) sp.SetDroppedEventsCount(1) sp.SetDroppedLinksCount(1) - sp.SetEndTimestamp(internal.NewTimestampFromTime(time.Now())) + sp.SetEndTimestamp(pcommon.NewTimestampFromTime(time.Now())) sp.SetParentSpanID(spanID) sp.SetTraceState("state") - sp.Status().SetCode(internal.StatusCodeOk) + sp.Status().SetCode(StatusCodeOk) sp.Status().SetMessage("message") // Add attributes. sp.Attributes().UpsertString("string", "value") sp.Attributes().UpsertBool("bool", true) sp.Attributes().UpsertInt("int", 1) sp.Attributes().UpsertDouble("double", 1.1) - sp.Attributes().UpsertBytes("bytes", internal.NewImmutableByteSlice([]byte("foo"))) - arr := internal.NewValueSlice() + sp.Attributes().UpsertBytes("bytes", pcommon.NewImmutableByteSlice([]byte("foo"))) + arr := pcommon.NewValueSlice() arr.SliceVal().AppendEmpty().SetIntVal(1) arr.SliceVal().AppendEmpty().SetStringVal("str") sp.Attributes().Upsert("array", arr) - kvList := internal.NewValueMap() - kvList.MapVal().Upsert("int", internal.NewValueInt(1)) - kvList.MapVal().Upsert("string", internal.NewValueString("string")) + kvList := pcommon.NewValueMap() + kvList.MapVal().Upsert("int", pcommon.NewValueInt(1)) + kvList.MapVal().Upsert("string", pcommon.NewValueString("string")) sp.Attributes().Upsert("kvList", kvList) // Add events. 
event := sp.Events().AppendEmpty() event.SetName("eventName") - event.SetTimestamp(internal.NewTimestampFromTime(time.Now())) + event.SetTimestamp(pcommon.NewTimestampFromTime(time.Now())) event.SetDroppedAttributesCount(1) event.Attributes().UpsertString("string", "value") event.Attributes().UpsertBool("bool", true) event.Attributes().UpsertInt("int", 1) event.Attributes().UpsertDouble("double", 1.1) - event.Attributes().UpsertBytes("bytes", internal.NewImmutableByteSlice([]byte("foo"))) + event.Attributes().UpsertBytes("bytes", pcommon.NewImmutableByteSlice([]byte("foo"))) // Add links. link := sp.Links().AppendEmpty() link.SetTraceState("state") @@ -123,7 +123,7 @@ var tracesOTLPFull = func() Traces { link.Attributes().UpsertBool("bool", true) link.Attributes().UpsertInt("int", 1) link.Attributes().UpsertDouble("double", 1.1) - link.Attributes().UpsertBytes("bytes", internal.NewImmutableByteSlice([]byte("foo"))) + link.Attributes().UpsertBytes("bytes", pcommon.NewImmutableByteSlice([]byte("foo"))) // Add another span. sp2 := il.Spans().AppendEmpty() sp2.SetName("testSpan2") diff --git a/pdata/ptrace/pb.go b/pdata/ptrace/pb.go index e164145904a..5af934e1ff9 100644 --- a/pdata/ptrace/pb.go +++ b/pdata/ptrace/pb.go @@ -34,12 +34,12 @@ func newPbMarshaler() *pbMarshaler { var _ Sizer = (*pbMarshaler)(nil) func (e *pbMarshaler) MarshalTraces(td Traces) ([]byte, error) { - pb := internal.TracesToProto(td) + pb := internal.TracesToProto(internal.Traces(td)) return pb.Marshal() } func (e *pbMarshaler) TracesSize(td Traces) int { - pb := internal.TracesToProto(td) + pb := internal.TracesToProto(internal.Traces(td)) return pb.Size() } @@ -57,5 +57,5 @@ func newPbUnmarshaler() *pbUnmarshaler { func (d *pbUnmarshaler) UnmarshalTraces(buf []byte) (Traces, error) { pb := otlptrace.TracesData{} err := pb.Unmarshal(buf) - return internal.TracesFromProto(pb), err + return Traces(internal.TracesFromProto(pb)), err } diff --git a/pdata/ptrace/ptraceotlp/traces.go b/pdata/ptrace/ptraceotlp/traces.go index ddbe475c170..d5e981ec936 100644 --- a/pdata/ptrace/ptraceotlp/traces.go +++ b/pdata/ptrace/ptraceotlp/traces.go @@ -78,8 +78,8 @@ func NewRequest() Request { // NewRequestFromTraces returns a Request from ptrace.Traces. // Because Request is a wrapper for ptrace.Traces, // any changes to the provided Traces struct will be reflected in the Request and vice versa. -func NewRequestFromTraces(t ptrace.Traces) Request { - return Request{orig: internal.TracesToOtlp(t)} +func NewRequestFromTraces(td ptrace.Traces) Request { + return Request{orig: internal.GetOrigTraces(internal.Traces(td))} } // MarshalProto marshals Request into proto bytes. @@ -115,7 +115,7 @@ func (tr Request) UnmarshalJSON(data []byte) error { } func (tr Request) Traces() ptrace.Traces { - return internal.TracesFromOtlp(tr.orig) + return ptrace.Traces(internal.NewTraces(tr.orig)) } // Client is the client API for OTLP-GRPC Traces service. diff --git a/pdata/internal/traces.go b/pdata/ptrace/traces.go similarity index 73% rename from pdata/internal/traces.go rename to pdata/ptrace/traces.go index e9477fb6a17..cad8b95fcaa 100644 --- a/pdata/internal/traces.go +++ b/pdata/ptrace/traces.go @@ -12,67 +12,49 @@ // See the License for the specific language governing permissions and // limitations under the License. 
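With the ptraceotlp change above, NewRequestFromTraces obtains the underlying *ExportTraceServiceRequest through the internal Traces wrapper instead of a package-level helper, and Request.Traces() converts back the same way, so Request remains a view over the same data (as the existing doc comment on NewRequestFromTraces states, changes are reflected in both directions). A minimal caller-side sketch, assuming this version of ptrace and ptraceotlp and using only the constructors shown in this patch:

```golang
package main

import (
	"fmt"

	"go.opentelemetry.io/collector/pdata/ptrace"
	"go.opentelemetry.io/collector/pdata/ptrace/ptraceotlp"
)

func main() {
	td := ptrace.NewTraces()
	td.ResourceSpans().AppendEmpty().ScopeSpans().AppendEmpty().Spans().AppendEmpty().SetName("testSpan")

	// Request wraps the same underlying OTLP request as td, so the span
	// added above is visible when converting back via Traces().
	req := ptraceotlp.NewRequestFromTraces(td)
	fmt.Println(req.Traces().SpanCount()) // 1
}
```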
-package internal // import "go.opentelemetry.io/collector/pdata/internal" +package ptrace // import "go.opentelemetry.io/collector/pdata/ptrace" import ( + "go.opentelemetry.io/collector/pdata/internal" otlpcollectortrace "go.opentelemetry.io/collector/pdata/internal/data/protogen/collector/trace/v1" otlptrace "go.opentelemetry.io/collector/pdata/internal/data/protogen/trace/v1" ) -// TracesToOtlp internal helper to convert Traces to otlp request representation. -func TracesToOtlp(mw Traces) *otlpcollectortrace.ExportTraceServiceRequest { - return mw.orig -} - -// TracesFromOtlp internal helper to convert otlp request representation to Traces. -func TracesFromOtlp(orig *otlpcollectortrace.ExportTraceServiceRequest) Traces { - return Traces{orig: orig} -} - -// TracesToProto internal helper to convert Traces to protobuf representation. -func TracesToProto(mw Traces) otlptrace.TracesData { - return otlptrace.TracesData{ - ResourceSpans: mw.orig.ResourceSpans, - } -} +// Traces is the top-level struct that is propagated through the traces pipeline. +// Use NewTraces to create new instance, zero-initialized instance is not valid for use. +type Traces internal.Traces -// TracesFromProto internal helper to convert protobuf representation to Traces. -func TracesFromProto(orig otlptrace.TracesData) Traces { - return Traces{orig: &otlpcollectortrace.ExportTraceServiceRequest{ - ResourceSpans: orig.ResourceSpans, - }} +func newTraces(orig *otlpcollectortrace.ExportTraceServiceRequest) Traces { + return Traces(internal.NewTraces(orig)) } -// Traces is the top-level struct that is propagated through the traces pipeline. -// Use NewTraces to create new instance, zero-initialized instance is not valid for use. -type Traces struct { - // When marhsal/unmarshal unless it is in the request for otlp protocol, convert to otlptrace.TracesData. - orig *otlpcollectortrace.ExportTraceServiceRequest +func (ms Traces) getOrig() *otlpcollectortrace.ExportTraceServiceRequest { + return internal.GetOrigTraces(internal.Traces(ms)) } // NewTraces creates a new Traces struct. func NewTraces() Traces { - return Traces{orig: &otlpcollectortrace.ExportTraceServiceRequest{}} + return newTraces(&otlpcollectortrace.ExportTraceServiceRequest{}) } // MoveTo moves all properties from the current struct to dest // resetting the current instance to its zero value. -func (td Traces) MoveTo(dest Traces) { - *dest.orig = *td.orig - *td.orig = otlpcollectortrace.ExportTraceServiceRequest{} +func (ms Traces) MoveTo(dest Traces) { + *dest.getOrig() = *ms.getOrig() + *ms.getOrig() = otlpcollectortrace.ExportTraceServiceRequest{} } // Clone returns a copy of Traces. -func (td Traces) Clone() Traces { +func (ms Traces) Clone() Traces { cloneTd := NewTraces() - td.ResourceSpans().CopyTo(cloneTd.ResourceSpans()) + ms.ResourceSpans().CopyTo(cloneTd.ResourceSpans()) return cloneTd } // SpanCount calculates the total number of spans. -func (td Traces) SpanCount() int { +func (ms Traces) SpanCount() int { spanCount := 0 - rss := td.ResourceSpans() + rss := ms.ResourceSpans() for i := 0; i < rss.Len(); i++ { rs := rss.At(i) ilss := rs.ScopeSpans() @@ -84,8 +66,8 @@ func (td Traces) SpanCount() int { } // ResourceSpans returns the ResourceSpansSlice associated with this Metrics. 
-func (td Traces) ResourceSpans() ResourceSpansSlice { - return newResourceSpansSlice(&td.orig.ResourceSpans) +func (ms Traces) ResourceSpans() ResourceSpansSlice { + return newResourceSpansSlice(&ms.getOrig().ResourceSpans) } // TraceState is a string representing the tracestate in w3c-trace-context format: https://www.w3.org/TR/trace-context/#tracestate-header diff --git a/pdata/internal/traces_test.go b/pdata/ptrace/traces_test.go similarity index 77% rename from pdata/internal/traces_test.go rename to pdata/ptrace/traces_test.go index 8ff7df65e7a..e28e10f47ea 100644 --- a/pdata/internal/traces_test.go +++ b/pdata/ptrace/traces_test.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package internal +package ptrace import ( "testing" @@ -22,6 +22,7 @@ import ( goproto "google.golang.org/protobuf/proto" "google.golang.org/protobuf/types/known/emptypb" + "go.opentelemetry.io/collector/pdata/internal" otlpcollectortrace "go.opentelemetry.io/collector/pdata/internal/data/protogen/collector/trace/v1" otlptrace "go.opentelemetry.io/collector/pdata/internal/data/protogen/trace/v1" ) @@ -51,17 +52,17 @@ func TestSpanCount(t *testing.T) { } func TestSpanCountWithEmpty(t *testing.T) { - assert.EqualValues(t, 0, Traces{orig: &otlpcollectortrace.ExportTraceServiceRequest{ + assert.EqualValues(t, 0, newTraces(&otlpcollectortrace.ExportTraceServiceRequest{ ResourceSpans: []*otlptrace.ResourceSpans{{}}, - }}.SpanCount()) - assert.EqualValues(t, 0, Traces{orig: &otlpcollectortrace.ExportTraceServiceRequest{ + }).SpanCount()) + assert.EqualValues(t, 0, newTraces(&otlpcollectortrace.ExportTraceServiceRequest{ ResourceSpans: []*otlptrace.ResourceSpans{ { ScopeSpans: []*otlptrace.ScopeSpans{{}}, }, }, - }}.SpanCount()) - assert.EqualValues(t, 1, Traces{orig: &otlpcollectortrace.ExportTraceServiceRequest{ + }).SpanCount()) + assert.EqualValues(t, 1, newTraces(&otlpcollectortrace.ExportTraceServiceRequest{ ResourceSpans: []*otlptrace.ResourceSpans{ { ScopeSpans: []*otlptrace.ScopeSpans{ @@ -71,14 +72,14 @@ func TestSpanCountWithEmpty(t *testing.T) { }, }, }, - }}.SpanCount()) + }).SpanCount()) } func TestToFromOtlp(t *testing.T) { otlp := &otlpcollectortrace.ExportTraceServiceRequest{} - traces := TracesFromOtlp(otlp) + traces := newTraces(otlp) assert.EqualValues(t, NewTraces(), traces) - assert.EqualValues(t, otlp, TracesToOtlp(traces)) + assert.EqualValues(t, otlp, traces.getOrig()) // More tests in ./tracedata/traces_test.go. Cannot have them here because of // circular dependency. } @@ -89,10 +90,11 @@ func TestResourceSpansWireCompatibility(t *testing.T) { // this repository are wire compatible. // Generate ResourceSpans as pdata struct. - traces := generateTestResourceSpans() + traces := NewTraces() + internal.FillTestResourceSpansSlice(internal.ResourceSpansSlice(traces.ResourceSpans())) // Marshal its underlying ProtoBuf to wire. - wire1, err := gogoproto.Marshal(traces.orig) + wire1, err := gogoproto.Marshal(traces.getOrig()) assert.NoError(t, err) assert.NotNil(t, wire1) @@ -107,33 +109,33 @@ func TestResourceSpansWireCompatibility(t *testing.T) { assert.NotNil(t, wire2) // Unmarshal from the wire into gogoproto's representation. - var gogoprotoRS2 otlptrace.ResourceSpans + var gogoprotoRS2 otlpcollectortrace.ExportTraceServiceRequest err = gogoproto.Unmarshal(wire2, &gogoprotoRS2) assert.NoError(t, err) // Now compare that the original and final ProtoBuf messages are the same. 
// This proves that goproto and gogoproto marshaling/unmarshaling are wire compatible. - assert.EqualValues(t, traces.orig, &gogoprotoRS2) + assert.EqualValues(t, traces.getOrig(), &gogoprotoRS2) } func TestTracesMoveTo(t *testing.T) { traces := NewTraces() - fillTestResourceSpansSlice(traces.ResourceSpans()) + internal.FillTestResourceSpansSlice(internal.ResourceSpansSlice(traces.ResourceSpans())) dest := NewTraces() traces.MoveTo(dest) assert.EqualValues(t, NewTraces(), traces) - assert.EqualValues(t, generateTestResourceSpansSlice(), dest.ResourceSpans()) + assert.EqualValues(t, ResourceSpansSlice(internal.GenerateTestResourceSpansSlice()), dest.ResourceSpans()) } func TestTracesClone(t *testing.T) { traces := NewTraces() - fillTestResourceSpansSlice(traces.ResourceSpans()) + internal.FillTestResourceSpansSlice(internal.ResourceSpansSlice(traces.ResourceSpans())) assert.EqualValues(t, traces, traces.Clone()) } func BenchmarkTracesClone(b *testing.B) { traces := NewTraces() - fillTestResourceSpansSlice(traces.ResourceSpans()) + internal.FillTestResourceSpansSlice(internal.ResourceSpansSlice(traces.ResourceSpans())) b.ResetTimer() for n := 0; n < b.N; n++ { clone := traces.Clone()