Skip to content

Commit

Permalink
Fixes issue #16546 adding support for timestamp in nanosecond precision
Browse files Browse the repository at this point in the history
  • Loading branch information
ag-ramachandran committed Dec 1, 2022
1 parent 7f9cb88 commit 833999a
Show file tree
Hide file tree
Showing 7 changed files with 16 additions and 16 deletions.
4 changes: 2 additions & 2 deletions exporter/azuredataexplorerexporter/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -122,11 +122,11 @@ This exporter maps OpenTelemetry [trace](https://opentelemetry.io/docs/referenc
The following tables need to be created in the database specified in the configuration.
```kql
.create-merge table <Logs-Table-Name> (Timestamp:datetime, ObservedTimestamp:datetime, TraceId:string, SpanId:string, SeverityText:string, SeverityNumber:int, Body:string, ResourceAttributes:dynamic, LogsAttributes:dynamic)
.create-merge table <Logs-Table-Name> (Timestamp:datetime, ObservedTimestamp:datetime, TraceID:string, SpanID:string, SeverityText:string, SeverityNumber:int, Body:string, ResourceAttributes:dynamic, LogsAttributes:dynamic)

.create-merge table <Metrics-Table-Name> (Timestamp:datetime, MetricName:string, MetricType:string, MetricUnit:string, MetricDescription:string, MetricValue:real, Host:string, ResourceAttributes:dynamic,MetricAttributes:dynamic)

.create-merge table <Traces-Table-Name> (TraceId:string, SpanId:string, ParentId:string, SpanName:string, SpanStatus:string, SpanKind:string, StartTime:datetime, EndTime:datetime, ResourceAttributes:dynamic, TraceAttributes:dynamic, Events:dynamic, Links:dynamic)
.create-merge table <Traces-Table-Name> (TraceID:string, SpanID:string, ParentID:string, SpanName:string, SpanStatus:string, SpanKind:string, StartTime:datetime, EndTime:datetime, ResourceAttributes:dynamic, TraceAttributes:dynamic, Events:dynamic, Links:dynamic)

//Enable streaming ingestion( for managed streaming) for the created tables using
.alter table <Table-Name> policy streamingingestion enable
Expand Down
8 changes: 4 additions & 4 deletions exporter/azuredataexplorerexporter/logsdata_to_adx.go
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,8 @@ import (
)

type AdxLog struct {
Timestamp string // The timestamp of the occurrence. Formatted into string as RFC3339
ObservedTimestamp string // The timestamp of logs observed in opentelemetry collector. Formatted into string as RFC3339
Timestamp string // The timestamp of the occurrence. Formatted into string as RFC3339Nano
ObservedTimestamp string // The timestamp of logs observed in opentelemetry collector. Formatted into string as RFC3339Nano
TraceID string // TraceId associated to the log
SpanID string // SpanId associated to the log
SeverityText string // The severity level of the log
Expand All @@ -43,8 +43,8 @@ func mapToAdxLog(resource pcommon.Resource, scope pcommon.InstrumentationScope,
clonedLogAttrib := cloneMap(logAttrib)
copyMap(clonedLogAttrib, getScopeMap(scope))
adxLog := &AdxLog{
Timestamp: logData.Timestamp().AsTime().Format(time.RFC3339),
ObservedTimestamp: logData.ObservedTimestamp().AsTime().Format(time.RFC3339),
Timestamp: logData.Timestamp().AsTime().Format(time.RFC3339Nano),
ObservedTimestamp: logData.ObservedTimestamp().AsTime().Format(time.RFC3339Nano),
TraceID: traceutil.TraceIDToHexOrEmptyString(logData.TraceID()),
SpanID: traceutil.SpanIDToHexOrEmptyString(logData.SpanID()),
SeverityText: logData.SeverityText(),
Expand Down
2 changes: 1 addition & 1 deletion exporter/azuredataexplorerexporter/logsdata_to_adx_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ import (
func Test_mapToAdxLog(t *testing.T) {
logger := zap.NewNop()
epoch, _ := time.Parse("2006-01-02T15:04:05Z07:00", "1970-01-01T00:00:00Z")
defaultTime := pcommon.NewTimestampFromTime(epoch).AsTime().Format(time.RFC3339)
defaultTime := pcommon.NewTimestampFromTime(epoch).AsTime().Format(time.RFC3339Nano)
tmap := make(map[string]interface{})
tmap["key"] = "value"
tmap[hostkey] = testhost
Expand Down
4 changes: 2 additions & 2 deletions exporter/azuredataexplorerexporter/metricsdata_to_adx.go
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ const (

// This is derived from the specification https://opentelemetry.io/docs/reference/specification/metrics/datamodel/
type AdxMetric struct {
Timestamp string // The timestamp of the occurrence. A metric is measured at a point of time. Formatted into string as RFC3339
Timestamp string // The timestamp of the occurrence. A metric is measured at a point of time. Formatted into string as RFC3339Nano
// Including name, the Metric object is defined by the following properties:
MetricName string // Name of the metric field
MetricType string // The data point type (e.g. Sum, Gauge, Histogram ExponentialHistogram, Summary)
Expand Down Expand Up @@ -81,7 +81,7 @@ func mapToAdxMetric(res pcommon.Resource, md pmetric.Metric, scopeattrs map[stri
desc = md.Description()
}
return &AdxMetric{
Timestamp: times.Format(time.RFC3339),
Timestamp: times.Format(time.RFC3339Nano),
MetricName: name,
MetricType: mt.String(),
MetricUnit: md.Unit(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ const (
// The timestamps used for the tests
var tsUnix = time.Unix(time.Now().Unix(), time.Now().UnixNano())
var ts = pcommon.NewTimestampFromTime(tsUnix)
var tstr = ts.AsTime().Format(time.RFC3339)
var tstr = ts.AsTime().Format(time.RFC3339Nano)

// the histogram values and distribution for the tests
var distributionBounds = []float64{1, 2, 4}
Expand Down
10 changes: 5 additions & 5 deletions exporter/azuredataexplorerexporter/tracesdata_to_adx.go
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,8 @@ type AdxTrace struct {
SpanName string // The SpanName of the Trace
SpanStatus string // The SpanStatus associated to the Trace
SpanKind string // The SpanKind of the Trace
StartTime string // The start time of the occurrence. Formatted into string as RFC3339
EndTime string // The end time of the occurrence. Formatted into string as RFC3339
StartTime string // The start time of the occurrence. Formatted into string as RFC3339Nano
EndTime string // The end time of the occurrence. Formatted into string as RFC3339Nano
ResourceAttributes map[string]interface{} // JSON Resource attributes that can then be parsed.
TraceAttributes map[string]interface{} // JSON attributes that can then be parsed.
Events []*Event // Array containing the events in a span
Expand Down Expand Up @@ -64,8 +64,8 @@ func mapToAdxTrace(resource pcommon.Resource, scope pcommon.InstrumentationScope
SpanName: spanData.Name(),
SpanStatus: traceutil.StatusCodeStr(spanData.Status().Code()),
SpanKind: traceutil.SpanKindStr(spanData.Kind()),
StartTime: spanData.StartTimestamp().AsTime().Format(time.RFC3339),
EndTime: spanData.EndTimestamp().AsTime().Format(time.RFC3339),
StartTime: spanData.StartTimestamp().AsTime().Format(time.RFC3339Nano),
EndTime: spanData.EndTimestamp().AsTime().Format(time.RFC3339Nano),
ResourceAttributes: resource.Attributes().AsRaw(),
TraceAttributes: clonedTraceAttrib,
Events: getEventsData(spanData),
Expand All @@ -78,7 +78,7 @@ func getEventsData(sd ptrace.Span) []*Event {

for i := 0; i < sd.Events().Len(); i++ {
event := &Event{
Timestamp: sd.Events().At(i).Timestamp().AsTime().Format(time.RFC3339),
Timestamp: sd.Events().At(i).Timestamp().AsTime().Format(time.RFC3339Nano),
EventName: sd.Events().At(i).Name(),
EventAttributes: sd.Events().At(i).Attributes().AsRaw(),
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ import (

func Test_mapToAdxTrace(t *testing.T) {
epoch, _ := time.Parse("2006-01-02T15:04:05Z07:00", "1970-01-01T00:00:00Z")
defaultTime := pcommon.NewTimestampFromTime(epoch).AsTime().Format(time.RFC3339)
defaultTime := pcommon.NewTimestampFromTime(epoch).AsTime().Format(time.RFC3339Nano)
tmap := make(map[string]interface{})
tmap["key"] = "value"
tmap[hostkey] = testhost
Expand Down

0 comments on commit 833999a

Please sign in to comment.