From 7b6ed21a1531519a3b4d6cc3efd0c6a4697eb35c Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Sun, 13 Mar 2022 17:09:15 +0530 Subject: [PATCH 01/51] Logging SDK. --- opentelemetry-api/Cargo.toml | 1 + opentelemetry-api/src/global/error_handler.rs | 12 + opentelemetry-api/src/lib.rs | 4 + opentelemetry-api/src/log/mod.rs | 64 + opentelemetry-otlp/Cargo.toml | 1 + opentelemetry-otlp/src/lib.rs | 5 + opentelemetry-otlp/src/log.rs | 472 ++++++ opentelemetry-proto/Cargo.toml | 2 +- opentelemetry-proto/src/proto.rs | 6 + opentelemetry-proto/src/proto/grpcio/logs.rs | 1317 +++++++++++++++++ .../src/proto/grpcio/logs_service.rs | 337 +++++ .../src/proto/grpcio/logs_service_grpc.rs | 69 + opentelemetry-proto/src/transform/common.rs | 34 +- opentelemetry-proto/src/transform/logs.rs | 333 +++++ opentelemetry-proto/src/transform/mod.rs | 3 + opentelemetry-proto/src/transform/traces.rs | 6 +- opentelemetry-sdk/Cargo.toml | 3 +- opentelemetry-sdk/src/export/log/mod.rs | 33 + opentelemetry-sdk/src/export/log/stdout.rs | 158 ++ opentelemetry-sdk/src/export/mod.rs | 4 + opentelemetry-sdk/src/lib.rs | 4 + opentelemetry-sdk/src/log/config.rs | 8 + opentelemetry-sdk/src/log/log_emitter.rs | 218 +++ opentelemetry-sdk/src/log/log_processor.rs | 368 +++++ opentelemetry-sdk/src/log/mod.rs | 13 + opentelemetry-sdk/src/log/record.rs | 91 ++ opentelemetry-sdk/src/log/runtime.rs | 286 ++++ opentelemetry/Cargo.toml | 1 + 28 files changed, 3845 insertions(+), 8 deletions(-) create mode 100644 opentelemetry-api/src/log/mod.rs create mode 100644 opentelemetry-otlp/src/log.rs create mode 100644 opentelemetry-proto/src/proto/grpcio/logs.rs create mode 100644 opentelemetry-proto/src/proto/grpcio/logs_service.rs create mode 100644 opentelemetry-proto/src/proto/grpcio/logs_service_grpc.rs create mode 100644 opentelemetry-proto/src/transform/logs.rs create mode 100644 opentelemetry-sdk/src/export/log/mod.rs create mode 100644 opentelemetry-sdk/src/export/log/stdout.rs create mode 100644 
opentelemetry-sdk/src/log/config.rs create mode 100644 opentelemetry-sdk/src/log/log_emitter.rs create mode 100644 opentelemetry-sdk/src/log/log_processor.rs create mode 100644 opentelemetry-sdk/src/log/mod.rs create mode 100644 opentelemetry-sdk/src/log/record.rs create mode 100644 opentelemetry-sdk/src/log/runtime.rs diff --git a/opentelemetry-api/Cargo.toml b/opentelemetry-api/Cargo.toml index 51f08b2553..1837e5b78f 100644 --- a/opentelemetry-api/Cargo.toml +++ b/opentelemetry-api/Cargo.toml @@ -29,3 +29,4 @@ default = ["trace"] trace = ["pin-project-lite"] metrics = ["fnv"] testing = ["trace"] +log = [] \ No newline at end of file diff --git a/opentelemetry-api/src/global/error_handler.rs b/opentelemetry-api/src/global/error_handler.rs index 5ff8783eea..fd11510483 100644 --- a/opentelemetry-api/src/global/error_handler.rs +++ b/opentelemetry-api/src/global/error_handler.rs @@ -1,6 +1,8 @@ use std::sync::PoisonError; use std::sync::RwLock; +#[cfg(feature = "log")] +use crate::log::LogError; #[cfg(feature = "metrics")] use crate::metrics::MetricsError; #[cfg(feature = "trace")] @@ -23,6 +25,13 @@ pub enum Error { #[error(transparent)] /// An issue raised by the metrics module. Metric(#[from] MetricsError), + + #[cfg(feature = "log")] + #[cfg_attr(docsrs, doc(cfg(feature = "log")))] + #[error(transparent)] + /// Failed to export logs. + Log(#[from] LogError), + #[error("{0}")] /// Other types of failures not covered by the variants above. Other(String), @@ -49,6 +58,9 @@ pub fn handle_error>(err: T) { #[cfg(feature = "trace")] #[cfg_attr(docsrs, doc(cfg(feature = "trace")))] Error::Trace(err) => eprintln!("OpenTelemetry trace error occurred. {}", err), + #[cfg(feature = "log")] + #[cfg_attr(docsrs, doc(cfg(feature = "log")))] + Error::Log(err) => eprintln!("OpenTelemetry log error occurred. {}", err), Error::Other(err_msg) => eprintln!("OpenTelemetry error occurred. 
{}", err_msg), }, } diff --git a/opentelemetry-api/src/lib.rs b/opentelemetry-api/src/lib.rs index e19182726f..ffed020fc0 100644 --- a/opentelemetry-api/src/lib.rs +++ b/opentelemetry-api/src/lib.rs @@ -67,6 +67,10 @@ pub mod propagation; #[cfg_attr(docsrs, doc(cfg(feature = "trace")))] pub mod trace; +#[cfg(feature = "log")] +#[cfg_attr(docsrs, doc(cfg(feature = "log")))] +pub mod log; + #[doc(hidden)] #[cfg(any(feature = "metrics", feature = "trace"))] pub mod time { diff --git a/opentelemetry-api/src/log/mod.rs b/opentelemetry-api/src/log/mod.rs new file mode 100644 index 0000000000..15002ce30e --- /dev/null +++ b/opentelemetry-api/src/log/mod.rs @@ -0,0 +1,64 @@ +//! # OpenTelemetry Logs API + +use crate::ExportError; +use futures_channel::{mpsc::TrySendError, oneshot::Canceled}; +use std::time::Duration; +use thiserror::Error; + +/// Describe the result of operations in log SDK. +pub type LogResult = Result; + +#[derive(Error, Debug)] +#[non_exhaustive] +/// Errors returned by the log SDK. +pub enum LogError { + /// Export failed with the error returned by the exporter. + #[error("Exporter {} encountered the following errors: {0}", .0.exporter_name())] + ExportFailed(Box), + + /// Export failed to finish after certain period and processor stopped the export. + #[error("Exporter timed out after {} seconds", .0.as_secs())] + ExportTimedOut(Duration), + + /// Other errors propagated from log SDK that weren't covered above. 
+ #[error(transparent)] + Other(#[from] Box), +} + +impl From for LogError +where + T: ExportError, +{ + fn from(err: T) -> Self { + LogError::ExportFailed(Box::new(err)) + } +} + +impl From> for LogError { + fn from(err: TrySendError) -> Self { + LogError::Other(Box::new(err.into_send_error())) + } +} + +impl From for LogError { + fn from(err: Canceled) -> Self { + LogError::Other(Box::new(err)) + } +} + +impl From for LogError { + fn from(err_msg: String) -> Self { + LogError::Other(Box::new(Custom(err_msg))) + } +} + +impl From<&'static str> for LogError { + fn from(err_msg: &'static str) -> Self { + LogError::Other(Box::new(Custom(err_msg.into()))) + } +} + +/// Wrap type for string +#[derive(Error, Debug)] +#[error("{0}")] +struct Custom(String); diff --git a/opentelemetry-otlp/Cargo.toml b/opentelemetry-otlp/Cargo.toml index 9fbd124d80..221da61e65 100644 --- a/opentelemetry-otlp/Cargo.toml +++ b/opentelemetry-otlp/Cargo.toml @@ -62,6 +62,7 @@ tokio = { version = "1.0", features = ["macros", "rt-multi-thread"] } # telemetry pillars and functions trace = ["opentelemetry_api/trace", "opentelemetry_sdk/trace", "opentelemetry-proto/traces"] metrics = ["opentelemetry_api/metrics", "opentelemetry_sdk/metrics", "opentelemetry-proto/metrics", "grpc-tonic"] +log = ["opentelemetry/log", "opentelemetry-proto/logs"] # add ons serialize = ["serde"] diff --git a/opentelemetry-otlp/src/lib.rs b/opentelemetry-otlp/src/lib.rs index ccb54d2c95..385870cb7a 100644 --- a/opentelemetry-otlp/src/lib.rs +++ b/opentelemetry-otlp/src/lib.rs @@ -181,6 +181,8 @@ #![cfg_attr(test, deny(warnings))] mod exporter; +#[cfg(feature = "log")] +mod log; #[cfg(feature = "metrics")] mod metric; #[cfg(feature = "trace")] @@ -200,6 +202,9 @@ pub use crate::metric::{ OTEL_EXPORTER_OTLP_METRICS_ENDPOINT, OTEL_EXPORTER_OTLP_METRICS_TIMEOUT, }; +#[cfg(feature = "log")] +pub use crate::log::*; + pub use crate::exporter::{ HasExportConfig, WithExportConfig, OTEL_EXPORTER_OTLP_ENDPOINT, 
OTEL_EXPORTER_OTLP_ENDPOINT_DEFAULT, OTEL_EXPORTER_OTLP_PROTOCOL, diff --git a/opentelemetry-otlp/src/log.rs b/opentelemetry-otlp/src/log.rs new file mode 100644 index 0000000000..4343c86632 --- /dev/null +++ b/opentelemetry-otlp/src/log.rs @@ -0,0 +1,472 @@ +//! OTLP - Log Exporter +//! +//! Defines a [LogExporter] to send logs via the OpenTelemetry Protocol (OTLP) + +#[cfg(feature = "grpc-tonic")] +use { + crate::exporter::tonic::{TonicConfig, TonicExporterBuilder}, + opentelemetry_proto::tonic::collector::logs::v1::{ + logs_service_client::LogsServiceClient as TonicLogsServiceClient, + ExportLogsServiceRequest as TonicRequest, + }, + tonic::{ + metadata::{KeyAndValueRef, MetadataMap}, + transport::Channel as TonicChannel, + Request, + }, +}; + +#[cfg(feature = "grpc-sys")] +use { + crate::exporter::grpcio::{GrpcioConfig, GrpcioExporterBuilder}, + grpcio::{ + CallOption, Channel as GrpcChannel, ChannelBuilder, ChannelCredentialsBuilder, Environment, + MetadataBuilder, + }, + opentelemetry_proto::grpcio::{ + logs_service::ExportLogsServiceRequest as GrpcRequest, + logs_service_grpc::LogsServiceClient as GrpcioLogServiceClient, + }, + std::sync::Arc, +}; + +#[cfg(feature = "http-proto")] +use { + crate::exporter::http::{HttpConfig, HttpExporterBuilder}, + http::{ + header::{HeaderName, HeaderValue, CONTENT_TYPE}, + Method, Uri, + }, + opentelemetry_http::HttpClient, + opentelemetry_proto::tonic::collector::logs::v1::ExportLogsServiceRequest as ProstRequest, + prost::Message, + std::convert::TryFrom, +}; + +#[cfg(any(feature = "grpc-sys", feature = "http-proto"))] +use std::collections::HashMap; + +use crate::exporter::ExportConfig; +use crate::OtlpPipeline; +use async_trait::async_trait; +use std::{ + fmt::{self, Debug}, + time::Duration, +}; + +use opentelemetry::{ + log::LogError, + sdk::{self, export::log::ResourceLog, log::LogRuntime}, +}; + +impl OtlpPipeline { + /// Create a OTLP logging pipeline. 
+ pub fn logging(self) -> OtlpLogPipeline { + OtlpLogPipeline::default() + } +} + +/// OTLP log exporter builder +#[derive(Debug)] +#[allow(clippy::large_enum_variant)] +#[non_exhaustive] +pub enum LogExporterBuilder { + /// Tonic log exporter builder + #[cfg(feature = "grpc-tonic")] + Tonic(TonicExporterBuilder), + /// Grpc log exporter builder + #[cfg(feature = "grpc-sys")] + Grpcio(GrpcioExporterBuilder), + /// Http log exporter builder + #[cfg(feature = "http-proto")] + Http(HttpExporterBuilder), +} + +impl LogExporterBuilder { + /// Build a OTLP log exporter using the given configuration. + pub fn build_log_exporter(self) -> Result { + match self { + #[cfg(feature = "grpc-tonic")] + LogExporterBuilder::Tonic(builder) => Ok(match builder.channel { + Some(channel) => LogExporter::from_tonic_channel( + builder.exporter_config, + builder.tonic_config, + channel, + ), + None => LogExporter::new_tonic(builder.exporter_config, builder.tonic_config), + }?), + #[cfg(feature = "grpc-sys")] + LogExporterBuilder::Grpcio(builder) => Ok(LogExporter::new_grpcio( + builder.exporter_config, + builder.grpcio_config, + )), + #[cfg(feature = "http-proto")] + LogExporterBuilder::Http(builder) => Ok(LogExporter::new_http( + builder.exporter_config, + builder.http_config, + )?), + } + } +} + +#[cfg(feature = "grpc-tonic")] +impl From for LogExporterBuilder { + fn from(exporter: TonicExporterBuilder) -> Self { + LogExporterBuilder::Tonic(exporter) + } +} + +#[cfg(feature = "grpc-sys")] +impl From for LogExporterBuilder { + fn from(exporter: GrpcioExporterBuilder) -> Self { + LogExporterBuilder::Grpcio(exporter) + } +} + +#[cfg(feature = "http-proto")] +impl From for LogExporterBuilder { + fn from(exporter: HttpExporterBuilder) -> Self { + LogExporterBuilder::Http(exporter) + } +} + +/// OTLP exporter that sends log data +pub enum LogExporter { + #[cfg(feature = "grpc-tonic")] + /// Log Exporter using tonic as grpc layer. 
+ Tonic { + /// Duration of timeout when sending logs to backend. + timeout: Duration, + /// Additional headers of the outbound requests. + metadata: Option, + /// The Grpc log exporter + log_exporter: TonicLogsServiceClient, + }, + #[cfg(feature = "grpc-sys")] + /// Log Exporter using grpcio as grpc layer + Grpcio { + /// Duration of timeout when sending logs to backend. + timeout: Duration, + /// Additional headers of the outbound requests. + headers: Option>, + /// The Grpc log exporter + log_exporter: GrpcioLogServiceClient, + }, + #[cfg(feature = "http-proto")] + /// Log Exporter using HTTP transport + Http { + /// Duration of timeout when sending logs to backend. + timeout: Duration, + /// Additional headers of the outbound requests. + headers: Option>, + /// The Collector URL + collector_endpoint: Uri, + /// The HTTP log exporter + log_exporter: Option>, + }, +} + +impl Debug for LogExporter { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + #[cfg(feature = "grpc-tonic")] + LogExporter::Tonic { + metadata, timeout, .. + } => f + .debug_struct("Exporter") + .field("metadata", &metadata) + .field("timeout", &timeout) + .field("log_exporter", &"LogServiceClient") + .finish(), + #[cfg(feature = "grpc-sys")] + LogExporter::Grpcio { + headers, timeout, .. + } => f + .debug_struct("Exporter") + .field("headers", &headers) + .field("timeout", &timeout) + .field("log_exporter", &"LogServiceClient") + .finish(), + #[cfg(feature = "http-proto")] + LogExporter::Http { + headers, timeout, .. + } => f + .debug_struct("Exporter") + .field("headers", &headers) + .field("timeout", &timeout) + .field("log_exporter", &"LogServiceClient") + .finish(), + } + } +} + +impl LogExporter { + /// Builds a new log exporter with the given configuration. 
+ #[cfg(feature = "grpc-tonic")] + pub fn new_tonic( + config: ExportConfig, + tonic_config: TonicConfig, + ) -> Result { + let endpoint = TonicChannel::from_shared(config.endpoint.clone())?; + + #[cfg(feature = "tls")] + let channel = match tonic_config.tls_config.as_ref() { + Some(tls_config) => endpoint.tls_config(tls_config.clone())?, + None => endpoint, + } + .timeout(config.timeout) + .connect_lazy(); + + #[cfg(not(feature = "tls"))] + let channel = endpoint.timeout(config.timeout).connect_lazy(); + + LogExporter::from_tonic_channel(config, tonic_config, channel) + } + + /// Builds a new log exporter with given tonic channel. + /// + /// This allows users to bring their own custom channel like UDS. + /// However, users MUST make sure the [`ExportConfig::timeout`] is + /// the same as the channel's timeout. + #[cfg(feature = "grpc-tonic")] + pub fn from_tonic_channel( + config: ExportConfig, + tonic_config: TonicConfig, + channel: tonic::transport::Channel, + ) -> Result { + Ok(LogExporter::Tonic { + timeout: config.timeout, + metadata: tonic_config.metadata, + log_exporter: TonicLogsServiceClient::new(channel), + }) + } + + /// Builds a new log exporter with the given configuration + #[cfg(feature = "grpc-sys")] + pub fn new_grpcio(config: ExportConfig, grpcio_config: GrpcioConfig) -> Self { + let mut builder: ChannelBuilder = ChannelBuilder::new(Arc::new(Environment::new( + grpcio_config.completion_queue_count, + ))); + + if let Some(compression) = grpcio_config.compression { + builder = builder.default_compression_algorithm(compression.into()); + } + + let channel: GrpcChannel = match (grpcio_config.credentials, grpcio_config.use_tls) { + (None, Some(true)) => builder.secure_connect( + config.endpoint.as_str(), + ChannelCredentialsBuilder::new().build(), + ), + (None, _) => builder.connect(config.endpoint.as_str()), + (Some(credentials), _) => builder.secure_connect( + config.endpoint.as_str(), + ChannelCredentialsBuilder::new() + 
.cert(credentials.cert.into(), credentials.key.into()) + .build(), + ), + }; + + LogExporter::Grpcio { + log_exporter: GrpcioLogServiceClient::new(channel), + timeout: config.timeout, + headers: grpcio_config.headers, + } + } + + /// Builds a new log exporter with the given configuration + #[cfg(feature = "http-proto")] + pub fn new_http(config: ExportConfig, http_config: HttpConfig) -> Result { + let url: Uri = config + .endpoint + .parse() + .map_err::(Into::into)?; + + Ok(LogExporter::Http { + log_exporter: http_config.client, + timeout: config.timeout, + collector_endpoint: url, + headers: http_config.headers, + }) + } +} + +#[async_trait] +impl opentelemetry::sdk::export::log::LogExporter for LogExporter { + async fn export(&mut self, batch: Vec) -> opentelemetry::log::LogResult<()> { + match self { + #[cfg(feature = "grpc-sys")] + LogExporter::Grpcio { + timeout, + headers, + log_exporter, + } => { + let request = GrpcRequest { + resource_logs: protobuf::RepeatedField::from_vec( + batch.into_iter().map(Into::into).collect(), + ), + unknown_fields: Default::default(), + cached_size: Default::default(), + }; + + let mut call_options = CallOption::default().timeout(*timeout); + + if let Some(headers) = headers.clone() { + let mut metadata_builder: MetadataBuilder = MetadataBuilder::new(); + + for (key, value) in headers { + let _ = metadata_builder.add_str(key.as_str(), value.as_str()); + } + + call_options = call_options.headers(metadata_builder.build()); + } + + let receiver = log_exporter + .export_async_opt(&request, call_options) + .map_err::(Into::into)?; + receiver.await.map_err::(Into::into)?; + Ok(()) + } + #[cfg(feature = "grpc-tonic")] + LogExporter::Tonic { + log_exporter, + metadata, + .. 
+ } => { + let mut request = Request::new(TonicRequest { + resource_logs: batch.into_iter().map(Into::into).collect(), + }); + + if let Some(metadata) = metadata { + for key_and_value in metadata.iter() { + match key_and_value { + KeyAndValueRef::Ascii(key, value) => { + request.metadata_mut().append(key, value.to_owned()) + } + KeyAndValueRef::Binary(key, value) => { + request.metadata_mut().append_bin(key, value.to_owned()) + } + }; + } + } + + log_exporter + .to_owned() + .export(request) + .await + .map_err::(Into::into)?; + + Ok(()) + } + + #[cfg(feature = "http-proto")] + LogExporter::Http { + log_exporter, + collector_endpoint, + headers, + .. + } => { + let req = ProstRequest { + resource_logs: batch.into_iter().map(Into::into).collect(), + }; + + let mut buf = vec![]; + req.encode(&mut buf) + .map_err::(Into::into)?; + + let mut request = http::Request::builder() + .method(Method::POST) + .uri(collector_endpoint.clone()) + .header(CONTENT_TYPE, "application/x-protobuf") + .body(buf) + .map_err::(Into::into)?; + + if let Some(headers) = headers.clone() { + for (k, val) in headers { + let value = HeaderValue::from_str(val.as_ref()) + .map_err::(Into::into)?; + let key = + HeaderName::try_from(&k).map_err::(Into::into)?; + request.headers_mut().insert(key, value); + } + } + + if let Some(client) = log_exporter { + client.send(request).await?; + Ok(()) + } else { + Err(crate::Error::NoHttpClient.into()) + } + } + } + } +} + +/// Recommended configuration for an OTLP exporter pipeline. +#[derive(Default, Debug)] +pub struct OtlpLogPipeline { + exporter_builder: Option, + log_config: Option, +} + +impl OtlpLogPipeline { + /// Set the OTLP log exporter builder. + pub fn with_exporter>(mut self, pipeline: B) -> Self { + self.exporter_builder = Some(pipeline.into()); + self + } + + /// Returns a [`LogEmitter`] with the name `opentelemetry-otlp` and the + /// current crate version, using the configured log exporter. 
+ /// + /// [`LogEmitter`]: opentelemetry::log::LogEmitter + pub fn simple(self) -> Result { + Ok(build_simple_with_exporter( + self.exporter_builder + .ok_or(crate::Error::NoExporterBuilder)? + .build_log_exporter()?, + self.log_config, + )) + } + + /// Returns a [`LogEmitter`] with the name `opentelemetry-otlp` and the + /// current crate version, using the configured log exporter and a + /// batch log processor. + /// + /// [`LogEmitter`]: opentelemetry::log::LogEmitter + pub fn batch(self, runtime: R) -> Result { + Ok(build_batch_with_exporter( + self.exporter_builder + .ok_or(crate::Error::NoExporterBuilder)? + .build_log_exporter()?, + self.log_config, + runtime, + )) + } +} + +fn build_simple_with_exporter( + exporter: LogExporter, + log_config: Option, +) -> sdk::log::LogEmitter { + let mut provider_builder = + sdk::log::LogEmitterProvider::builder().with_simple_exporter(exporter); + if let Some(config) = log_config { + provider_builder = provider_builder.with_config(config); + } + let provider = provider_builder.build(); + provider.versioned_log_emitter("opentelemetry-otlp", Some(env!("CARGO_PKG_VERSION"))) +} + +fn build_batch_with_exporter( + exporter: LogExporter, + log_config: Option, + runtime: R, +) -> sdk::log::LogEmitter { + let mut provider_builder = + sdk::log::LogEmitterProvider::builder().with_batch_exporter(exporter, runtime); + if let Some(config) = log_config { + provider_builder = provider_builder.with_config(config); + } + let provider = provider_builder.build(); + provider.versioned_log_emitter("opentelemetry-otlp", Some(env!("CARGO_PKG_VERSION"))) +} diff --git a/opentelemetry-proto/Cargo.toml b/opentelemetry-proto/Cargo.toml index 8bb231fc86..cb18ea6d41 100644 --- a/opentelemetry-proto/Cargo.toml +++ b/opentelemetry-proto/Cargo.toml @@ -47,7 +47,7 @@ grpcio = { version = "0.12", optional = true } tonic = { version = "0.9.0", optional = true } prost = { version = "0.11.0", optional = true } protobuf = { version = "2.18", optional = true 
} # todo: update to 3.0 so we have docs for generated types. -opentelemetry = { version = "0.19", default-features = false, features = ["trace", "metrics"], path = "../opentelemetry" } +opentelemetry = { version = "0.19", default-features = false, features = ["trace", "metrics", "log"], path = "../opentelemetry" } futures = { version = "0.3", default-features = false, features = ["std"] } futures-util = { version = "0.3", default-features = false, features = ["std"] } serde = { version = "1.0", optional = true } diff --git a/opentelemetry-proto/src/proto.rs b/opentelemetry-proto/src/proto.rs index 5a16c8298b..dd8f0d99aa 100644 --- a/opentelemetry-proto/src/proto.rs +++ b/opentelemetry-proto/src/proto.rs @@ -72,6 +72,12 @@ pub mod tonic { /// Generated files using [`grpcio`](https://docs.rs/crate/grpcio) and [`protobuf`](https://docs.rs/crate/protobuf/latest) pub mod grpcio { pub mod common; + #[cfg(feature = "logs")] + pub mod logs; + #[cfg(feature = "logs")] + pub mod logs_service; + #[cfg(feature = "logs")] + pub mod logs_service_grpc; #[cfg(feature = "metrics")] pub mod metrics; #[cfg(feature = "metrics")] diff --git a/opentelemetry-proto/src/proto/grpcio/logs.rs b/opentelemetry-proto/src/proto/grpcio/logs.rs new file mode 100644 index 0000000000..ea9cf72748 --- /dev/null +++ b/opentelemetry-proto/src/proto/grpcio/logs.rs @@ -0,0 +1,1317 @@ +// This file is generated by rust-protobuf 2.27.1. Do not edit +// @generated + +// https://github.com/rust-lang/rust-clippy/issues/702 +#![allow(unknown_lints)] +#![allow(clippy::all)] + +#![allow(unused_attributes)] +#![cfg_attr(rustfmt, rustfmt::skip)] + +#![allow(box_pointers)] +#![allow(dead_code)] +#![allow(missing_docs)] +#![allow(non_camel_case_types)] +#![allow(non_snake_case)] +#![allow(non_upper_case_globals)] +#![allow(trivial_casts)] +#![allow(unused_imports)] +#![allow(unused_results)] +//! 
Generated file from `opentelemetry/proto/logs/v1/logs.proto` + +/// Generated files are compatible only with the same version +/// of protobuf runtime. +// const _PROTOBUF_VERSION_CHECK: () = ::protobuf::VERSION_2_27_1; + +#[derive(PartialEq,Clone,Default)] +#[cfg_attr(feature = "with-serde", derive(::serde::Serialize, ::serde::Deserialize))] +pub struct ResourceLogs { + // message fields + pub resource: ::protobuf::SingularPtrField, + pub instrumentation_library_logs: ::protobuf::RepeatedField, + pub schema_url: ::std::string::String, + // special fields + #[cfg_attr(feature = "with-serde", serde(skip))] + pub unknown_fields: ::protobuf::UnknownFields, + #[cfg_attr(feature = "with-serde", serde(skip))] + pub cached_size: ::protobuf::CachedSize, +} + +impl<'a> ::std::default::Default for &'a ResourceLogs { + fn default() -> &'a ResourceLogs { + ::default_instance() + } +} + +impl ResourceLogs { + pub fn new() -> ResourceLogs { + ::std::default::Default::default() + } + + // .opentelemetry.proto.resource.v1.Resource resource = 1; + + + pub fn get_resource(&self) -> &super::resource::Resource { + self.resource.as_ref().unwrap_or_else(|| ::default_instance()) + } + pub fn clear_resource(&mut self) { + self.resource.clear(); + } + + pub fn has_resource(&self) -> bool { + self.resource.is_some() + } + + // Param is passed by value, moved + pub fn set_resource(&mut self, v: super::resource::Resource) { + self.resource = ::protobuf::SingularPtrField::some(v); + } + + // Mutable pointer to the field. + // If field is not initialized, it is initialized with default value first. 
+ pub fn mut_resource(&mut self) -> &mut super::resource::Resource { + if self.resource.is_none() { + self.resource.set_default(); + } + self.resource.as_mut().unwrap() + } + + // Take field + pub fn take_resource(&mut self) -> super::resource::Resource { + self.resource.take().unwrap_or_else(|| super::resource::Resource::new()) + } + + // repeated .opentelemetry.proto.logs.v1.InstrumentationLibraryLogs instrumentation_library_logs = 2; + + + pub fn get_instrumentation_library_logs(&self) -> &[InstrumentationLibraryLogs] { + &self.instrumentation_library_logs + } + pub fn clear_instrumentation_library_logs(&mut self) { + self.instrumentation_library_logs.clear(); + } + + // Param is passed by value, moved + pub fn set_instrumentation_library_logs(&mut self, v: ::protobuf::RepeatedField) { + self.instrumentation_library_logs = v; + } + + // Mutable pointer to the field. + pub fn mut_instrumentation_library_logs(&mut self) -> &mut ::protobuf::RepeatedField { + &mut self.instrumentation_library_logs + } + + // Take field + pub fn take_instrumentation_library_logs(&mut self) -> ::protobuf::RepeatedField { + ::std::mem::replace(&mut self.instrumentation_library_logs, ::protobuf::RepeatedField::new()) + } + + // string schema_url = 3; + + + pub fn get_schema_url(&self) -> &str { + &self.schema_url + } + pub fn clear_schema_url(&mut self) { + self.schema_url.clear(); + } + + // Param is passed by value, moved + pub fn set_schema_url(&mut self, v: ::std::string::String) { + self.schema_url = v; + } + + // Mutable pointer to the field. + // If field is not initialized, it is initialized with default value first. 
+ pub fn mut_schema_url(&mut self) -> &mut ::std::string::String { + &mut self.schema_url + } + + // Take field + pub fn take_schema_url(&mut self) -> ::std::string::String { + ::std::mem::replace(&mut self.schema_url, ::std::string::String::new()) + } +} + +impl ::protobuf::Message for ResourceLogs { + fn is_initialized(&self) -> bool { + for v in &self.resource { + if !v.is_initialized() { + return false; + } + }; + for v in &self.instrumentation_library_logs { + if !v.is_initialized() { + return false; + } + }; + true + } + + fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> { + while !is.eof()? { + let (field_number, wire_type) = is.read_tag_unpack()?; + match field_number { + 1 => { + ::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.resource)?; + }, + 2 => { + ::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.instrumentation_library_logs)?; + }, + 3 => { + ::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.schema_url)?; + }, + _ => { + ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?; + }, + }; + } + ::std::result::Result::Ok(()) + } + + // Compute sizes of nested messages + #[allow(unused_variables)] + fn compute_size(&self) -> u32 { + let mut my_size = 0; + if let Some(ref v) = self.resource.as_ref() { + let len = v.compute_size(); + my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len; + } + for value in &self.instrumentation_library_logs { + let len = value.compute_size(); + my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len; + }; + if !self.schema_url.is_empty() { + my_size += ::protobuf::rt::string_size(3, &self.schema_url); + } + my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields()); + self.cached_size.set(my_size); + my_size + } + + fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> 
::protobuf::ProtobufResult<()> { + if let Some(ref v) = self.resource.as_ref() { + os.write_tag(1, ::protobuf::wire_format::WireTypeLengthDelimited)?; + os.write_raw_varint32(v.get_cached_size())?; + v.write_to_with_cached_sizes(os)?; + } + for v in &self.instrumentation_library_logs { + os.write_tag(2, ::protobuf::wire_format::WireTypeLengthDelimited)?; + os.write_raw_varint32(v.get_cached_size())?; + v.write_to_with_cached_sizes(os)?; + }; + if !self.schema_url.is_empty() { + os.write_string(3, &self.schema_url)?; + } + os.write_unknown_fields(self.get_unknown_fields())?; + ::std::result::Result::Ok(()) + } + + fn get_cached_size(&self) -> u32 { + self.cached_size.get() + } + + fn get_unknown_fields(&self) -> &::protobuf::UnknownFields { + &self.unknown_fields + } + + fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields { + &mut self.unknown_fields + } + + fn as_any(&self) -> &dyn (::std::any::Any) { + self as &dyn (::std::any::Any) + } + fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) { + self as &mut dyn (::std::any::Any) + } + fn into_any(self: ::std::boxed::Box) -> ::std::boxed::Box { + self + } + + fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor { + Self::descriptor_static() + } + + fn new() -> ResourceLogs { + ResourceLogs::new() + } + + fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor { + static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT; + descriptor.get(|| { + let mut fields = ::std::vec::Vec::new(); + fields.push(::protobuf::reflect::accessor::make_singular_ptr_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage>( + "resource", + |m: &ResourceLogs| { &m.resource }, + |m: &mut ResourceLogs| { &mut m.resource }, + )); + fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage>( + "instrumentation_library_logs", + |m: &ResourceLogs| { 
&m.instrumentation_library_logs }, + |m: &mut ResourceLogs| { &mut m.instrumentation_library_logs }, + )); + fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>( + "schema_url", + |m: &ResourceLogs| { &m.schema_url }, + |m: &mut ResourceLogs| { &mut m.schema_url }, + )); + ::protobuf::reflect::MessageDescriptor::new_pb_name::( + "ResourceLogs", + fields, + file_descriptor_proto() + ) + }) + } + + fn default_instance() -> &'static ResourceLogs { + static instance: ::protobuf::rt::LazyV2 = ::protobuf::rt::LazyV2::INIT; + instance.get(ResourceLogs::new) + } +} + +impl ::protobuf::Clear for ResourceLogs { + fn clear(&mut self) { + self.resource.clear(); + self.instrumentation_library_logs.clear(); + self.schema_url.clear(); + self.unknown_fields.clear(); + } +} + +impl ::std::fmt::Debug for ResourceLogs { + fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { + ::protobuf::text_format::fmt(self, f) + } +} + +impl ::protobuf::reflect::ProtobufValue for ResourceLogs { + fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef { + ::protobuf::reflect::ReflectValueRef::Message(self) + } +} + +#[derive(PartialEq,Clone,Default)] +#[cfg_attr(feature = "with-serde", derive(::serde::Serialize, ::serde::Deserialize))] +pub struct InstrumentationLibraryLogs { + // message fields + pub instrumentation_library: ::protobuf::SingularPtrField, + pub logs: ::protobuf::RepeatedField, + pub schema_url: ::std::string::String, + // special fields + #[cfg_attr(feature = "with-serde", serde(skip))] + pub unknown_fields: ::protobuf::UnknownFields, + #[cfg_attr(feature = "with-serde", serde(skip))] + pub cached_size: ::protobuf::CachedSize, +} + +impl<'a> ::std::default::Default for &'a InstrumentationLibraryLogs { + fn default() -> &'a InstrumentationLibraryLogs { + ::default_instance() + } +} + +impl InstrumentationLibraryLogs { + pub fn new() -> InstrumentationLibraryLogs { + ::std::default::Default::default() 
+ } + + // .opentelemetry.proto.common.v1.InstrumentationLibrary instrumentation_library = 1; + + + pub fn get_instrumentation_library(&self) -> &super::common::InstrumentationLibrary { + self.instrumentation_library.as_ref().unwrap_or_else(|| ::default_instance()) + } + pub fn clear_instrumentation_library(&mut self) { + self.instrumentation_library.clear(); + } + + pub fn has_instrumentation_library(&self) -> bool { + self.instrumentation_library.is_some() + } + + // Param is passed by value, moved + pub fn set_instrumentation_library(&mut self, v: super::common::InstrumentationLibrary) { + self.instrumentation_library = ::protobuf::SingularPtrField::some(v); + } + + // Mutable pointer to the field. + // If field is not initialized, it is initialized with default value first. + pub fn mut_instrumentation_library(&mut self) -> &mut super::common::InstrumentationLibrary { + if self.instrumentation_library.is_none() { + self.instrumentation_library.set_default(); + } + self.instrumentation_library.as_mut().unwrap() + } + + // Take field + pub fn take_instrumentation_library(&mut self) -> super::common::InstrumentationLibrary { + self.instrumentation_library.take().unwrap_or_else(|| super::common::InstrumentationLibrary::new()) + } + + // repeated .opentelemetry.proto.logs.v1.LogRecord logs = 2; + + + pub fn get_logs(&self) -> &[LogRecord] { + &self.logs + } + pub fn clear_logs(&mut self) { + self.logs.clear(); + } + + // Param is passed by value, moved + pub fn set_logs(&mut self, v: ::protobuf::RepeatedField) { + self.logs = v; + } + + // Mutable pointer to the field. 
+ pub fn mut_logs(&mut self) -> &mut ::protobuf::RepeatedField { + &mut self.logs + } + + // Take field + pub fn take_logs(&mut self) -> ::protobuf::RepeatedField { + ::std::mem::replace(&mut self.logs, ::protobuf::RepeatedField::new()) + } + + // string schema_url = 3; + + + pub fn get_schema_url(&self) -> &str { + &self.schema_url + } + pub fn clear_schema_url(&mut self) { + self.schema_url.clear(); + } + + // Param is passed by value, moved + pub fn set_schema_url(&mut self, v: ::std::string::String) { + self.schema_url = v; + } + + // Mutable pointer to the field. + // If field is not initialized, it is initialized with default value first. + pub fn mut_schema_url(&mut self) -> &mut ::std::string::String { + &mut self.schema_url + } + + // Take field + pub fn take_schema_url(&mut self) -> ::std::string::String { + ::std::mem::replace(&mut self.schema_url, ::std::string::String::new()) + } +} + +impl ::protobuf::Message for InstrumentationLibraryLogs { + fn is_initialized(&self) -> bool { + for v in &self.instrumentation_library { + if !v.is_initialized() { + return false; + } + }; + for v in &self.logs { + if !v.is_initialized() { + return false; + } + }; + true + } + + fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> { + while !is.eof()? 
{ + let (field_number, wire_type) = is.read_tag_unpack()?; + match field_number { + 1 => { + ::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.instrumentation_library)?; + }, + 2 => { + ::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.logs)?; + }, + 3 => { + ::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.schema_url)?; + }, + _ => { + ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?; + }, + }; + } + ::std::result::Result::Ok(()) + } + + // Compute sizes of nested messages + #[allow(unused_variables)] + fn compute_size(&self) -> u32 { + let mut my_size = 0; + if let Some(ref v) = self.instrumentation_library.as_ref() { + let len = v.compute_size(); + my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len; + } + for value in &self.logs { + let len = value.compute_size(); + my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len; + }; + if !self.schema_url.is_empty() { + my_size += ::protobuf::rt::string_size(3, &self.schema_url); + } + my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields()); + self.cached_size.set(my_size); + my_size + } + + fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> { + if let Some(ref v) = self.instrumentation_library.as_ref() { + os.write_tag(1, ::protobuf::wire_format::WireTypeLengthDelimited)?; + os.write_raw_varint32(v.get_cached_size())?; + v.write_to_with_cached_sizes(os)?; + } + for v in &self.logs { + os.write_tag(2, ::protobuf::wire_format::WireTypeLengthDelimited)?; + os.write_raw_varint32(v.get_cached_size())?; + v.write_to_with_cached_sizes(os)?; + }; + if !self.schema_url.is_empty() { + os.write_string(3, &self.schema_url)?; + } + os.write_unknown_fields(self.get_unknown_fields())?; + ::std::result::Result::Ok(()) + } + + fn get_cached_size(&self) -> u32 { + self.cached_size.get() + } + + fn 
get_unknown_fields(&self) -> &::protobuf::UnknownFields { + &self.unknown_fields + } + + fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields { + &mut self.unknown_fields + } + + fn as_any(&self) -> &dyn (::std::any::Any) { + self as &dyn (::std::any::Any) + } + fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) { + self as &mut dyn (::std::any::Any) + } + fn into_any(self: ::std::boxed::Box) -> ::std::boxed::Box { + self + } + + fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor { + Self::descriptor_static() + } + + fn new() -> InstrumentationLibraryLogs { + InstrumentationLibraryLogs::new() + } + + fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor { + static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT; + descriptor.get(|| { + let mut fields = ::std::vec::Vec::new(); + fields.push(::protobuf::reflect::accessor::make_singular_ptr_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage>( + "instrumentation_library", + |m: &InstrumentationLibraryLogs| { &m.instrumentation_library }, + |m: &mut InstrumentationLibraryLogs| { &mut m.instrumentation_library }, + )); + fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage>( + "logs", + |m: &InstrumentationLibraryLogs| { &m.logs }, + |m: &mut InstrumentationLibraryLogs| { &mut m.logs }, + )); + fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>( + "schema_url", + |m: &InstrumentationLibraryLogs| { &m.schema_url }, + |m: &mut InstrumentationLibraryLogs| { &mut m.schema_url }, + )); + ::protobuf::reflect::MessageDescriptor::new_pb_name::( + "InstrumentationLibraryLogs", + fields, + file_descriptor_proto() + ) + }) + } + + fn default_instance() -> &'static InstrumentationLibraryLogs { + static instance: ::protobuf::rt::LazyV2 = ::protobuf::rt::LazyV2::INIT; + 
instance.get(InstrumentationLibraryLogs::new) + } +} + +impl ::protobuf::Clear for InstrumentationLibraryLogs { + fn clear(&mut self) { + self.instrumentation_library.clear(); + self.logs.clear(); + self.schema_url.clear(); + self.unknown_fields.clear(); + } +} + +impl ::std::fmt::Debug for InstrumentationLibraryLogs { + fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { + ::protobuf::text_format::fmt(self, f) + } +} + +impl ::protobuf::reflect::ProtobufValue for InstrumentationLibraryLogs { + fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef { + ::protobuf::reflect::ReflectValueRef::Message(self) + } +} + +#[derive(PartialEq,Clone,Default)] +#[cfg_attr(feature = "with-serde", derive(::serde::Serialize, ::serde::Deserialize))] +pub struct LogRecord { + // message fields + pub time_unix_nano: u64, + pub severity_number: SeverityNumber, + pub severity_text: ::std::string::String, + pub name: ::std::string::String, + pub body: ::protobuf::SingularPtrField, + pub attributes: ::protobuf::RepeatedField, + pub dropped_attributes_count: u32, + pub flags: u32, + pub trace_id: ::std::vec::Vec, + pub span_id: ::std::vec::Vec, + // special fields + #[cfg_attr(feature = "with-serde", serde(skip))] + pub unknown_fields: ::protobuf::UnknownFields, + #[cfg_attr(feature = "with-serde", serde(skip))] + pub cached_size: ::protobuf::CachedSize, +} + +impl<'a> ::std::default::Default for &'a LogRecord { + fn default() -> &'a LogRecord { + ::default_instance() + } +} + +impl LogRecord { + pub fn new() -> LogRecord { + ::std::default::Default::default() + } + + // fixed64 time_unix_nano = 1; + + + pub fn get_time_unix_nano(&self) -> u64 { + self.time_unix_nano + } + pub fn clear_time_unix_nano(&mut self) { + self.time_unix_nano = 0; + } + + // Param is passed by value, moved + pub fn set_time_unix_nano(&mut self, v: u64) { + self.time_unix_nano = v; + } + + // .opentelemetry.proto.logs.v1.SeverityNumber severity_number = 2; + + + pub fn 
get_severity_number(&self) -> SeverityNumber { + self.severity_number + } + pub fn clear_severity_number(&mut self) { + self.severity_number = SeverityNumber::SEVERITY_NUMBER_UNSPECIFIED; + } + + // Param is passed by value, moved + pub fn set_severity_number(&mut self, v: SeverityNumber) { + self.severity_number = v; + } + + // string severity_text = 3; + + + pub fn get_severity_text(&self) -> &str { + &self.severity_text + } + pub fn clear_severity_text(&mut self) { + self.severity_text.clear(); + } + + // Param is passed by value, moved + pub fn set_severity_text(&mut self, v: ::std::string::String) { + self.severity_text = v; + } + + // Mutable pointer to the field. + // If field is not initialized, it is initialized with default value first. + pub fn mut_severity_text(&mut self) -> &mut ::std::string::String { + &mut self.severity_text + } + + // Take field + pub fn take_severity_text(&mut self) -> ::std::string::String { + ::std::mem::replace(&mut self.severity_text, ::std::string::String::new()) + } + + // string name = 4; + + + pub fn get_name(&self) -> &str { + &self.name + } + pub fn clear_name(&mut self) { + self.name.clear(); + } + + // Param is passed by value, moved + pub fn set_name(&mut self, v: ::std::string::String) { + self.name = v; + } + + // Mutable pointer to the field. + // If field is not initialized, it is initialized with default value first. 
+ pub fn mut_name(&mut self) -> &mut ::std::string::String { + &mut self.name + } + + // Take field + pub fn take_name(&mut self) -> ::std::string::String { + ::std::mem::replace(&mut self.name, ::std::string::String::new()) + } + + // .opentelemetry.proto.common.v1.AnyValue body = 5; + + + pub fn get_body(&self) -> &super::common::AnyValue { + self.body.as_ref().unwrap_or_else(|| ::default_instance()) + } + pub fn clear_body(&mut self) { + self.body.clear(); + } + + pub fn has_body(&self) -> bool { + self.body.is_some() + } + + // Param is passed by value, moved + pub fn set_body(&mut self, v: super::common::AnyValue) { + self.body = ::protobuf::SingularPtrField::some(v); + } + + // Mutable pointer to the field. + // If field is not initialized, it is initialized with default value first. + pub fn mut_body(&mut self) -> &mut super::common::AnyValue { + if self.body.is_none() { + self.body.set_default(); + } + self.body.as_mut().unwrap() + } + + // Take field + pub fn take_body(&mut self) -> super::common::AnyValue { + self.body.take().unwrap_or_else(|| super::common::AnyValue::new()) + } + + // repeated .opentelemetry.proto.common.v1.KeyValue attributes = 6; + + + pub fn get_attributes(&self) -> &[super::common::KeyValue] { + &self.attributes + } + pub fn clear_attributes(&mut self) { + self.attributes.clear(); + } + + // Param is passed by value, moved + pub fn set_attributes(&mut self, v: ::protobuf::RepeatedField) { + self.attributes = v; + } + + // Mutable pointer to the field. 
+ pub fn mut_attributes(&mut self) -> &mut ::protobuf::RepeatedField { + &mut self.attributes + } + + // Take field + pub fn take_attributes(&mut self) -> ::protobuf::RepeatedField { + ::std::mem::replace(&mut self.attributes, ::protobuf::RepeatedField::new()) + } + + // uint32 dropped_attributes_count = 7; + + + pub fn get_dropped_attributes_count(&self) -> u32 { + self.dropped_attributes_count + } + pub fn clear_dropped_attributes_count(&mut self) { + self.dropped_attributes_count = 0; + } + + // Param is passed by value, moved + pub fn set_dropped_attributes_count(&mut self, v: u32) { + self.dropped_attributes_count = v; + } + + // fixed32 flags = 8; + + + pub fn get_flags(&self) -> u32 { + self.flags + } + pub fn clear_flags(&mut self) { + self.flags = 0; + } + + // Param is passed by value, moved + pub fn set_flags(&mut self, v: u32) { + self.flags = v; + } + + // bytes trace_id = 9; + + + pub fn get_trace_id(&self) -> &[u8] { + &self.trace_id + } + pub fn clear_trace_id(&mut self) { + self.trace_id.clear(); + } + + // Param is passed by value, moved + pub fn set_trace_id(&mut self, v: ::std::vec::Vec) { + self.trace_id = v; + } + + // Mutable pointer to the field. + // If field is not initialized, it is initialized with default value first. + pub fn mut_trace_id(&mut self) -> &mut ::std::vec::Vec { + &mut self.trace_id + } + + // Take field + pub fn take_trace_id(&mut self) -> ::std::vec::Vec { + ::std::mem::replace(&mut self.trace_id, ::std::vec::Vec::new()) + } + + // bytes span_id = 10; + + + pub fn get_span_id(&self) -> &[u8] { + &self.span_id + } + pub fn clear_span_id(&mut self) { + self.span_id.clear(); + } + + // Param is passed by value, moved + pub fn set_span_id(&mut self, v: ::std::vec::Vec) { + self.span_id = v; + } + + // Mutable pointer to the field. + // If field is not initialized, it is initialized with default value first. 
+ pub fn mut_span_id(&mut self) -> &mut ::std::vec::Vec { + &mut self.span_id + } + + // Take field + pub fn take_span_id(&mut self) -> ::std::vec::Vec { + ::std::mem::replace(&mut self.span_id, ::std::vec::Vec::new()) + } +} + +impl ::protobuf::Message for LogRecord { + fn is_initialized(&self) -> bool { + for v in &self.body { + if !v.is_initialized() { + return false; + } + }; + for v in &self.attributes { + if !v.is_initialized() { + return false; + } + }; + true + } + + fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> { + while !is.eof()? { + let (field_number, wire_type) = is.read_tag_unpack()?; + match field_number { + 1 => { + if wire_type != ::protobuf::wire_format::WireTypeFixed64 { + return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type)); + } + let tmp = is.read_fixed64()?; + self.time_unix_nano = tmp; + }, + 2 => { + ::protobuf::rt::read_proto3_enum_with_unknown_fields_into(wire_type, is, &mut self.severity_number, 2, &mut self.unknown_fields)? 
+ }, + 3 => { + ::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.severity_text)?; + }, + 4 => { + ::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.name)?; + }, + 5 => { + ::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.body)?; + }, + 6 => { + ::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.attributes)?; + }, + 7 => { + if wire_type != ::protobuf::wire_format::WireTypeVarint { + return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type)); + } + let tmp = is.read_uint32()?; + self.dropped_attributes_count = tmp; + }, + 8 => { + if wire_type != ::protobuf::wire_format::WireTypeFixed32 { + return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type)); + } + let tmp = is.read_fixed32()?; + self.flags = tmp; + }, + 9 => { + ::protobuf::rt::read_singular_proto3_bytes_into(wire_type, is, &mut self.trace_id)?; + }, + 10 => { + ::protobuf::rt::read_singular_proto3_bytes_into(wire_type, is, &mut self.span_id)?; + }, + _ => { + ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?; + }, + }; + } + ::std::result::Result::Ok(()) + } + + // Compute sizes of nested messages + #[allow(unused_variables)] + fn compute_size(&self) -> u32 { + let mut my_size = 0; + if self.time_unix_nano != 0 { + my_size += 9; + } + if self.severity_number != SeverityNumber::SEVERITY_NUMBER_UNSPECIFIED { + my_size += ::protobuf::rt::enum_size(2, self.severity_number); + } + if !self.severity_text.is_empty() { + my_size += ::protobuf::rt::string_size(3, &self.severity_text); + } + if !self.name.is_empty() { + my_size += ::protobuf::rt::string_size(4, &self.name); + } + if let Some(ref v) = self.body.as_ref() { + let len = v.compute_size(); + my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len; + } + for value in &self.attributes { + let len = value.compute_size(); + my_size += 1 + 
::protobuf::rt::compute_raw_varint32_size(len) + len; + }; + if self.dropped_attributes_count != 0 { + my_size += ::protobuf::rt::value_size(7, self.dropped_attributes_count, ::protobuf::wire_format::WireTypeVarint); + } + if self.flags != 0 { + my_size += 5; + } + if !self.trace_id.is_empty() { + my_size += ::protobuf::rt::bytes_size(9, &self.trace_id); + } + if !self.span_id.is_empty() { + my_size += ::protobuf::rt::bytes_size(10, &self.span_id); + } + my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields()); + self.cached_size.set(my_size); + my_size + } + + fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> { + if self.time_unix_nano != 0 { + os.write_fixed64(1, self.time_unix_nano)?; + } + if self.severity_number != SeverityNumber::SEVERITY_NUMBER_UNSPECIFIED { + os.write_enum(2, ::protobuf::ProtobufEnum::value(&self.severity_number))?; + } + if !self.severity_text.is_empty() { + os.write_string(3, &self.severity_text)?; + } + if !self.name.is_empty() { + os.write_string(4, &self.name)?; + } + if let Some(ref v) = self.body.as_ref() { + os.write_tag(5, ::protobuf::wire_format::WireTypeLengthDelimited)?; + os.write_raw_varint32(v.get_cached_size())?; + v.write_to_with_cached_sizes(os)?; + } + for v in &self.attributes { + os.write_tag(6, ::protobuf::wire_format::WireTypeLengthDelimited)?; + os.write_raw_varint32(v.get_cached_size())?; + v.write_to_with_cached_sizes(os)?; + }; + if self.dropped_attributes_count != 0 { + os.write_uint32(7, self.dropped_attributes_count)?; + } + if self.flags != 0 { + os.write_fixed32(8, self.flags)?; + } + if !self.trace_id.is_empty() { + os.write_bytes(9, &self.trace_id)?; + } + if !self.span_id.is_empty() { + os.write_bytes(10, &self.span_id)?; + } + os.write_unknown_fields(self.get_unknown_fields())?; + ::std::result::Result::Ok(()) + } + + fn get_cached_size(&self) -> u32 { + self.cached_size.get() + } + + fn get_unknown_fields(&self) -> 
&::protobuf::UnknownFields { + &self.unknown_fields + } + + fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields { + &mut self.unknown_fields + } + + fn as_any(&self) -> &dyn (::std::any::Any) { + self as &dyn (::std::any::Any) + } + fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) { + self as &mut dyn (::std::any::Any) + } + fn into_any(self: ::std::boxed::Box) -> ::std::boxed::Box { + self + } + + fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor { + Self::descriptor_static() + } + + fn new() -> LogRecord { + LogRecord::new() + } + + fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor { + static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT; + descriptor.get(|| { + let mut fields = ::std::vec::Vec::new(); + fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeFixed64>( + "time_unix_nano", + |m: &LogRecord| { &m.time_unix_nano }, + |m: &mut LogRecord| { &mut m.time_unix_nano }, + )); + fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeEnum>( + "severity_number", + |m: &LogRecord| { &m.severity_number }, + |m: &mut LogRecord| { &mut m.severity_number }, + )); + fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>( + "severity_text", + |m: &LogRecord| { &m.severity_text }, + |m: &mut LogRecord| { &mut m.severity_text }, + )); + fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>( + "name", + |m: &LogRecord| { &m.name }, + |m: &mut LogRecord| { &mut m.name }, + )); + fields.push(::protobuf::reflect::accessor::make_singular_ptr_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage>( + "body", + |m: &LogRecord| { &m.body }, + |m: &mut LogRecord| { &mut m.body }, + )); + 
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage>( + "attributes", + |m: &LogRecord| { &m.attributes }, + |m: &mut LogRecord| { &mut m.attributes }, + )); + fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeUint32>( + "dropped_attributes_count", + |m: &LogRecord| { &m.dropped_attributes_count }, + |m: &mut LogRecord| { &mut m.dropped_attributes_count }, + )); + fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeFixed32>( + "flags", + |m: &LogRecord| { &m.flags }, + |m: &mut LogRecord| { &mut m.flags }, + )); + fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeBytes>( + "trace_id", + |m: &LogRecord| { &m.trace_id }, + |m: &mut LogRecord| { &mut m.trace_id }, + )); + fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeBytes>( + "span_id", + |m: &LogRecord| { &m.span_id }, + |m: &mut LogRecord| { &mut m.span_id }, + )); + ::protobuf::reflect::MessageDescriptor::new_pb_name::( + "LogRecord", + fields, + file_descriptor_proto() + ) + }) + } + + fn default_instance() -> &'static LogRecord { + static instance: ::protobuf::rt::LazyV2 = ::protobuf::rt::LazyV2::INIT; + instance.get(LogRecord::new) + } +} + +impl ::protobuf::Clear for LogRecord { + fn clear(&mut self) { + self.time_unix_nano = 0; + self.severity_number = SeverityNumber::SEVERITY_NUMBER_UNSPECIFIED; + self.severity_text.clear(); + self.name.clear(); + self.body.clear(); + self.attributes.clear(); + self.dropped_attributes_count = 0; + self.flags = 0; + self.trace_id.clear(); + self.span_id.clear(); + self.unknown_fields.clear(); + } +} + +impl ::std::fmt::Debug for LogRecord { + fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { + ::protobuf::text_format::fmt(self, f) + } +} + +impl 
::protobuf::reflect::ProtobufValue for LogRecord { + fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef { + ::protobuf::reflect::ReflectValueRef::Message(self) + } +} + +#[derive(Clone,PartialEq,Eq,Debug,Hash)] +#[cfg_attr(feature = "with-serde", derive(::serde::Serialize, ::serde::Deserialize))] +pub enum SeverityNumber { + SEVERITY_NUMBER_UNSPECIFIED = 0, + SEVERITY_NUMBER_TRACE = 1, + SEVERITY_NUMBER_TRACE2 = 2, + SEVERITY_NUMBER_TRACE3 = 3, + SEVERITY_NUMBER_TRACE4 = 4, + SEVERITY_NUMBER_DEBUG = 5, + SEVERITY_NUMBER_DEBUG2 = 6, + SEVERITY_NUMBER_DEBUG3 = 7, + SEVERITY_NUMBER_DEBUG4 = 8, + SEVERITY_NUMBER_INFO = 9, + SEVERITY_NUMBER_INFO2 = 10, + SEVERITY_NUMBER_INFO3 = 11, + SEVERITY_NUMBER_INFO4 = 12, + SEVERITY_NUMBER_WARN = 13, + SEVERITY_NUMBER_WARN2 = 14, + SEVERITY_NUMBER_WARN3 = 15, + SEVERITY_NUMBER_WARN4 = 16, + SEVERITY_NUMBER_ERROR = 17, + SEVERITY_NUMBER_ERROR2 = 18, + SEVERITY_NUMBER_ERROR3 = 19, + SEVERITY_NUMBER_ERROR4 = 20, + SEVERITY_NUMBER_FATAL = 21, + SEVERITY_NUMBER_FATAL2 = 22, + SEVERITY_NUMBER_FATAL3 = 23, + SEVERITY_NUMBER_FATAL4 = 24, +} + +impl ::protobuf::ProtobufEnum for SeverityNumber { + fn value(&self) -> i32 { + *self as i32 + } + + fn from_i32(value: i32) -> ::std::option::Option { + match value { + 0 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_UNSPECIFIED), + 1 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_TRACE), + 2 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_TRACE2), + 3 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_TRACE3), + 4 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_TRACE4), + 5 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_DEBUG), + 6 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_DEBUG2), + 7 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_DEBUG3), + 8 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_DEBUG4), + 9 => 
::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_INFO), + 10 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_INFO2), + 11 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_INFO3), + 12 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_INFO4), + 13 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_WARN), + 14 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_WARN2), + 15 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_WARN3), + 16 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_WARN4), + 17 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_ERROR), + 18 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_ERROR2), + 19 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_ERROR3), + 20 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_ERROR4), + 21 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_FATAL), + 22 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_FATAL2), + 23 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_FATAL3), + 24 => ::std::option::Option::Some(SeverityNumber::SEVERITY_NUMBER_FATAL4), + _ => ::std::option::Option::None + } + } + + fn values() -> &'static [Self] { + static values: &'static [SeverityNumber] = &[ + SeverityNumber::SEVERITY_NUMBER_UNSPECIFIED, + SeverityNumber::SEVERITY_NUMBER_TRACE, + SeverityNumber::SEVERITY_NUMBER_TRACE2, + SeverityNumber::SEVERITY_NUMBER_TRACE3, + SeverityNumber::SEVERITY_NUMBER_TRACE4, + SeverityNumber::SEVERITY_NUMBER_DEBUG, + SeverityNumber::SEVERITY_NUMBER_DEBUG2, + SeverityNumber::SEVERITY_NUMBER_DEBUG3, + SeverityNumber::SEVERITY_NUMBER_DEBUG4, + SeverityNumber::SEVERITY_NUMBER_INFO, + SeverityNumber::SEVERITY_NUMBER_INFO2, + SeverityNumber::SEVERITY_NUMBER_INFO3, + SeverityNumber::SEVERITY_NUMBER_INFO4, + SeverityNumber::SEVERITY_NUMBER_WARN, + SeverityNumber::SEVERITY_NUMBER_WARN2, + 
SeverityNumber::SEVERITY_NUMBER_WARN3, + SeverityNumber::SEVERITY_NUMBER_WARN4, + SeverityNumber::SEVERITY_NUMBER_ERROR, + SeverityNumber::SEVERITY_NUMBER_ERROR2, + SeverityNumber::SEVERITY_NUMBER_ERROR3, + SeverityNumber::SEVERITY_NUMBER_ERROR4, + SeverityNumber::SEVERITY_NUMBER_FATAL, + SeverityNumber::SEVERITY_NUMBER_FATAL2, + SeverityNumber::SEVERITY_NUMBER_FATAL3, + SeverityNumber::SEVERITY_NUMBER_FATAL4, + ]; + values + } + + fn enum_descriptor_static() -> &'static ::protobuf::reflect::EnumDescriptor { + static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::EnumDescriptor> = ::protobuf::rt::LazyV2::INIT; + descriptor.get(|| { + ::protobuf::reflect::EnumDescriptor::new_pb_name::("SeverityNumber", file_descriptor_proto()) + }) + } +} + +impl ::std::marker::Copy for SeverityNumber { +} + +impl ::std::default::Default for SeverityNumber { + fn default() -> Self { + SeverityNumber::SEVERITY_NUMBER_UNSPECIFIED + } +} + +impl ::protobuf::reflect::ProtobufValue for SeverityNumber { + fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef { + ::protobuf::reflect::ReflectValueRef::Enum(::protobuf::ProtobufEnum::descriptor(self)) + } +} + +#[derive(Clone,PartialEq,Eq,Debug,Hash)] +#[cfg_attr(feature = "with-serde", derive(::serde::Serialize, ::serde::Deserialize))] +pub enum LogRecordFlags { + LOG_RECORD_FLAG_UNSPECIFIED = 0, + LOG_RECORD_FLAG_TRACE_FLAGS_MASK = 255, +} + +impl ::protobuf::ProtobufEnum for LogRecordFlags { + fn value(&self) -> i32 { + *self as i32 + } + + fn from_i32(value: i32) -> ::std::option::Option { + match value { + 0 => ::std::option::Option::Some(LogRecordFlags::LOG_RECORD_FLAG_UNSPECIFIED), + 255 => ::std::option::Option::Some(LogRecordFlags::LOG_RECORD_FLAG_TRACE_FLAGS_MASK), + _ => ::std::option::Option::None + } + } + + fn values() -> &'static [Self] { + static values: &'static [LogRecordFlags] = &[ + LogRecordFlags::LOG_RECORD_FLAG_UNSPECIFIED, + LogRecordFlags::LOG_RECORD_FLAG_TRACE_FLAGS_MASK, + ]; + values + } + + fn 
enum_descriptor_static() -> &'static ::protobuf::reflect::EnumDescriptor { + static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::EnumDescriptor> = ::protobuf::rt::LazyV2::INIT; + descriptor.get(|| { + ::protobuf::reflect::EnumDescriptor::new_pb_name::("LogRecordFlags", file_descriptor_proto()) + }) + } +} + +impl ::std::marker::Copy for LogRecordFlags { +} + +impl ::std::default::Default for LogRecordFlags { + fn default() -> Self { + LogRecordFlags::LOG_RECORD_FLAG_UNSPECIFIED + } +} + +impl ::protobuf::reflect::ProtobufValue for LogRecordFlags { + fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef { + ::protobuf::reflect::ReflectValueRef::Enum(::protobuf::ProtobufEnum::descriptor(self)) + } +} + +static file_descriptor_proto_data: &'static [u8] = b"\ + \n&opentelemetry/proto/logs/v1/logs.proto\x12\x1bopentelemetry.proto.log\ + s.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/pr\ + oto/resource/v1/resource.proto\"\xef\x01\n\x0cResourceLogs\x12E\n\x08res\ + ource\x18\x01\x20\x01(\x0b2).opentelemetry.proto.resource.v1.ResourceR\ + \x08resource\x12y\n\x1cinstrumentation_library_logs\x18\x02\x20\x03(\x0b\ + 27.opentelemetry.proto.logs.v1.InstrumentationLibraryLogsR\x1ainstrument\ + ationLibraryLogs\x12\x1d\n\nschema_url\x18\x03\x20\x01(\tR\tschemaUrl\"\ + \xe7\x01\n\x1aInstrumentationLibraryLogs\x12n\n\x17instrumentation_libra\ + ry\x18\x01\x20\x01(\x0b25.opentelemetry.proto.common.v1.InstrumentationL\ + ibraryR\x16instrumentationLibrary\x12:\n\x04logs\x18\x02\x20\x03(\x0b2&.\ + opentelemetry.proto.logs.v1.LogRecordR\x04logs\x12\x1d\n\nschema_url\x18\ + \x03\x20\x01(\tR\tschemaUrl\"\xca\x03\n\tLogRecord\x12$\n\x0etime_unix_n\ + ano\x18\x01\x20\x01(\x06R\x0ctimeUnixNano\x12T\n\x0fseverity_number\x18\ + \x02\x20\x01(\x0e2+.opentelemetry.proto.logs.v1.SeverityNumberR\x0esever\ + ityNumber\x12#\n\rseverity_text\x18\x03\x20\x01(\tR\x0cseverityText\x12\ + \x12\n\x04name\x18\x04\x20\x01(\tR\x04name\x12;\n\x04body\x18\x05\x20\ + 
\x01(\x0b2'.opentelemetry.proto.common.v1.AnyValueR\x04body\x12G\n\nattr\ + ibutes\x18\x06\x20\x03(\x0b2'.opentelemetry.proto.common.v1.KeyValueR\na\ + ttributes\x128\n\x18dropped_attributes_count\x18\x07\x20\x01(\rR\x16drop\ + pedAttributesCount\x12\x14\n\x05flags\x18\x08\x20\x01(\x07R\x05flags\x12\ + \x19\n\x08trace_id\x18\t\x20\x01(\x0cR\x07traceId\x12\x17\n\x07span_id\ + \x18\n\x20\x01(\x0cR\x06spanId*\xc3\x05\n\x0eSeverityNumber\x12\x1f\n\ + \x1bSEVERITY_NUMBER_UNSPECIFIED\x10\0\x12\x19\n\x15SEVERITY_NUMBER_TRACE\ + \x10\x01\x12\x1a\n\x16SEVERITY_NUMBER_TRACE2\x10\x02\x12\x1a\n\x16SEVERI\ + TY_NUMBER_TRACE3\x10\x03\x12\x1a\n\x16SEVERITY_NUMBER_TRACE4\x10\x04\x12\ + \x19\n\x15SEVERITY_NUMBER_DEBUG\x10\x05\x12\x1a\n\x16SEVERITY_NUMBER_DEB\ + UG2\x10\x06\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG3\x10\x07\x12\x1a\n\x16SEV\ + ERITY_NUMBER_DEBUG4\x10\x08\x12\x18\n\x14SEVERITY_NUMBER_INFO\x10\t\x12\ + \x19\n\x15SEVERITY_NUMBER_INFO2\x10\n\x12\x19\n\x15SEVERITY_NUMBER_INFO3\ + \x10\x0b\x12\x19\n\x15SEVERITY_NUMBER_INFO4\x10\x0c\x12\x18\n\x14SEVERIT\ + Y_NUMBER_WARN\x10\r\x12\x19\n\x15SEVERITY_NUMBER_WARN2\x10\x0e\x12\x19\n\ + \x15SEVERITY_NUMBER_WARN3\x10\x0f\x12\x19\n\x15SEVERITY_NUMBER_WARN4\x10\ + \x10\x12\x19\n\x15SEVERITY_NUMBER_ERROR\x10\x11\x12\x1a\n\x16SEVERITY_NU\ + MBER_ERROR2\x10\x12\x12\x1a\n\x16SEVERITY_NUMBER_ERROR3\x10\x13\x12\x1a\ + \n\x16SEVERITY_NUMBER_ERROR4\x10\x14\x12\x19\n\x15SEVERITY_NUMBER_FATAL\ + \x10\x15\x12\x1a\n\x16SEVERITY_NUMBER_FATAL2\x10\x16\x12\x1a\n\x16SEVERI\ + TY_NUMBER_FATAL3\x10\x17\x12\x1a\n\x16SEVERITY_NUMBER_FATAL4\x10\x18*X\n\ + \x0eLogRecordFlags\x12\x1f\n\x1bLOG_RECORD_FLAG_UNSPECIFIED\x10\0\x12%\n\ + \x20LOG_RECORD_FLAG_TRACE_FLAGS_MASK\x10\xff\x01Bk\n\x1eio.opentelemetry\ + .proto.logs.v1B\tLogsProtoP\x01Z = ::protobuf::rt::LazyV2::INIT; + +fn parse_descriptor_proto() -> ::protobuf::descriptor::FileDescriptorProto { + ::protobuf::Message::parse_from_bytes(file_descriptor_proto_data).unwrap() +} + +pub fn file_descriptor_proto() 
-> &'static ::protobuf::descriptor::FileDescriptorProto { + file_descriptor_proto_lazy.get(|| { + parse_descriptor_proto() + }) +} diff --git a/opentelemetry-proto/src/proto/grpcio/logs_service.rs b/opentelemetry-proto/src/proto/grpcio/logs_service.rs new file mode 100644 index 0000000000..365e7bb2e8 --- /dev/null +++ b/opentelemetry-proto/src/proto/grpcio/logs_service.rs @@ -0,0 +1,337 @@ +// This file is generated by rust-protobuf 2.27.1. Do not edit +// @generated + +// https://github.com/rust-lang/rust-clippy/issues/702 +#![allow(unknown_lints)] +#![allow(clippy::all)] + +#![allow(unused_attributes)] +#![cfg_attr(rustfmt, rustfmt::skip)] + +#![allow(box_pointers)] +#![allow(dead_code)] +#![allow(missing_docs)] +#![allow(non_camel_case_types)] +#![allow(non_snake_case)] +#![allow(non_upper_case_globals)] +#![allow(trivial_casts)] +#![allow(unused_imports)] +#![allow(unused_results)] +//! Generated file from `opentelemetry/proto/collector/logs/v1/logs_service.proto` + +/// Generated files are compatible only with the same version +/// of protobuf runtime. 
+// const _PROTOBUF_VERSION_CHECK: () = ::protobuf::VERSION_2_27_1; + +#[derive(PartialEq,Clone,Default)] +#[cfg_attr(feature = "with-serde", derive(::serde::Serialize, ::serde::Deserialize))] +pub struct ExportLogsServiceRequest { + // message fields + pub resource_logs: ::protobuf::RepeatedField, + // special fields + #[cfg_attr(feature = "with-serde", serde(skip))] + pub unknown_fields: ::protobuf::UnknownFields, + #[cfg_attr(feature = "with-serde", serde(skip))] + pub cached_size: ::protobuf::CachedSize, +} + +impl<'a> ::std::default::Default for &'a ExportLogsServiceRequest { + fn default() -> &'a ExportLogsServiceRequest { + ::default_instance() + } +} + +impl ExportLogsServiceRequest { + pub fn new() -> ExportLogsServiceRequest { + ::std::default::Default::default() + } + + // repeated .opentelemetry.proto.logs.v1.ResourceLogs resource_logs = 1; + + + pub fn get_resource_logs(&self) -> &[super::logs::ResourceLogs] { + &self.resource_logs + } + pub fn clear_resource_logs(&mut self) { + self.resource_logs.clear(); + } + + // Param is passed by value, moved + pub fn set_resource_logs(&mut self, v: ::protobuf::RepeatedField) { + self.resource_logs = v; + } + + // Mutable pointer to the field. + pub fn mut_resource_logs(&mut self) -> &mut ::protobuf::RepeatedField { + &mut self.resource_logs + } + + // Take field + pub fn take_resource_logs(&mut self) -> ::protobuf::RepeatedField { + ::std::mem::replace(&mut self.resource_logs, ::protobuf::RepeatedField::new()) + } +} + +impl ::protobuf::Message for ExportLogsServiceRequest { + fn is_initialized(&self) -> bool { + for v in &self.resource_logs { + if !v.is_initialized() { + return false; + } + }; + true + } + + fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> { + while !is.eof()? 
{ + let (field_number, wire_type) = is.read_tag_unpack()?; + match field_number { + 1 => { + ::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.resource_logs)?; + }, + _ => { + ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?; + }, + }; + } + ::std::result::Result::Ok(()) + } + + // Compute sizes of nested messages + #[allow(unused_variables)] + fn compute_size(&self) -> u32 { + let mut my_size = 0; + for value in &self.resource_logs { + let len = value.compute_size(); + my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len; + }; + my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields()); + self.cached_size.set(my_size); + my_size + } + + fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> { + for v in &self.resource_logs { + os.write_tag(1, ::protobuf::wire_format::WireTypeLengthDelimited)?; + os.write_raw_varint32(v.get_cached_size())?; + v.write_to_with_cached_sizes(os)?; + }; + os.write_unknown_fields(self.get_unknown_fields())?; + ::std::result::Result::Ok(()) + } + + fn get_cached_size(&self) -> u32 { + self.cached_size.get() + } + + fn get_unknown_fields(&self) -> &::protobuf::UnknownFields { + &self.unknown_fields + } + + fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields { + &mut self.unknown_fields + } + + fn as_any(&self) -> &dyn (::std::any::Any) { + self as &dyn (::std::any::Any) + } + fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) { + self as &mut dyn (::std::any::Any) + } + fn into_any(self: ::std::boxed::Box) -> ::std::boxed::Box { + self + } + + fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor { + Self::descriptor_static() + } + + fn new() -> ExportLogsServiceRequest { + ExportLogsServiceRequest::new() + } + + fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor { + static descriptor: 
::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT; + descriptor.get(|| { + let mut fields = ::std::vec::Vec::new(); + fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage>( + "resource_logs", + |m: &ExportLogsServiceRequest| { &m.resource_logs }, + |m: &mut ExportLogsServiceRequest| { &mut m.resource_logs }, + )); + ::protobuf::reflect::MessageDescriptor::new_pb_name::( + "ExportLogsServiceRequest", + fields, + file_descriptor_proto() + ) + }) + } + + fn default_instance() -> &'static ExportLogsServiceRequest { + static instance: ::protobuf::rt::LazyV2 = ::protobuf::rt::LazyV2::INIT; + instance.get(ExportLogsServiceRequest::new) + } +} + +impl ::protobuf::Clear for ExportLogsServiceRequest { + fn clear(&mut self) { + self.resource_logs.clear(); + self.unknown_fields.clear(); + } +} + +impl ::std::fmt::Debug for ExportLogsServiceRequest { + fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { + ::protobuf::text_format::fmt(self, f) + } +} + +impl ::protobuf::reflect::ProtobufValue for ExportLogsServiceRequest { + fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef { + ::protobuf::reflect::ReflectValueRef::Message(self) + } +} + +#[derive(PartialEq,Clone,Default)] +#[cfg_attr(feature = "with-serde", derive(::serde::Serialize, ::serde::Deserialize))] +pub struct ExportLogsServiceResponse { + // special fields + #[cfg_attr(feature = "with-serde", serde(skip))] + pub unknown_fields: ::protobuf::UnknownFields, + #[cfg_attr(feature = "with-serde", serde(skip))] + pub cached_size: ::protobuf::CachedSize, +} + +impl<'a> ::std::default::Default for &'a ExportLogsServiceResponse { + fn default() -> &'a ExportLogsServiceResponse { + ::default_instance() + } +} + +impl ExportLogsServiceResponse { + pub fn new() -> ExportLogsServiceResponse { + ::std::default::Default::default() + } +} + +impl ::protobuf::Message for ExportLogsServiceResponse { + fn 
is_initialized(&self) -> bool { + true + } + + fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> { + while !is.eof()? { + let (field_number, wire_type) = is.read_tag_unpack()?; + match field_number { + _ => { + ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?; + }, + }; + } + ::std::result::Result::Ok(()) + } + + // Compute sizes of nested messages + #[allow(unused_variables)] + fn compute_size(&self) -> u32 { + let mut my_size = 0; + my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields()); + self.cached_size.set(my_size); + my_size + } + + fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> { + os.write_unknown_fields(self.get_unknown_fields())?; + ::std::result::Result::Ok(()) + } + + fn get_cached_size(&self) -> u32 { + self.cached_size.get() + } + + fn get_unknown_fields(&self) -> &::protobuf::UnknownFields { + &self.unknown_fields + } + + fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields { + &mut self.unknown_fields + } + + fn as_any(&self) -> &dyn (::std::any::Any) { + self as &dyn (::std::any::Any) + } + fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) { + self as &mut dyn (::std::any::Any) + } + fn into_any(self: ::std::boxed::Box) -> ::std::boxed::Box { + self + } + + fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor { + Self::descriptor_static() + } + + fn new() -> ExportLogsServiceResponse { + ExportLogsServiceResponse::new() + } + + fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor { + static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT; + descriptor.get(|| { + let fields = ::std::vec::Vec::new(); + ::protobuf::reflect::MessageDescriptor::new_pb_name::( + "ExportLogsServiceResponse", + fields, + file_descriptor_proto() + ) + }) + } + + fn 
default_instance() -> &'static ExportLogsServiceResponse { + static instance: ::protobuf::rt::LazyV2 = ::protobuf::rt::LazyV2::INIT; + instance.get(ExportLogsServiceResponse::new) + } +} + +impl ::protobuf::Clear for ExportLogsServiceResponse { + fn clear(&mut self) { + self.unknown_fields.clear(); + } +} + +impl ::std::fmt::Debug for ExportLogsServiceResponse { + fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { + ::protobuf::text_format::fmt(self, f) + } +} + +impl ::protobuf::reflect::ProtobufValue for ExportLogsServiceResponse { + fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef { + ::protobuf::reflect::ReflectValueRef::Message(self) + } +} + +static file_descriptor_proto_data: &'static [u8] = b"\ + \n8opentelemetry/proto/collector/logs/v1/logs_service.proto\x12%opentele\ + metry.proto.collector.logs.v1\x1a&opentelemetry/proto/logs/v1/logs.proto\ + \"j\n\x18ExportLogsServiceRequest\x12N\n\rresource_logs\x18\x01\x20\x03(\ + \x0b2).opentelemetry.proto.logs.v1.ResourceLogsR\x0cresourceLogs\"\x1b\n\ + \x19ExportLogsServiceResponse2\x9d\x01\n\x0bLogsService\x12\x8d\x01\n\ + \x06Export\x12?.opentelemetry.proto.collector.logs.v1.ExportLogsServiceR\ + equest\x1a@.opentelemetry.proto.collector.logs.v1.ExportLogsServiceRespo\ + nse\"\0B\x86\x01\n(io.opentelemetry.proto.collector.logs.v1B\x10LogsServ\ + iceProtoP\x01ZFgithub.com/open-telemetry/opentelemetry-proto/gen/go/coll\ + ector/logs/v1b\x06proto3\ +"; + +static file_descriptor_proto_lazy: ::protobuf::rt::LazyV2<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::rt::LazyV2::INIT; + +fn parse_descriptor_proto() -> ::protobuf::descriptor::FileDescriptorProto { + ::protobuf::Message::parse_from_bytes(file_descriptor_proto_data).unwrap() +} + +pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto { + file_descriptor_proto_lazy.get(|| { + parse_descriptor_proto() + }) +} diff --git a/opentelemetry-proto/src/proto/grpcio/logs_service_grpc.rs 
b/opentelemetry-proto/src/proto/grpcio/logs_service_grpc.rs new file mode 100644 index 0000000000..6f91702fa9 --- /dev/null +++ b/opentelemetry-proto/src/proto/grpcio/logs_service_grpc.rs @@ -0,0 +1,69 @@ +// This file is generated. Do not edit +// @generated + +// https://github.com/Manishearth/rust-clippy/issues/702 +#![allow(unknown_lints)] +#![allow(clippy::all)] + +#![allow(box_pointers)] +#![allow(dead_code)] +#![allow(missing_docs)] +#![allow(non_camel_case_types)] +#![allow(non_snake_case)] +#![allow(non_upper_case_globals)] +#![allow(trivial_casts)] +#![allow(unsafe_code)] +#![allow(unused_imports)] +#![allow(unused_results)] + +const METHOD_LOGS_SERVICE_EXPORT: ::grpcio::Method = ::grpcio::Method { + ty: ::grpcio::MethodType::Unary, + name: "/opentelemetry.proto.collector.logs.v1.LogsService/Export", + req_mar: ::grpcio::Marshaller { ser: ::grpcio::pb_ser, de: ::grpcio::pb_de }, + resp_mar: ::grpcio::Marshaller { ser: ::grpcio::pb_ser, de: ::grpcio::pb_de }, +}; + +#[derive(Clone)] +pub struct LogsServiceClient { + client: ::grpcio::Client, +} + +impl LogsServiceClient { + pub fn new(channel: ::grpcio::Channel) -> Self { + LogsServiceClient { + client: ::grpcio::Client::new(channel), + } + } + + pub fn export_opt(&self, req: &super::logs_service::ExportLogsServiceRequest, opt: ::grpcio::CallOption) -> ::grpcio::Result { + self.client.unary_call(&METHOD_LOGS_SERVICE_EXPORT, req, opt) + } + + pub fn export(&self, req: &super::logs_service::ExportLogsServiceRequest) -> ::grpcio::Result { + self.export_opt(req, ::grpcio::CallOption::default()) + } + + pub fn export_async_opt(&self, req: &super::logs_service::ExportLogsServiceRequest, opt: ::grpcio::CallOption) -> ::grpcio::Result<::grpcio::ClientUnaryReceiver> { + self.client.unary_call_async(&METHOD_LOGS_SERVICE_EXPORT, req, opt) + } + + pub fn export_async(&self, req: &super::logs_service::ExportLogsServiceRequest) -> ::grpcio::Result<::grpcio::ClientUnaryReceiver> { + self.export_async_opt(req, 
::grpcio::CallOption::default()) + } + pub fn spawn(&self, f: F) where F: ::futures::Future + Send + 'static { + self.client.spawn(f) + } +} + +pub trait LogsService { + fn export(&mut self, ctx: ::grpcio::RpcContext, req: super::logs_service::ExportLogsServiceRequest, sink: ::grpcio::UnarySink); +} + +pub fn create_logs_service(s: S) -> ::grpcio::Service { + let mut builder = ::grpcio::ServiceBuilder::new(); + let mut instance = s; + builder = builder.add_unary_handler(&METHOD_LOGS_SERVICE_EXPORT, move |ctx, req, resp| { + instance.export(ctx, req, resp) + }); + builder.build() +} diff --git a/opentelemetry-proto/src/transform/common.rs b/opentelemetry-proto/src/transform/common.rs index 013cd68d78..7c51dfba85 100644 --- a/opentelemetry-proto/src/transform/common.rs +++ b/opentelemetry-proto/src/transform/common.rs @@ -1,7 +1,7 @@ -#[cfg(feature = "traces")] +#[cfg(any(feature = "traces", feature = "logs"))] use std::time::{Duration, SystemTime, UNIX_EPOCH}; -#[cfg(feature = "traces")] +#[cfg(any(feature = "traces", feature = "logs"))] pub(crate) fn to_nanos(time: SystemTime) -> u64 { time.duration_since(UNIX_EPOCH) .unwrap_or_else(|_| Duration::from_secs(0)) @@ -13,7 +13,10 @@ pub mod tonic { use crate::proto::tonic::common::v1::{ any_value, AnyValue, ArrayValue, InstrumentationScope, KeyValue, }; - use opentelemetry::{sdk::trace::EvictedHashMap, Array, Value}; + use opentelemetry::{ + sdk::{trace::EvictedHashMap, Resource}, + Array, Value, + }; use std::borrow::Cow; impl From for InstrumentationScope { @@ -102,6 +105,18 @@ pub mod tonic { ArrayValue { values } } + + #[cfg(any(feature = "traces", feature = "logs"))] + pub(crate) fn resource_attributes(resource: Option<&Resource>) -> Attributes { + resource + .map(|res| { + res.iter() + .map(|(k, v)| opentelemetry::KeyValue::new(k.clone(), v.clone())) + .collect::>() + }) + .unwrap_or_default() + .into() + } } #[cfg(feature = "gen-protoc")] @@ -188,4 +203,17 @@ pub mod grpcio { array_value.set_values(values); 
array_value } + + #[cfg(any(feature = "traces", feature = "logs"))] + pub(crate) fn resource_attributes(resource: Option<&Resource>) -> Attributes { + resource + .map(|resource| { + resource + .iter() + .map(|(k, v)| opentelemetry::KeyValue::new(k.clone(), v.clone())) + .collect::>() + }) + .unwrap_or_default() + .into() + } } diff --git a/opentelemetry-proto/src/transform/logs.rs b/opentelemetry-proto/src/transform/logs.rs new file mode 100644 index 0000000000..ac8e7b47ab --- /dev/null +++ b/opentelemetry-proto/src/transform/logs.rs @@ -0,0 +1,333 @@ +use crate::transform::common::to_nanos; +use std::borrow::Cow; + +#[cfg(feature = "gen-tonic")] +pub mod tonic { + use std::collections::BTreeMap; + + use opentelemetry::sdk::log::{Any, Severity}; + + use crate::{ + tonic::{ + common::v1::{any_value::Value, AnyValue, ArrayValue, KeyValue, KeyValueList}, + logs::v1::{InstrumentationLibraryLogs, LogRecord, ResourceLogs, SeverityNumber}, + resource::v1::Resource, + }, + transform::common::tonic::resource_attributes, + }; + + use super::*; + + impl From for Value { + fn from(value: Any) -> Self { + match value { + Any::Double(f) => Value::DoubleValue(f), + Any::Int(i) => Value::IntValue(i), + Any::String(s) => Value::StringValue(s), + Any::Boolean(b) => Value::BoolValue(b), + Any::ListAny(v) => Value::ArrayValue(ArrayValue { + values: v + .into_iter() + .map(|v| AnyValue { + value: Some(v.into()), + }) + .collect(), + }), + Any::Map(m) => Value::KvlistValue(KeyValueList { + values: m + .into_iter() + .map(|(key, value)| KeyValue { + key: key.into_owned(), + value: Some(AnyValue { + value: Some(value.into()), + }), + }) + .collect(), + }), + Any::Bytes(v) => Value::BytesValue(v), + } + } + } + + impl From for SeverityNumber { + fn from(number: Severity) -> Self { + match number { + Severity::Trace => SeverityNumber::Trace, + Severity::Trace2 => SeverityNumber::Trace2, + Severity::Trace3 => SeverityNumber::Trace3, + Severity::Trace4 => SeverityNumber::Trace4, + 
Severity::Debug => SeverityNumber::Debug, + Severity::Debug2 => SeverityNumber::Debug2, + Severity::Debug3 => SeverityNumber::Debug3, + Severity::Debug4 => SeverityNumber::Debug4, + Severity::Info => SeverityNumber::Info, + Severity::Info2 => SeverityNumber::Info2, + Severity::Info3 => SeverityNumber::Info3, + Severity::Info4 => SeverityNumber::Info4, + Severity::Warn => SeverityNumber::Warn, + Severity::Warn2 => SeverityNumber::Warn2, + Severity::Warn3 => SeverityNumber::Warn3, + Severity::Warn4 => SeverityNumber::Warn4, + Severity::Error => SeverityNumber::Error, + Severity::Error2 => SeverityNumber::Error2, + Severity::Error3 => SeverityNumber::Error3, + Severity::Error4 => SeverityNumber::Error4, + Severity::Fatal => SeverityNumber::Fatal, + Severity::Fatal2 => SeverityNumber::Fatal2, + Severity::Fatal3 => SeverityNumber::Fatal3, + Severity::Fatal4 => SeverityNumber::Fatal4, + } + } + } + + fn attributes_to_keyvalue(attributes: BTreeMap, Any>) -> Vec { + attributes + .into_iter() + .map(|(key, value)| KeyValue { + key: key.into_owned(), + value: Some(AnyValue { + value: Some(value.into()), + }), + }) + .collect() + } + + impl From for LogRecord { + fn from(log_record: opentelemetry::sdk::log::LogRecord) -> Self { + let trace_context = log_record.trace_context.as_ref(); + + let record = LogRecord { + time_unix_nano: log_record.timestamp.map(to_nanos).unwrap_or(0), + severity_number: log_record + .severity_number + .map(SeverityNumber::from) + .map(Into::into) + .unwrap_or_default(), + severity_text: log_record.severity_text.map(Into::into).unwrap_or_default(), + name: log_record.name.map(Into::into).unwrap_or(String::from("")), + body: Some(AnyValue { + value: log_record.body.map(Into::into), + }), + attributes: log_record + .attributes + .map(attributes_to_keyvalue) + .unwrap_or_default(), + dropped_attributes_count: 0, + flags: trace_context + .map(|ctx| { + ctx.trace_flags + .map(|flags| flags.to_u8() as u32) + .unwrap_or_default() + }) + 
.unwrap_or_default(), + span_id: trace_context + .map(|ctx| ctx.span_id.to_bytes().to_vec()) + .unwrap_or_default(), + trace_id: trace_context + .map(|ctx| ctx.trace_id.to_bytes().to_vec()) + .unwrap_or_default(), + }; + record + } + } + + impl From for ResourceLogs { + fn from(resource_log: opentelemetry::sdk::export::log::ResourceLog) -> Self { + ResourceLogs { + resource: Some(Resource { + attributes: resource_attributes( + resource_log.resource.as_ref().map(AsRef::as_ref), + ) + .0, + dropped_attributes_count: 0, + }), + schema_url: "".to_string(), + instrumentation_library_logs: vec![InstrumentationLibraryLogs { + schema_url: resource_log + .instrumentation + .schema_url + .clone() + .map(Into::into) + .unwrap_or_default(), + instrumentation_library: Some(resource_log.instrumentation.into()), + logs: vec![resource_log.record.into()], + }], + } + } + } +} + +#[cfg(feature = "gen-protoc")] +pub mod grpcio { + use std::collections::BTreeMap; + + use crate::{ + proto::grpcio::{ + common::{AnyValue, AnyValue_oneof_value, ArrayValue, KeyValue, KeyValueList}, + logs::{InstrumentationLibraryLogs, LogRecord, ResourceLogs, SeverityNumber}, + resource::Resource, + }, + transform::common::grpcio::resource_attributes, + }; + use opentelemetry::sdk::log::{Any, Severity}; + use protobuf::{RepeatedField, SingularPtrField}; + + use super::*; + + impl From for AnyValue_oneof_value { + fn from(value: Any) -> Self { + match value { + Any::Double(f) => AnyValue_oneof_value::double_value(f), + Any::Int(i) => AnyValue_oneof_value::int_value(i), + Any::String(s) => AnyValue_oneof_value::string_value(s), + Any::Boolean(b) => AnyValue_oneof_value::bool_value(b), + Any::ListAny(v) => AnyValue_oneof_value::array_value(ArrayValue { + values: RepeatedField::from_vec( + v.into_iter() + .map(|v| AnyValue { + value: Some(v.into()), + ..Default::default() + }) + .collect(), + ), + ..Default::default() + }), + Any::Map(m) => AnyValue_oneof_value::kvlist_value(KeyValueList { + values: 
RepeatedField::from_vec( + m.into_iter() + .map(|(key, value)| KeyValue { + key: key.into_owned(), + value: SingularPtrField::some(AnyValue { + value: Some(value.into()), + ..Default::default() + }), + ..Default::default() + }) + .collect(), + ), + ..Default::default() + }), + Any::Bytes(v) => AnyValue_oneof_value::bytes_value(v), + } + } + } + + impl From for SeverityNumber { + fn from(number: Severity) -> Self { + match number { + Severity::Trace => SeverityNumber::SEVERITY_NUMBER_TRACE, + Severity::Trace2 => SeverityNumber::SEVERITY_NUMBER_TRACE2, + Severity::Trace3 => SeverityNumber::SEVERITY_NUMBER_TRACE3, + Severity::Trace4 => SeverityNumber::SEVERITY_NUMBER_TRACE4, + Severity::Debug => SeverityNumber::SEVERITY_NUMBER_DEBUG, + Severity::Debug2 => SeverityNumber::SEVERITY_NUMBER_DEBUG2, + Severity::Debug3 => SeverityNumber::SEVERITY_NUMBER_DEBUG3, + Severity::Debug4 => SeverityNumber::SEVERITY_NUMBER_DEBUG4, + Severity::Info => SeverityNumber::SEVERITY_NUMBER_INFO, + Severity::Info2 => SeverityNumber::SEVERITY_NUMBER_INFO2, + Severity::Info3 => SeverityNumber::SEVERITY_NUMBER_INFO3, + Severity::Info4 => SeverityNumber::SEVERITY_NUMBER_INFO4, + Severity::Warn => SeverityNumber::SEVERITY_NUMBER_WARN, + Severity::Warn2 => SeverityNumber::SEVERITY_NUMBER_WARN2, + Severity::Warn3 => SeverityNumber::SEVERITY_NUMBER_WARN3, + Severity::Warn4 => SeverityNumber::SEVERITY_NUMBER_WARN4, + Severity::Error => SeverityNumber::SEVERITY_NUMBER_ERROR, + Severity::Error2 => SeverityNumber::SEVERITY_NUMBER_ERROR2, + Severity::Error3 => SeverityNumber::SEVERITY_NUMBER_ERROR3, + Severity::Error4 => SeverityNumber::SEVERITY_NUMBER_ERROR4, + Severity::Fatal => SeverityNumber::SEVERITY_NUMBER_FATAL, + Severity::Fatal2 => SeverityNumber::SEVERITY_NUMBER_FATAL2, + Severity::Fatal3 => SeverityNumber::SEVERITY_NUMBER_FATAL3, + Severity::Fatal4 => SeverityNumber::SEVERITY_NUMBER_FATAL4, + } + } + } + + fn attributes_to_keyvalue(attributes: BTreeMap, Any>) -> Vec { + attributes + 
.into_iter() + .map(|(key, value)| KeyValue { + key: key.into_owned(), + value: SingularPtrField::some(AnyValue { + value: Some(value.into()), + ..Default::default() + }), + ..Default::default() + }) + .collect() + } + + impl From for LogRecord { + fn from(log_record: opentelemetry::sdk::log::LogRecord) -> Self { + let trace_context = log_record.trace_context.as_ref(); + + LogRecord { + time_unix_nano: log_record.timestamp.map(to_nanos).unwrap_or(0), + severity_number: log_record + .severity_number + .map(SeverityNumber::from) + .map(Into::into) + .unwrap_or_default(), + severity_text: log_record.severity_text.map(Into::into).unwrap_or_default(), + name: log_record.name.map(Into::into).unwrap_or(String::from("")), + body: SingularPtrField::some(AnyValue { + value: log_record.body.map(Into::into), + ..Default::default() + }), + attributes: RepeatedField::from_vec( + log_record + .attributes + .map(attributes_to_keyvalue) + .unwrap_or_default(), + ), + dropped_attributes_count: 0, + flags: trace_context + .map(|ctx| { + ctx.trace_flags + .map(|flags| flags.to_u8() as u32) + .unwrap_or_default() + }) + .unwrap_or_default(), + span_id: trace_context + .map(|ctx| ctx.span_id.to_bytes().to_vec()) + .unwrap_or_default(), + trace_id: trace_context + .map(|ctx| ctx.trace_id.to_bytes().to_vec()) + .unwrap_or_default(), + ..Default::default() + } + } + } + + impl From for ResourceLogs { + fn from(resource_log: opentelemetry::sdk::export::log::ResourceLog) -> Self { + ResourceLogs { + resource: SingularPtrField::some(Resource { + attributes: resource_attributes( + resource_log.resource.as_ref().map(AsRef::as_ref), + ) + .0, + dropped_attributes_count: 0, + ..Default::default() + }), + schema_url: "".to_string(), + instrumentation_library_logs: RepeatedField::from_vec(vec![ + InstrumentationLibraryLogs { + schema_url: resource_log + .instrumentation + .schema_url + .clone() + .map(Into::into) + .unwrap_or_default(), + instrumentation_library: SingularPtrField::some( + 
resource_log.instrumentation.into(), + ), + logs: RepeatedField::from_vec(vec![resource_log.record.into()]), + ..Default::default() + }, + ]), + ..Default::default() + } + } + } +} diff --git a/opentelemetry-proto/src/transform/mod.rs b/opentelemetry-proto/src/transform/mod.rs index 985d05d107..36d19281a9 100644 --- a/opentelemetry-proto/src/transform/mod.rs +++ b/opentelemetry-proto/src/transform/mod.rs @@ -6,5 +6,8 @@ pub mod metrics; #[cfg(feature = "traces")] pub mod traces; +#[cfg(feature = "logs")] +pub mod logs; + #[cfg(feature = "zpages")] pub mod tracez; diff --git a/opentelemetry-proto/src/transform/traces.rs b/opentelemetry-proto/src/transform/traces.rs index f91989fa1f..98234c814c 100644 --- a/opentelemetry-proto/src/transform/traces.rs +++ b/opentelemetry-proto/src/transform/traces.rs @@ -1,6 +1,6 @@ use crate::transform::common::to_nanos; -use opentelemetry::sdk::{self, export::trace::SpanData}; -use opentelemetry::trace::{Link, SpanId, SpanKind}; +use opentelemetry::sdk::export::trace::SpanData; +use opentelemetry::trace::{Link, SpanId, SpanKind, Status}; #[cfg(feature = "gen-tonic")] pub mod tonic { @@ -125,7 +125,7 @@ pub mod grpcio { ResourceSpans, ScopeSpans, Span, Span_Event, Span_Link, Span_SpanKind, Status, Status_StatusCode, }; - use crate::transform::common::grpcio::Attributes; + use crate::transform::common::grpcio::{resource_attributes, Attributes}; use opentelemetry::trace; use protobuf::{RepeatedField, SingularPtrField}; diff --git a/opentelemetry-sdk/Cargo.toml b/opentelemetry-sdk/Cargo.toml index 062733a29e..00b8ded225 100644 --- a/opentelemetry-sdk/Cargo.toml +++ b/opentelemetry-sdk/Cargo.toml @@ -46,7 +46,8 @@ default = ["trace"] trace = ["opentelemetry_api/trace", "crossbeam-channel", "rand", "async-trait", "percent-encoding"] jaeger_remote_sampler = ["trace", "opentelemetry-http", "http", "serde", "serde_json", "url"] metrics = ["opentelemetry_api/metrics", "regex", "ordered-float"] -testing = ["opentelemetry_api/testing", "trace", 
"metrics", "rt-async-std", "rt-tokio", "rt-tokio-current-thread", "tokio/macros", "tokio/rt-multi-thread"] +log = ["opentelemetry_api/log", "crossbeam-channel", "async-trait"] +testing = ["opentelemetry_api/testing", "trace", "metrics", "log", "rt-async-std", "rt-tokio", "rt-tokio-current-thread", "tokio/macros", "tokio/rt-multi-thread"] rt-tokio = ["tokio", "tokio-stream"] rt-tokio-current-thread = ["tokio", "tokio-stream"] rt-async-std = ["async-std"] diff --git a/opentelemetry-sdk/src/export/log/mod.rs b/opentelemetry-sdk/src/export/log/mod.rs new file mode 100644 index 0000000000..53fa37debb --- /dev/null +++ b/opentelemetry-sdk/src/export/log/mod.rs @@ -0,0 +1,33 @@ +//! Log exporters +use crate::{log::LogRecord, Resource}; +use async_trait::async_trait; +use opentelemetry_api::log::LogError; +use opentelemetry_api::{log::LogResult, InstrumentationLibrary}; +use std::{fmt::Debug, sync::Arc}; + +pub mod stdout; + +/// `LogExporter` defines the interface that log exporters should implement. +#[async_trait] +pub trait LogExporter: Send + Debug { + /// Exports a batch of `ResourceLogs`. + async fn export(&mut self, batch: Vec) -> LogResult<()>; + /// Shuts down the expoter. + fn shutdown(&mut self) {} +} + +/// `ResourceLog` associates a [`LogRecord`] with a [`Resource`] and +/// [`InstrumentationLibrary`]. +#[derive(Debug)] +#[non_exhaustive] +pub struct ResourceLog { + /// Log record + pub record: LogRecord, + /// Resource for the emitter who produced this `ResourceLog`. + pub resource: Option>, + /// Instrumentation details for the emitter who produced this `ResourceLog`. + pub instrumentation: InstrumentationLibrary, +} + +/// Describes the result of an export. +pub type ExportResult = Result<(), LogError>; diff --git a/opentelemetry-sdk/src/export/log/stdout.rs b/opentelemetry-sdk/src/export/log/stdout.rs new file mode 100644 index 0000000000..39ee2e5bc5 --- /dev/null +++ b/opentelemetry-sdk/src/export/log/stdout.rs @@ -0,0 +1,158 @@ +//! 
# Stdout Log Exporter +//! +//! The stdout [`LogExporter`] writes debug printed [`LogRecord`]s to its configured +//! [`Write`] instance. By default it will write to [`Stdout`]. +//! +//! [`LogExporter`]: super::LogExporter +//! [`LogRecord`]: crate::log::LogRecord +//! [`Write`]: std::io::Write +//! [`Stdout`]: std::io::Stdout +//! +//! # Examples +//! +//! ```no_run +//! use opentelemetry_api::global::shutdown_tracer_provider; +//! use opentelemetry_api::trace::Tracer; +//! use opentelemetry_sdk::export::trace::stdout; +//! +//! fn main() { +//! let tracer = stdout::new_pipeline() +//! .with_pretty_print(true) +//! .install_simple(); +//! +//! tracer.in_span("doing_work", |cx| { +//! // Traced app logic here... +//! }); +//! +//! shutdown_tracer_provider(); // sending remaining spans +//! } +//! ``` +use crate::export::{ + log::{ExportResult, LogExporter, ResourceLog}, + ExportError, +}; +use async_trait::async_trait; +use opentelemetry_api::log::LogError; +use std::fmt::Debug; +use std::io::{stdout, Stdout, Write}; + +/// Pipeline builder +#[derive(Debug)] +pub struct PipelineBuilder { + pretty_print: bool, + log_config: Option, + writer: W, +} + +/// Create a new stdout exporter pipeline builder. +pub fn new_pipeline() -> PipelineBuilder { + PipelineBuilder::default() +} + +impl Default for PipelineBuilder { + /// Return the default pipeline builder. + fn default() -> Self { + Self { + pretty_print: false, + log_config: None, + writer: stdout(), + } + } +} + +impl PipelineBuilder { + /// Specify the pretty print setting. + pub fn with_pretty_print(mut self, pretty_print: bool) -> Self { + self.pretty_print = pretty_print; + self + } + + /// Assign the SDK trace configuration. + pub fn with_trace_config(mut self, config: crate::log::Config) -> Self { + self.log_config = Some(config); + self + } + + /// Specify the writer to use. 
+ pub fn with_writer(self, writer: T) -> PipelineBuilder { + PipelineBuilder { + pretty_print: self.pretty_print, + log_config: self.log_config, + writer, + } + } +} + +impl PipelineBuilder +where + W: Write + Debug + Send + 'static, +{ + /// Install the stdout exporter pipeline with the recommended defaults. + pub fn install_simple(mut self) -> crate::log::LogEmitter { + let exporter = Exporter::new(self.writer, self.pretty_print); + + let mut provider_builder = + crate::log::LogEmitterProvider::builder().with_simple_exporter(exporter); + if let Some(config) = self.log_config.take() { + provider_builder = provider_builder.with_config(config); + } + let provider = provider_builder.build(); + + provider.versioned_log_emitter("opentelemetry", Some(env!("CARGO_PKG_VERSION"))) + } +} + +/// A [`SpanExporter`] that writes to [`Stdout`] or other configured [`Write`]. +/// +/// [`SpanExporter`]: super::SpanExporter +/// [`Write`]: std::io::Write +/// [`Stdout`]: std::io::Stdout +#[derive(Debug)] +pub struct Exporter { + writer: W, + pretty_print: bool, +} + +impl Exporter { + /// Create a new stdout `Exporter`. 
+ pub fn new(writer: W, pretty_print: bool) -> Self { + Self { + writer, + pretty_print, + } + } +} + +#[async_trait] +impl LogExporter for Exporter +where + W: Write + Debug + Send + 'static, +{ + /// Export spans to stdout + async fn export(&mut self, batch: Vec) -> ExportResult { + for span in batch { + if self.pretty_print { + self.writer + .write_all(format!("{:#?}\n", span).as_bytes()) + .map_err(|err| LogError::ExportFailed(Box::new(Error::from(err))))?; + } else { + self.writer + .write_all(format!("{:?}\n", span).as_bytes()) + .map_err(|err| LogError::ExportFailed(Box::new(Error::from(err))))?; + } + } + + Ok(()) + } +} + +/// Stdout exporter's error +#[derive(thiserror::Error, Debug)] +#[error(transparent)] +struct Error(#[from] std::io::Error); + +impl ExportError for Error { + fn exporter_name(&self) -> &'static str { + "stdout" + } +} diff --git a/opentelemetry-sdk/src/export/mod.rs b/opentelemetry-sdk/src/export/mod.rs index d32d3cf018..999c4c9919 100644 --- a/opentelemetry-sdk/src/export/mod.rs +++ b/opentelemetry-sdk/src/export/mod.rs @@ -1,5 +1,9 @@ //! 
Telemetry Export +#[cfg(feature = "log")] +#[cfg_attr(docsrs, doc(cfg(feature = "log")))] +pub mod log; + #[cfg(feature = "trace")] #[cfg_attr(docsrs, doc(cfg(feature = "trace")))] pub mod trace; diff --git a/opentelemetry-sdk/src/lib.rs b/opentelemetry-sdk/src/lib.rs index ee1d0d1e20..dba2a25282 100644 --- a/opentelemetry-sdk/src/lib.rs +++ b/opentelemetry-sdk/src/lib.rs @@ -103,6 +103,9 @@ pub mod export; mod instrumentation; +#[cfg(feature = "log")] +#[cfg_attr(docsrs, doc(cfg(feature = "log")))] +pub mod log; #[cfg(feature = "metrics")] #[cfg_attr(docsrs, doc(cfg(feature = "metrics")))] pub mod metrics; @@ -117,6 +120,7 @@ pub mod testing; #[cfg(feature = "trace")] #[cfg_attr(docsrs, doc(cfg(feature = "trace")))] pub mod trace; + #[doc(hidden)] pub mod util; diff --git a/opentelemetry-sdk/src/log/config.rs b/opentelemetry-sdk/src/log/config.rs new file mode 100644 index 0000000000..9ba655ae2a --- /dev/null +++ b/opentelemetry-sdk/src/log/config.rs @@ -0,0 +1,8 @@ +use std::sync::Arc; + +/// Log emitter configuration. +#[derive(Debug, Default)] +pub struct Config { + /// Contains attributes representing an entity that produces telemetry. + pub resource: Option>, +} diff --git a/opentelemetry-sdk/src/log/log_emitter.rs b/opentelemetry-sdk/src/log/log_emitter.rs new file mode 100644 index 0000000000..18dff84e4c --- /dev/null +++ b/opentelemetry-sdk/src/log/log_emitter.rs @@ -0,0 +1,218 @@ +use super::{BatchLogProcessor, Config, LogProcessor, LogRecord, LogRuntime, SimpleLogProcessor}; +use crate::{ + export::log::{LogExporter, ResourceLog}, + resource::{EnvResourceDetector, SdkProvidedResourceDetector}, + Resource, +}; +use opentelemetry_api::{log::LogResult, InstrumentationLibrary}; +use std::{ + borrow::Cow, + sync::{Arc, Weak}, + time::Duration, +}; + +#[derive(Debug)] +/// Creator for `LogEmitter` instances. +pub struct LogEmitterProvider { + inner: Arc, +} + +/// Default log emitter name if empty string is provided. 
+const DEFAULT_COMPONENT_NAME: &str = "rust.opentelemetry.io/sdk/logemitter"; + +impl LogEmitterProvider { + /// Build a new log emitter provider. + pub(crate) fn new(inner: Arc) -> Self { + LogEmitterProvider { inner } + } + + /// Create a new `LogEmitterProvider` builder. + pub fn builder() -> Builder { + Builder::default() + } + + /// Create a new `LogEmitter`. + pub fn log_emitter(&self, name: impl Into>) -> LogEmitter { + self.versioned_log_emitter(name, Some(env!("CARGO_PKG_VERSION"))) + } + + /// Create a new version `LogEmitter` instance. + pub fn versioned_log_emitter( + &self, + name: impl Into>, + version: Option<&'static str>, + ) -> LogEmitter { + let name = name.into(); + + let component_name = if name.is_empty() { + Cow::Borrowed(DEFAULT_COMPONENT_NAME) + } else { + name + }; + + LogEmitter::new( + InstrumentationLibrary::new(component_name, version.map(Into::into), None), + Arc::downgrade(&self.inner), + ) + } + + /// Config associated with this provider. + pub fn config(&self) -> &Config { + &self.inner.config + } + + /// Log processors associated with this provider. + pub fn log_processors(&self) -> &Vec> { + &self.inner.processors + } + + /// Force flush all remaining logs in log processors and return results. + pub fn force_flush(&self) -> Vec> { + self.log_processors() + .iter() + .map(|processor| processor.force_flush()) + .collect() + } +} + +#[derive(Debug)] +pub(crate) struct LogEmitterProviderInner { + processors: Vec>, + config: Config, +} + +#[derive(Debug)] +/// Builder for provider attributes. 
+pub struct Builder { + processors: Vec>, + config: Config, + sdk_provided_resource: Resource, +} + +impl Default for Builder { + fn default() -> Self { + Builder { + processors: Default::default(), + config: Default::default(), + sdk_provided_resource: Resource::from_detectors( + Duration::from_secs(0), + vec![ + Box::new(SdkProvidedResourceDetector), + Box::new(EnvResourceDetector::new()), + ], + ), + } + } +} + +impl Builder { + /// The `LogExporter` that this provider should use. + pub fn with_simple_exporter(self, exporter: T) -> Self { + let mut processors = self.processors; + processors.push(Box::new(SimpleLogProcessor::new(Box::new(exporter)))); + + Builder { processors, ..self } + } + + /// The `LogExporter` setup using a default `BatchLogProcessor` that this provider should use. + pub fn with_batch_exporter( + self, + exporter: T, + runtime: R, + ) -> Self { + let batch = BatchLogProcessor::builder(exporter, runtime).build(); + self.with_log_processor(batch) + } + + /// The `LogProcessor` that this provider should use. + pub fn with_log_processor(self, processor: T) -> Self { + let mut processors = self.processors; + processors.push(Box::new(processor)); + + Builder { processors, ..self } + } + + /// The `Config` that this provider should use. + pub fn with_config(self, config: Config) -> Self { + Builder { config, ..self } + } + + /// Return the clone of sdk provided resource. + /// + /// See + /// for details. + pub fn sdk_provided_resource(&self) -> Resource { + self.sdk_provided_resource.clone() + } + + /// Create a new provider from this configuration. 
+ pub fn build(self) -> LogEmitterProvider { + let mut config = self.config; + config.resource = match config.resource { + None => Some(Arc::new(self.sdk_provided_resource)), + Some(resource) => { + if resource.is_empty() { + None + } else { + Some(Arc::new(self.sdk_provided_resource.merge(resource))) + } + } + }; + + LogEmitterProvider { + inner: Arc::new(LogEmitterProviderInner { + processors: self.processors, + config, + }), + } + } +} + +#[derive(Debug)] +/// The object for emitting [`LogRecord`]s. +/// +/// [`LogRecord`]: crate::LogRecord +pub struct LogEmitter { + instrumentation_lib: InstrumentationLibrary, + provider: Weak, +} + +impl LogEmitter { + pub(crate) fn new( + instrumentation_lib: InstrumentationLibrary, + provider: Weak, + ) -> Self { + LogEmitter { + instrumentation_lib, + provider, + } + } + + /// LogEmitterProvider associated with this tracer. + pub fn provider(&self) -> Option { + self.provider.upgrade().map(LogEmitterProvider::new) + } + + /// Instrumentation library information of this tracer. + pub fn instrumentation_library(&self) -> &InstrumentationLibrary { + &self.instrumentation_lib + } + + /// Emit a `LogRecord`. 
+ pub fn emit(&self, record: LogRecord) { + let provider = match self.provider() { + Some(provider) => provider, + None => return, + }; + + let config = provider.config(); + for processor in provider.log_processors() { + let data = ResourceLog { + record: record.clone(), + resource: config.resource.clone(), + instrumentation: self.instrumentation_lib.clone(), + }; + processor.emit(data); + } + } +} diff --git a/opentelemetry-sdk/src/log/log_processor.rs b/opentelemetry-sdk/src/log/log_processor.rs new file mode 100644 index 0000000000..40cea3b01f --- /dev/null +++ b/opentelemetry-sdk/src/log/log_processor.rs @@ -0,0 +1,368 @@ +use super::LogRuntime; +use crate::{ + export::log::{ExportResult, LogExporter, ResourceLog}, + log::TrySend, +}; +use futures_channel::oneshot; +use futures_util::{ + future::{self, Either}, + {pin_mut, stream, StreamExt as _}, +}; +use opentelemetry_api::{ + global, + log::{LogError, LogResult}, +}; +use std::thread; +use std::{ + fmt::{self, Debug, Formatter}, + time::Duration, +}; + +/// The interface for plugging into a [`LogEmitter`]. +/// +/// [`LogEmitter`]: crate::LogEmitter +pub trait LogProcessor: Debug { + /// Called when a log record is ready to processed and exported. + fn emit(&self, data: ResourceLog); + /// Force the logs lying in the cache to be exported. + fn force_flush(&self) -> LogResult<()>; + /// Shuts down the processor. + fn shutdown(&mut self) -> LogResult<()>; +} + +/// A [`LogProcessor`] that exports synchronously when logs are emitted. +/// +/// # Examples +/// +/// Note that the simple processor exports synchronously every time a log is +/// emitted. If you find this limiting, consider the batch processor instead. 
+#[derive(Debug)] +pub struct SimpleLogProcessor { + sender: crossbeam_channel::Sender>, + shutdown: crossbeam_channel::Receiver<()>, +} + +impl SimpleLogProcessor { + pub(crate) fn new(mut exporter: Box) -> Self { + let (log_tx, log_rx) = crossbeam_channel::unbounded(); + let (shutdown_tx, shutdown_rx) = crossbeam_channel::bounded(0); + + let _ = thread::Builder::new() + .name("opentelemetry-exporter".to_string()) + .spawn(move || { + while let Ok(Some(log)) = log_rx.recv() { + if let Err(err) = futures_executor::block_on(exporter.export(vec![log])) { + global::handle_error(err); + } + } + + exporter.shutdown(); + + if let Err(err) = shutdown_tx.send(()) { + global::handle_error(LogError::from(format!( + "could not send shutdown: {:?}", + err + ))); + } + }); + + SimpleLogProcessor { + sender: log_tx, + shutdown: shutdown_rx, + } + } +} + +impl LogProcessor for SimpleLogProcessor { + fn emit(&self, data: ResourceLog) { + if let Err(err) = self.sender.send(Some(data)) { + global::handle_error(LogError::from(format!("error processing log {:?}", err))); + } + } + + fn force_flush(&self) -> LogResult<()> { + // Ignored since all logs in Simple Processor will be exported as they ended. + Ok(()) + } + + fn shutdown(&mut self) -> LogResult<()> { + if self.sender.send(None).is_ok() { + if let Err(err) = self.shutdown.recv() { + global::handle_error(LogError::from(format!( + "error shutting down log processor: {:?}", + err + ))) + } + } + + Ok(()) + } +} + +/// A [`LogProcessor`] that asynchronously buffers log records and reports +/// them at a preconfigured interval. 
+pub struct BatchLogProcessor { + message_sender: R::Sender, +} + +impl Debug for BatchLogProcessor { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.debug_struct("BatchLogProcessor") + .field("message_sender", &self.message_sender) + .finish() + } +} + +impl LogProcessor for BatchLogProcessor { + fn emit(&self, data: ResourceLog) { + let result = self.message_sender.try_send(BatchMessage::ExportLog(data)); + + if let Err(err) = result { + global::handle_error(err); + } + } + + fn force_flush(&self) -> LogResult<()> { + let (res_sender, res_receiver) = oneshot::channel(); + self.message_sender + .try_send(BatchMessage::Flush(Some(res_sender)))?; + + futures_executor::block_on(res_receiver) + .map_err(|err| LogError::Other(err.into())) + .and_then(|identity| identity) + } + + fn shutdown(&mut self) -> LogResult<()> { + let (res_sender, res_receiver) = oneshot::channel(); + self.message_sender + .try_send(BatchMessage::Shutdown(res_sender))?; + + futures_executor::block_on(res_receiver) + .map_err(|err| LogError::Other(err.into())) + .and_then(|identity| identity) + } +} + +impl BatchLogProcessor { + pub(crate) fn new(mut exporter: Box, config: BatchConfig, runtime: R) -> Self { + let (message_sender, message_receiver) = + runtime.batch_message_channel(config.max_queue_size); + let ticker = runtime + .interval(config.scheduled_delay) + .map(|_| BatchMessage::Flush(None)); + let timeout_runtime = runtime.clone(); + + // Spawn worker process via user-defined spawn function. + runtime.spawn(Box::pin(async move { + let mut spans = Vec::new(); + let mut messages = Box::pin(stream::select(message_receiver, ticker)); + + while let Some(message) = messages.next().await { + match message { + // Span has finished, add to buffer of pending spans. 
+ BatchMessage::ExportLog(span) => { + spans.push(span); + + if spans.len() == config.max_export_batch_size { + let result = export_with_timeout( + config.max_export_timeout, + exporter.as_mut(), + &timeout_runtime, + spans.split_off(0), + ) + .await; + + if let Err(err) = result { + global::handle_error(err); + } + } + } + // Span batch interval time reached or a force flush has been invoked, export current spans. + BatchMessage::Flush(res_channel) => { + let result = export_with_timeout( + config.max_export_timeout, + exporter.as_mut(), + &timeout_runtime, + spans.split_off(0), + ) + .await; + + if let Some(channel) = res_channel { + if let Err(result) = channel.send(result) { + global::handle_error(LogError::from(format!( + "failed to send flush result: {:?}", + result + ))); + } + } else if let Err(err) = result { + global::handle_error(err); + } + } + // Stream has terminated or processor is shutdown, return to finish execution. + BatchMessage::Shutdown(ch) => { + let result = export_with_timeout( + config.max_export_timeout, + exporter.as_mut(), + &timeout_runtime, + spans.split_off(0), + ) + .await; + + exporter.shutdown(); + + if let Err(result) = ch.send(result) { + global::handle_error(LogError::from(format!( + "failed to send batch processor shutdown result: {:?}", + result + ))); + } + + break; + } + } + } + })); + + // Return batch processor with link to worker + BatchLogProcessor { message_sender } + } + + /// Create a new batch processor builder + pub fn builder(exporter: E, runtime: R) -> BatchLogProcessorBuilder + where + E: LogExporter, + { + BatchLogProcessorBuilder { + exporter, + config: BatchConfig::default(), + runtime, + } + } +} + +async fn export_with_timeout( + time_out: Duration, + exporter: &mut E, + runtime: &R, + batch: Vec, +) -> ExportResult +where + R: LogRuntime, + E: LogExporter + ?Sized, +{ + if batch.is_empty() { + return Ok(()); + } + + let export = exporter.export(batch); + let timeout = runtime.delay(time_out); + 
pin_mut!(export); + pin_mut!(timeout); + match future::select(export, timeout).await { + Either::Left((export_res, _)) => export_res, + Either::Right((_, _)) => ExportResult::Err(LogError::ExportTimedOut(time_out)), + } +} + +/// Batch log processor configuration +#[derive(Debug)] +pub struct BatchConfig { + /// The maximum queue size to buffer logs for delayed processing. If the + /// queue gets full it drops the logs. The default value of is 2048. + max_queue_size: usize, + + /// The delay interval in milliseconds between two consecutive processing + /// of batches. The default value is 5 seconds. + scheduled_delay: Duration, + + /// The maximum number of logs to process in a single batch. If there are + /// more than one batch worth of logs then it processes multiple batches + /// of logs one batch after the other without any delay. The default value + /// is 512. + max_export_batch_size: usize, + + /// The maximum duration to export a batch of data. + max_export_timeout: Duration, +} + +impl Default for BatchConfig { + fn default() -> Self { + BatchConfig { + max_queue_size: 2_048, + scheduled_delay: Duration::from_millis(5_000), + max_export_batch_size: 512, + max_export_timeout: Duration::from_millis(30_000), + } + } +} + +/// A builder for creating [`BatchLogProcessor`] instances. +/// +#[derive(Debug)] +pub struct BatchLogProcessorBuilder { + exporter: E, + config: BatchConfig, + runtime: R, +} + +impl BatchLogProcessorBuilder +where + E: LogExporter + 'static, + R: LogRuntime, +{ + /// Set max queue size for batches + pub fn with_max_queue_size(self, size: usize) -> Self { + let mut config = self.config; + config.max_queue_size = size; + + BatchLogProcessorBuilder { config, ..self } + } + + /// Set scheduled delay for batches + pub fn with_scheduled_delay(self, delay: Duration) -> Self { + let mut config = self.config; + config.scheduled_delay = delay; + + BatchLogProcessorBuilder { config, ..self } + } + + /// Set max timeout for exporting. 
+ pub fn with_max_timeout(self, timeout: Duration) -> Self { + let mut config = self.config; + config.max_export_timeout = timeout; + + BatchLogProcessorBuilder { config, ..self } + } + + /// Set max export size for batches, should always less than or equals to max queue size. + /// + /// If input is larger than max queue size, will lower it to be equal to max queue size + pub fn with_max_export_batch_size(self, size: usize) -> Self { + let mut config = self.config; + if size > config.max_queue_size { + config.max_export_batch_size = config.max_queue_size; + } else { + config.max_export_batch_size = size; + } + + BatchLogProcessorBuilder { config, ..self } + } + + /// Build a batch processor + pub fn build(self) -> BatchLogProcessor { + BatchLogProcessor::new(Box::new(self.exporter), self.config, self.runtime) + } +} + +/// Messages sent between application thread and batch log processor's work thread. +#[allow(clippy::large_enum_variant)] +#[derive(Debug)] +pub enum BatchMessage { + /// Export logs, usually called when the log is emitted. + ExportLog(ResourceLog), + /// Flush the current buffer to the backend, it can be triggered by + /// pre configured interval or a call to `force_push` function. + Flush(Option>), + /// Shut down the worker thread, push all logs in buffer to the backend. + Shutdown(oneshot::Sender), +} diff --git a/opentelemetry-sdk/src/log/mod.rs b/opentelemetry-sdk/src/log/mod.rs new file mode 100644 index 0000000000..d8a7ef3428 --- /dev/null +++ b/opentelemetry-sdk/src/log/mod.rs @@ -0,0 +1,13 @@ +//! 
# OpenTelemetry Log SDK + +mod config; +mod log_emitter; +mod log_processor; +mod record; +mod runtime; + +pub use config::*; +pub use log_emitter::*; +pub use log_processor::*; +pub use record::*; +pub use runtime::*; diff --git a/opentelemetry-sdk/src/log/record.rs b/opentelemetry-sdk/src/log/record.rs new file mode 100644 index 0000000000..ea22c2e029 --- /dev/null +++ b/opentelemetry-sdk/src/log/record.rs @@ -0,0 +1,91 @@ +use std::{borrow::Cow, collections::BTreeMap, time::SystemTime}; + +use opentelemetry_api::trace::{SpanId, TraceFlags, TraceId}; + +#[derive(Debug, Clone, Default)] +#[non_exhaustive] +/// LogRecord represents all data carrier by a log record, and +/// is provided to `LogExporter`s as input. +pub struct LogRecord { + /// Record timestamp + pub timestamp: Option, + + /// Trace context for logs associated with spans + pub trace_context: Option, + + /// The original severity string from the source + pub severity_text: Option>, + /// The corresponding severity value, normalized + pub severity_number: Option, + + /// Record name + pub name: Option>, + /// Record body + pub body: Option, + + /// Resource attributes for the entity that produced this record + pub resource: Option, Any>>, + /// Additional attributes associated with this record + pub attributes: Option, Any>>, +} + +/// TraceContext stores the trace data for logs that have an associated +/// span. +#[derive(Debug, Clone)] +#[non_exhaustive] +pub struct TraceContext { + /// Trace id + pub trace_id: TraceId, + /// Span Id + pub span_id: SpanId, + /// Trace flags + pub trace_flags: Option, +} + +/// Value types for representing arbitrary values in a log record. +#[derive(Debug, Clone)] +pub enum Any { + /// An integer value + Int(i64), + /// A double value + Double(f64), + /// A string value + String(String), + /// A boolean value + Boolean(bool), + /// A byte array + Bytes(Vec), + /// An array of `Any` values + ListAny(Vec), + /// A map of string keys to `Any` values, arbitrarily nested. 
+ Map(BTreeMap, Any>), +} + +/// A normalized severity value. +#[derive(Debug, Copy, Clone)] +pub enum Severity { + Trace = 1, + Trace2 = 2, + Trace3 = 3, + Trace4 = 4, + Debug = 5, + Debug2 = 6, + Debug3 = 7, + Debug4 = 8, + Info = 9, + Info2 = 10, + Info3 = 11, + Info4 = 12, + Warn = 13, + Warn2 = 14, + Warn3 = 15, + Warn4 = 16, + Error = 17, + Error2 = 18, + Error3 = 19, + Error4 = 20, + Fatal = 21, + Fatal2 = 22, + Fatal3 = 23, + Fatal4 = 24, +} diff --git a/opentelemetry-sdk/src/log/runtime.rs b/opentelemetry-sdk/src/log/runtime.rs new file mode 100644 index 0000000000..e708ae919b --- /dev/null +++ b/opentelemetry-sdk/src/log/runtime.rs @@ -0,0 +1,286 @@ +//! # Log Runtime +//! Log runtime is an extension to [`Runtime`]. Currently it provides a channel that used +//! by [`BatchLognProcessor`]. +//! +//! [`BatchLogProcessor`]: crate::log::BatchLogProcessor +//! [`Runtime`]: crate::runtime::Runtime +use crate::log::BatchMessage; +#[cfg(feature = "rt-async-std")] +use crate::runtime::AsyncStd; +use crate::runtime::Runtime; +#[cfg(feature = "rt-tokio")] +use crate::runtime::Tokio; +#[cfg(feature = "rt-tokio-current-thread")] +use crate::runtime::TokioCurrentThread; +use futures_util::stream::Stream; +use opentelemetry_api::log::LogError; +use std::fmt::Debug; + +#[cfg(any( + feature = "rt-tokio", + feature = "rt-tokio-current-thread", + feature = "rt-async-std" +))] +const CHANNEL_FULL_ERROR: &str = + "cannot send log record to the batch log processor because the channel is full"; +#[cfg(any( + feature = "rt-tokio", + feature = "rt-tokio-current-thread", + feature = "rt-async-std" +))] +const CHANNEL_CLOSED_ERROR: &str = + "cannot send log record to the batch log processor because the channel is closed"; + +/// Log runtime is an extension to [`Runtime`]. Currently it provides a channel that used +/// by [`BatchLogProcessor`]. 
+/// +/// [`BatchSpanProcessor`]: crate::log::BatchLogProcessor +/// [`Runtime`]: crate::runtime::Runtime +pub trait LogRuntime: Runtime { + /// A future stream to receive the batch messages from channels. + type Receiver: Stream + Send; + + /// A batch messages sender that could be sent across thread safely. + type Sender: TrySend + Debug; + + /// Return the sender and receiver used to send batch message between tasks. + fn batch_message_channel(&self, capacity: usize) -> (Self::Sender, Self::Receiver); +} + +/// TrySend is an abstraction of sender that is capable to send BatchMessage with reference. +pub trait TrySend: Sync + Send { + /// Try to send one batch message to worker thread. + /// + /// It can fail because either the receiver has closed or the buffer is full. + fn try_send(&self, item: BatchMessage) -> Result<(), LogError>; +} + +#[cfg(any(feature = "rt-tokio", feature = "rt-tokio-current-thread"))] +impl TrySend for tokio::sync::mpsc::Sender { + fn try_send(&self, item: BatchMessage) -> Result<(), LogError> { + self.try_send(item).map_err(|err| match err { + tokio::sync::mpsc::error::TrySendError::Full(_) => LogError::from(CHANNEL_FULL_ERROR), + tokio::sync::mpsc::error::TrySendError::Closed(_) => { + LogError::from(CHANNEL_CLOSED_ERROR) + } + }) + } +} + +#[cfg(feature = "rt-tokio")] +#[cfg_attr(docsrs, doc(cfg(feature = "rt-tokio")))] +impl LogRuntime for Tokio { + type Receiver = tokio_stream::wrappers::ReceiverStream; + type Sender = tokio::sync::mpsc::Sender; + + fn batch_message_channel(&self, capacity: usize) -> (Self::Sender, Self::Receiver) { + let (sender, receiver) = tokio::sync::mpsc::channel(capacity); + ( + sender, + tokio_stream::wrappers::ReceiverStream::new(receiver), + ) + } +} + +#[cfg(feature = "rt-tokio-current-thread")] +#[cfg_attr(docsrs, doc(cfg(feature = "rt-tokio-current-thread")))] +impl LogRuntime for TokioCurrentThread { + type Receiver = tokio_stream::wrappers::ReceiverStream; + type Sender = tokio::sync::mpsc::Sender; + 
+ fn batch_message_channel(&self, capacity: usize) -> (Self::Sender, Self::Receiver) { + let (sender, receiver) = tokio::sync::mpsc::channel(capacity); + ( + sender, + tokio_stream::wrappers::ReceiverStream::new(receiver), + ) + } +} + +#[cfg(feature = "rt-async-std")] +impl TrySend for async_std::channel::Sender { + fn try_send(&self, item: BatchMessage) -> Result<(), LogError> { + self.try_send(item).map_err(|err| match err { + async_std::channel::TrySendError::Full(_) => LogError::from(CHANNEL_FULL_ERROR), + async_std::channel::TrySendError::Closed(_) => LogError::from(CHANNEL_CLOSED_ERROR), + }) + } +} + +#[cfg(feature = "rt-async-std")] +#[cfg_attr(docsrs, doc(cfg(feature = "rt-async-std")))] +impl LogRuntime for AsyncStd { + type Receiver = async_std::channel::Receiver; + type Sender = async_std::channel::Sender; + + fn batch_message_channel(&self, capacity: usize) -> (Self::Sender, Self::Receiver) { + async_std::channel::bounded(capacity) + } +} + +#[cfg(test)] +// Note that all tests here should be marked as ignore so that it won't be picked up by default We +// need to run those tests one by one as the GlobalLogrProvider is a shared object between +// threads Use cargo test -- --ignored --test-threads=1 to run those tests. 
+mod tests { + #[cfg(any(feature = "rt-tokio", feature = "rt-tokio-current-thread"))] + use crate::log::LogRuntime; + #[cfg(any(feature = "rt-tokio", feature = "rt-tokio-current-thread"))] + use crate::runtime; + #[cfg(any(feature = "rt-tokio", feature = "rt-tokio-current-thread"))] + use opentelemetry_api::global::*; + use std::sync::Arc; + use std::{fmt::Debug, io::Write, sync::Mutex}; + + #[derive(Debug)] + struct AssertWriter { + buf: Arc>>, + } + + #[cfg(any(feature = "rt-tokio", feature = "rt-tokio-current-thread"))] + impl AssertWriter { + fn new() -> AssertWriter { + AssertWriter { + buf: Arc::new(Mutex::new(Vec::new())), + } + } + + fn len(&self) -> usize { + self.buf + .lock() + .expect("cannot acquire the lock of assert writer") + .len() + } + } + + impl Write for AssertWriter { + fn write(&mut self, buf: &[u8]) -> std::io::Result { + let mut buffer = self + .buf + .lock() + .expect("cannot acquire the lock of assert writer"); + buffer.write(buf) + } + + fn flush(&mut self) -> std::io::Result<()> { + let mut buffer = self + .buf + .lock() + .expect("cannot acquire the lock of assert writer"); + buffer.flush() + } + } + + impl Clone for AssertWriter { + fn clone(&self) -> Self { + AssertWriter { + buf: self.buf.clone(), + } + } + } + + #[cfg(any(feature = "rt-tokio", feature = "rt-tokio-current-thread"))] + fn build_batch_log_emitter_provider( + assert_writer: AssertWriter, + runtime: R, + ) -> crate::log::LogEmitterProvider { + use crate::log::LogEmitterProvider; + let exporter = crate::export::log::stdout::Exporter::new(assert_writer, true); + LogEmitterProvider::builder() + .with_batch_exporter(exporter, runtime) + .build() + } + + #[cfg(any(feature = "rt-tokio", feature = "rt-tokio-current-thread"))] + fn build_simple_log_emitter_provider( + assert_writer: AssertWriter, + ) -> crate::log::LogEmitterProvider { + use crate::log::LogEmitterProvider; + let exporter = crate::export::log::stdout::Exporter::new(assert_writer, true); + 
LogEmitterProvider::builder() + .with_simple_exporter(exporter) + .build() + } + + #[cfg(any(feature = "rt-tokio", feature = "rt-tokio-current-thread"))] + async fn test_create_provider_in_tokio(runtime: R) -> AssertWriter { + use crate::log::LogRecord; + + let buffer = AssertWriter::new(); + let provider = build_batch_log_emitter_provider(buffer.clone(), runtime); + let emitter = provider.log_emitter("opentelemetery"); + + emitter.emit(LogRecord::default()); + + buffer + } + + // When using `tokio::spawn` to spawn the worker task in batch processor + // + // multiple -> no shut down -> not export + // multiple -> shut down -> export + // single -> no shutdown -> not export + // single -> shutdown -> hang forever + + // When using |fut| tokio::task::spawn_blocking(|| futures::executor::block_on(fut)) + // to spawn the worker task in batch processor + // + // multiple -> no shutdown -> hang forever + // multiple -> shut down -> export + // single -> shut down -> export + // single -> no shutdown -> hang forever + + // Test if the multiple thread tokio runtime could exit successfully when not force flushing logs + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] + #[ignore = "requires --test-threads=1"] + #[cfg(feature = "rt-tokio")] + async fn test_create_provider_multiple_thread_tokio() { + let assert_writer = test_create_provider_in_tokio(runtime::Tokio).await; + assert_eq!(assert_writer.len(), 0); + } + + // Test if the multiple thread tokio runtime could exit successfully when force flushing logs + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] + #[ignore = "requires --test-threads=1"] + #[cfg(feature = "rt-tokio")] + async fn test_create_provider_multiple_thread_tokio_shutdown() { + let assert_writer = test_create_provider_in_tokio(runtime::Tokio).await; + assert!(assert_writer.len() > 0); + } + + // Test use simple processor in single thread tokio runtime. 
+ // Expected to see the logs being exported to buffer + #[tokio::test] + #[ignore = "requires --test-threads=1"] + #[cfg(feature = "rt-tokio")] + async fn test_create_provider_single_thread_tokio_with_simple_processor() { + use crate::log::LogRecord; + + let assert_writer = AssertWriter::new(); + let provider = build_simple_log_emitter_provider(assert_writer.clone()); + let emitter = provider.log_emitter("opentelemetry"); + + emitter.emit(LogRecord::default()); + + assert!(assert_writer.len() > 0); + } + + // Test if the single thread tokio runtime could exit successfully when not force flushing logs + #[tokio::test] + #[ignore = "requires --test-threads=1"] + #[cfg(feature = "rt-tokio-current-thread")] + async fn test_create_provider_single_thread_tokio() { + let assert_writer = test_create_provider_in_tokio(runtime::TokioCurrentThread).await; + assert_eq!(assert_writer.len(), 0) + } + + // Test if the single thread tokio runtime could exit successfully when force flushing logs + #[tokio::test] + #[ignore = "requires --test-threads=1"] + #[cfg(feature = "rt-tokio-current-thread")] + async fn test_create_provider_single_thread_tokio_shutdown() { + let assert_writer = test_create_provider_in_tokio(runtime::TokioCurrentThread).await; + shutdown_tracer_provider(); + assert!(assert_writer.len() > 0); + } +} diff --git a/opentelemetry/Cargo.toml b/opentelemetry/Cargo.toml index 25d15b0b2c..fc0ea962a3 100644 --- a/opentelemetry/Cargo.toml +++ b/opentelemetry/Cargo.toml @@ -28,6 +28,7 @@ opentelemetry_sdk = { version = "0.19", path = "../opentelemetry-sdk" } default = ["trace"] trace = ["opentelemetry_api/trace", "opentelemetry_sdk/trace"] metrics = ["opentelemetry_api/metrics", "opentelemetry_sdk/metrics"] +log = ["opentelemetry_sdk/log"] testing = ["opentelemetry_api/testing", "opentelemetry_sdk/testing"] rt-tokio = ["opentelemetry_sdk/rt-tokio"] rt-tokio-current-thread = ["opentelemetry_sdk/rt-tokio-current-thread"] From 56edfb588e36a619ac4440b41d1529793c06a9b0 Mon 
Sep 17 00:00:00 2001 From: Vibhav Pant Date: Sun, 13 Mar 2022 19:23:33 +0530 Subject: [PATCH 02/51] Add From implementations for Any. --- opentelemetry-sdk/src/log/record.rs | 33 +++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/opentelemetry-sdk/src/log/record.rs b/opentelemetry-sdk/src/log/record.rs index ea22c2e029..456be5d048 100644 --- a/opentelemetry-sdk/src/log/record.rs +++ b/opentelemetry-sdk/src/log/record.rs @@ -61,6 +61,39 @@ pub enum Any { Map(BTreeMap, Any>), } +macro_rules! impl_trivial_from { + ($t:ty, $variant:path) => { + impl From<$t> for Any { + fn from(val: $t) -> Any { + $variant(val.into()) + } + } + }; +} + +impl_trivial_from!(i8, Any::Int); +impl_trivial_from!(i16, Any::Int); +impl_trivial_from!(i32, Any::Int); +impl_trivial_from!(i64, Any::Int); + +impl_trivial_from!(u8, Any::Int); +impl_trivial_from!(u16, Any::Int); +impl_trivial_from!(u32, Any::Int); + +impl_trivial_from!(String, Any::String); +impl_trivial_from!(Cow<'static, str>, Any::String); +impl_trivial_from!(&str, Any::String); + +impl_trivial_from!(bool, Any::Boolean); + +impl> From> for Any { + /// Converts a list of `Into` values into a [`Any::ListAny`] + /// value. + fn from(val: Vec) -> Any { + Any::ListAny(val.into_iter().map(Into::into).collect()) + } +} + /// A normalized severity value. #[derive(Debug, Copy, Clone)] pub enum Severity { From 54a41da62abf12ea8a74820d0686f11b56705c8e Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Sun, 13 Mar 2022 19:26:43 +0530 Subject: [PATCH 03/51] Fix docs links. 
--- opentelemetry-sdk/src/export/log/stdout.rs | 4 ++-- opentelemetry-sdk/src/log/log_emitter.rs | 2 +- opentelemetry-sdk/src/log/log_processor.rs | 2 +- opentelemetry-sdk/src/log/runtime.rs | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/opentelemetry-sdk/src/export/log/stdout.rs b/opentelemetry-sdk/src/export/log/stdout.rs index 39ee2e5bc5..0e44c07b89 100644 --- a/opentelemetry-sdk/src/export/log/stdout.rs +++ b/opentelemetry-sdk/src/export/log/stdout.rs @@ -102,9 +102,9 @@ where } } -/// A [`SpanExporter`] that writes to [`Stdout`] or other configured [`Write`]. +/// A [`LogExporter`] that writes to [`Stdout`] or other configured [`Write`]. /// -/// [`SpanExporter`]: super::SpanExporter +/// [`LogExporter`]: super::LogExporter /// [`Write`]: std::io::Write /// [`Stdout`]: std::io::Stdout #[derive(Debug)] diff --git a/opentelemetry-sdk/src/log/log_emitter.rs b/opentelemetry-sdk/src/log/log_emitter.rs index 18dff84e4c..3d25f4a0a7 100644 --- a/opentelemetry-sdk/src/log/log_emitter.rs +++ b/opentelemetry-sdk/src/log/log_emitter.rs @@ -171,7 +171,7 @@ impl Builder { #[derive(Debug)] /// The object for emitting [`LogRecord`]s. /// -/// [`LogRecord`]: crate::LogRecord +/// [`LogRecord`]: crate::log::LogRecord pub struct LogEmitter { instrumentation_lib: InstrumentationLibrary, provider: Weak, diff --git a/opentelemetry-sdk/src/log/log_processor.rs b/opentelemetry-sdk/src/log/log_processor.rs index 40cea3b01f..f5e705a3d4 100644 --- a/opentelemetry-sdk/src/log/log_processor.rs +++ b/opentelemetry-sdk/src/log/log_processor.rs @@ -20,7 +20,7 @@ use std::{ /// The interface for plugging into a [`LogEmitter`]. /// -/// [`LogEmitter`]: crate::LogEmitter +/// [`LogEmitter`]: crate::log::LogEmitter pub trait LogProcessor: Debug { /// Called when a log record is ready to processed and exported. 
fn emit(&self, data: ResourceLog); diff --git a/opentelemetry-sdk/src/log/runtime.rs b/opentelemetry-sdk/src/log/runtime.rs index e708ae919b..fb368f45da 100644 --- a/opentelemetry-sdk/src/log/runtime.rs +++ b/opentelemetry-sdk/src/log/runtime.rs @@ -1,6 +1,6 @@ //! # Log Runtime //! Log runtime is an extension to [`Runtime`]. Currently it provides a channel that used -//! by [`BatchLognProcessor`]. +//! by [`BatchLogProcessor`]. //! //! [`BatchLogProcessor`]: crate::log::BatchLogProcessor //! [`Runtime`]: crate::runtime::Runtime @@ -34,7 +34,7 @@ const CHANNEL_CLOSED_ERROR: &str = /// Log runtime is an extension to [`Runtime`]. Currently it provides a channel that used /// by [`BatchLogProcessor`]. /// -/// [`BatchSpanProcessor`]: crate::log::BatchLogProcessor +/// [`BatchLogProcessor`]: crate::log::BatchLogProcessor /// [`Runtime`]: crate::runtime::Runtime pub trait LogRuntime: Runtime { /// A future stream to receive the batch messages from channels. From 6d0875cef14d502df388069a14b958c66331e20f Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Sun, 13 Mar 2022 19:37:06 +0530 Subject: [PATCH 04/51] Add Into impls for f64 and f32. --- opentelemetry-sdk/src/log/record.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/opentelemetry-sdk/src/log/record.rs b/opentelemetry-sdk/src/log/record.rs index 456be5d048..88b45ab636 100644 --- a/opentelemetry-sdk/src/log/record.rs +++ b/opentelemetry-sdk/src/log/record.rs @@ -80,6 +80,9 @@ impl_trivial_from!(u8, Any::Int); impl_trivial_from!(u16, Any::Int); impl_trivial_from!(u32, Any::Int); +impl_trivial_from!(f64, Any::Double); +impl_trivial_from!(f32, Any::Double); + impl_trivial_from!(String, Any::String); impl_trivial_from!(Cow<'static, str>, Any::String); impl_trivial_from!(&str, Any::String); From 1a2b5322facf832af12883fe6ae9fd022d8f43f3 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Sun, 13 Mar 2022 21:57:56 +0530 Subject: [PATCH 05/51] Support converting serde_json::Value to Any. 
--- opentelemetry-sdk/Cargo.toml | 2 +- opentelemetry-sdk/src/log/record.rs | 33 +++++++++++++++++++++++++++-- 2 files changed, 32 insertions(+), 3 deletions(-) diff --git a/opentelemetry-sdk/Cargo.toml b/opentelemetry-sdk/Cargo.toml index 00b8ded225..64549fdec6 100644 --- a/opentelemetry-sdk/Cargo.toml +++ b/opentelemetry-sdk/Cargo.toml @@ -46,7 +46,7 @@ default = ["trace"] trace = ["opentelemetry_api/trace", "crossbeam-channel", "rand", "async-trait", "percent-encoding"] jaeger_remote_sampler = ["trace", "opentelemetry-http", "http", "serde", "serde_json", "url"] metrics = ["opentelemetry_api/metrics", "regex", "ordered-float"] -log = ["opentelemetry_api/log", "crossbeam-channel", "async-trait"] +log = ["opentelemetry_api/log", "crossbeam-channel", "async-trait", "serde_json"] testing = ["opentelemetry_api/testing", "trace", "metrics", "log", "rt-async-std", "rt-tokio", "rt-tokio-current-thread", "tokio/macros", "tokio/rt-multi-thread"] rt-tokio = ["tokio", "tokio-stream"] rt-tokio-current-thread = ["tokio", "tokio-stream"] diff --git a/opentelemetry-sdk/src/log/record.rs b/opentelemetry-sdk/src/log/record.rs index 88b45ab636..c5dd13c3cf 100644 --- a/opentelemetry-sdk/src/log/record.rs +++ b/opentelemetry-sdk/src/log/record.rs @@ -1,6 +1,6 @@ -use std::{borrow::Cow, collections::BTreeMap, time::SystemTime}; - use opentelemetry_api::trace::{SpanId, TraceFlags, TraceId}; +use serde_json::{Number, Value}; +use std::{borrow::Cow, collections::BTreeMap, time::SystemTime}; #[derive(Debug, Clone, Default)] #[non_exhaustive] @@ -97,6 +97,35 @@ impl> From> for Any { } } +impl From for Any { + fn from(val: Value) -> Any { + match val { + Value::Null => Any::String("".to_string()), + Value::Bool(b) => Any::Boolean(b), + Value::Number(number) => number.into(), + Value::String(string) => string.into(), + Value::Array(vec) => vec.into(), + Value::Object(map) => Any::Map( + map.into_iter() + .map(|(key, value)| (key.into(), value.into())) + .collect(), + ), + } + } +} + +impl 
From for Any { + fn from(val: Number) -> Any { + if val.is_u64() { + Any::Int(val.as_u64().unwrap() as i64) + } else if val.is_i64() { + Any::Int(val.as_i64().unwrap()) + } else { + Any::Double(val.as_f64().unwrap()) + } + } +} + /// A normalized severity value. #[derive(Debug, Copy, Clone)] pub enum Severity { From c7631dd13979de1317bcc7d27586e55ce8f68b52 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Sun, 13 Mar 2022 21:58:08 +0530 Subject: [PATCH 06/51] Fix docs link. --- opentelemetry-otlp/src/log.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opentelemetry-otlp/src/log.rs b/opentelemetry-otlp/src/log.rs index 4343c86632..8cbeb2bfc1 100644 --- a/opentelemetry-otlp/src/log.rs +++ b/opentelemetry-otlp/src/log.rs @@ -418,7 +418,7 @@ impl OtlpLogPipeline { /// Returns a [`LogEmitter`] with the name `opentelemetry-otlp` and the /// current crate version, using the configured log exporter. /// - /// [`LogEmitter`]: opentelemetry::log::LogEmitter + /// [`LogEmitter`]: opentelemetry::sdk::log::LogEmitter pub fn simple(self) -> Result { Ok(build_simple_with_exporter( self.exporter_builder From 8c23c09216d37011325ca60741e0e59c7f599e1d Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Mon, 14 Mar 2022 13:13:27 +0530 Subject: [PATCH 07/51] Remove unused dependency. 
--- opentelemetry-proto/src/transform/traces.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opentelemetry-proto/src/transform/traces.rs b/opentelemetry-proto/src/transform/traces.rs index 98234c814c..d23ef02aca 100644 --- a/opentelemetry-proto/src/transform/traces.rs +++ b/opentelemetry-proto/src/transform/traces.rs @@ -1,6 +1,6 @@ use crate::transform::common::to_nanos; use opentelemetry::sdk::export::trace::SpanData; -use opentelemetry::trace::{Link, SpanId, SpanKind, Status}; +use opentelemetry::trace::{Link, SpanId, SpanKind}; #[cfg(feature = "gen-tonic")] pub mod tonic { From 085b5987390a914ba6d8ebcb6f502eb5e9f0f4ac Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Mon, 14 Mar 2022 18:44:27 +0530 Subject: [PATCH 08/51] Add LogRecordBuilder, documentation for Severity. --- opentelemetry-sdk/src/log/record.rs | 155 +++++++++++++++++++++++++++- 1 file changed, 154 insertions(+), 1 deletion(-) diff --git a/opentelemetry-sdk/src/log/record.rs b/opentelemetry-sdk/src/log/record.rs index c5dd13c3cf..bdcc2df4c8 100644 --- a/opentelemetry-sdk/src/log/record.rs +++ b/opentelemetry-sdk/src/log/record.rs @@ -1,4 +1,4 @@ -use opentelemetry_api::trace::{SpanId, TraceFlags, TraceId}; +use opentelemetry_api::trace::{SpanContext, SpanId, TraceContextExt, TraceFlags, TraceId}; use serde_json::{Number, Value}; use std::{borrow::Cow, collections::BTreeMap, time::SystemTime}; @@ -129,28 +129,181 @@ impl From for Any { /// A normalized severity value. 
#[derive(Debug, Copy, Clone)] pub enum Severity { + /// TRACE Trace = 1, + /// TRACE2 Trace2 = 2, + /// TRACE3 Trace3 = 3, + /// TRACE4 Trace4 = 4, + /// DEBUG Debug = 5, + /// DEBUG2 Debug2 = 6, + /// DEBUG3 Debug3 = 7, + /// DEBUG4 Debug4 = 8, + /// INFO Info = 9, + /// INFO2 Info2 = 10, + /// INFO3 Info3 = 11, + /// INFO4 Info4 = 12, + /// WARN Warn = 13, + /// WARN2 Warn2 = 14, + /// WARN3 Warn3 = 15, + /// WARN4 Warn4 = 16, + /// ERROR Error = 17, + /// ERROR2 Error2 = 18, + /// ERROR3 Error3 = 19, + /// ERROR4 Error4 = 20, + /// FATAL Fatal = 21, + /// FATAL2 Fatal2 = 22, + /// FATAL3 Fatal3 = 23, + /// FATAL4 Fatal4 = 24, } + +/// A builder for [`LogRecord`] values. +#[derive(Debug, Clone)] +pub struct LogRecordBuilder { + record: LogRecord, +} + +impl LogRecordBuilder { + /// Assign timestamp + pub fn with_timestamp(self, timestamp: SystemTime) -> Self { + Self { + record: LogRecord { + timestamp: Some(timestamp), + ..self.record + }, + } + } + + /// Assign the record's [`TraceContext`] + pub fn with_span_context(self, span_context: &SpanContext) -> Self { + Self { + record: LogRecord { + trace_context: Some(TraceContext { + span_id: span_context.span_id(), + trace_id: span_context.trace_id(), + trace_flags: Some(span_context.trace_flags()), + }), + ..self.record + }, + } + } + + /// Assign the record's [`TraceContext`] from a `TraceContextExt` trait + pub fn with_context(self, context: &T) -> Self + where + T: TraceContextExt, + { + if context.has_active_span() { + self.with_span_context(context.span().span_context()) + } else { + self + } + } + + /// Assign severity text + pub fn with_severity_text(self, severity: T) -> Self + where + T: Into>, + { + Self { + record: LogRecord { + severity_text: Some(severity.into()), + ..self.record + }, + } + } + + /// Assign severity number + pub fn with_severity_number(self, severity: Severity) -> Self { + Self { + record: LogRecord { + severity_number: Some(severity), + ..self.record + }, + } + } + + /// Assign name 
+ pub fn with_name(self, name: T) -> Self + where + T: Into>, + { + Self { + record: LogRecord { + name: Some(name.into()), + ..self.record + }, + } + } + + /// Assign body + pub fn with_body(self, body: Any) -> Self { + Self { + record: LogRecord { + body: Some(body), + ..self.record + }, + } + } + + /// Assign resource + pub fn with_resource(self, resource: BTreeMap, Any>) -> Self { + Self { + record: LogRecord { + resource: Some(resource), + ..self.record + }, + } + } + + /// Assign attributes, overriding previously set attributes + pub fn with_attributes(self, attributes: BTreeMap, Any>) -> Self { + Self { + record: LogRecord { + attributes: Some(attributes), + ..self.record + }, + } + } + + /// Set a single attribute for this record + pub fn with_attribute(mut self, key: T, value: Any) -> Self + where + T: Into>, + { + if let Some(ref mut map) = self.record.attributes { + map.insert(key.into(), value); + } else { + self.record.attributes = Some(BTreeMap::from([(key.into(), value)])); + } + + self + } + + /// Build the record, consuming the Builder + pub fn build(self) -> LogRecord { + self.record + } +} From 5c8539ec4356149d1afc6feaeff2968240412072 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Mon, 14 Mar 2022 18:47:54 +0530 Subject: [PATCH 09/51] Add LogRecordBuilder::new(). --- opentelemetry-sdk/src/log/record.rs | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/opentelemetry-sdk/src/log/record.rs b/opentelemetry-sdk/src/log/record.rs index bdcc2df4c8..3dc449bf9e 100644 --- a/opentelemetry-sdk/src/log/record.rs +++ b/opentelemetry-sdk/src/log/record.rs @@ -29,6 +29,13 @@ pub struct LogRecord { pub attributes: Option, Any>>, } +impl LogRecord { + /// Create a [`LogRecordBuilder`] to create a new Log Record + pub fn builder() -> LogRecordBuilder { + LogRecordBuilder::new() + } +} + /// TraceContext stores the trace data for logs that have an associated /// span. 
#[derive(Debug, Clone)] @@ -186,6 +193,13 @@ pub struct LogRecordBuilder { } impl LogRecordBuilder { + /// Create a new LogRecordBuilder + pub fn new() -> Self { + Self { + record: Default::default(), + } + } + /// Assign timestamp pub fn with_timestamp(self, timestamp: SystemTime) -> Self { Self { From 1b9f870d2bb95f3a9d70b9d95125d8c4b24509ca Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Mon, 14 Mar 2022 18:56:03 +0530 Subject: [PATCH 10/51] with_body: Remove unneeded generic parameter. --- opentelemetry-sdk/src/log/record.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opentelemetry-sdk/src/log/record.rs b/opentelemetry-sdk/src/log/record.rs index 3dc449bf9e..0f1cbee22e 100644 --- a/opentelemetry-sdk/src/log/record.rs +++ b/opentelemetry-sdk/src/log/record.rs @@ -273,7 +273,7 @@ impl LogRecordBuilder { } /// Assign body - pub fn with_body(self, body: Any) -> Self { + pub fn with_body(self, body: Any) -> Self { Self { record: LogRecord { body: Some(body), From 4bb4216c3fe2201a98fb53b6ebf870e0bacbb20f Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Mon, 14 Mar 2022 19:09:43 +0530 Subject: [PATCH 11/51] Remove unneeded generic parameters. 
--- opentelemetry-sdk/src/log/record.rs | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/opentelemetry-sdk/src/log/record.rs b/opentelemetry-sdk/src/log/record.rs index 0f1cbee22e..98fbe19bbe 100644 --- a/opentelemetry-sdk/src/log/record.rs +++ b/opentelemetry-sdk/src/log/record.rs @@ -250,7 +250,7 @@ impl LogRecordBuilder { } /// Assign severity number - pub fn with_severity_number(self, severity: Severity) -> Self { + pub fn with_severity_number(self, severity: Severity) -> Self { Self { record: LogRecord { severity_number: Some(severity), @@ -303,14 +303,15 @@ impl LogRecordBuilder { } /// Set a single attribute for this record - pub fn with_attribute(mut self, key: T, value: Any) -> Self + pub fn with_attribute(mut self, key: K, value: V) -> Self where - T: Into>, + K: Into>, + V: Into, { if let Some(ref mut map) = self.record.attributes { - map.insert(key.into(), value); + map.insert(key.into(), value.into()); } else { - self.record.attributes = Some(BTreeMap::from([(key.into(), value)])); + self.record.attributes = Some(BTreeMap::from([(key.into(), value.into())])); } self From 4ebe135e638fc136c7c64a31a768d9b2cac3d286 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Tue, 15 Mar 2022 14:44:43 +0530 Subject: [PATCH 12/51] Enforce LogProcessor is Send + Sync. --- opentelemetry-sdk/src/log/log_processor.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opentelemetry-sdk/src/log/log_processor.rs b/opentelemetry-sdk/src/log/log_processor.rs index f5e705a3d4..51ccf28783 100644 --- a/opentelemetry-sdk/src/log/log_processor.rs +++ b/opentelemetry-sdk/src/log/log_processor.rs @@ -21,7 +21,7 @@ use std::{ /// The interface for plugging into a [`LogEmitter`]. /// /// [`LogEmitter`]: crate::log::LogEmitter -pub trait LogProcessor: Debug { +pub trait LogProcessor: Send + Sync + Debug { /// Called when a log record is ready to processed and exported. 
fn emit(&self, data: ResourceLog); /// Force the logs lying in the cache to be exported. From d0016f140f6ace4557411eef36cd906fa809b67d Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Tue, 15 Mar 2022 14:46:10 +0530 Subject: [PATCH 13/51] export: Use the correct variables. --- opentelemetry-sdk/src/export/log/stdout.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/opentelemetry-sdk/src/export/log/stdout.rs b/opentelemetry-sdk/src/export/log/stdout.rs index 0e44c07b89..81828d6b4f 100644 --- a/opentelemetry-sdk/src/export/log/stdout.rs +++ b/opentelemetry-sdk/src/export/log/stdout.rs @@ -130,14 +130,14 @@ where { /// Export spans to stdout async fn export(&mut self, batch: Vec) -> ExportResult { - for span in batch { + for log in batch { if self.pretty_print { self.writer - .write_all(format!("{:#?}\n", span).as_bytes()) + .write_all(format!("{:#?}\n", log).as_bytes()) .map_err(|err| LogError::ExportFailed(Box::new(Error::from(err))))?; } else { self.writer - .write_all(format!("{:?}\n", span).as_bytes()) + .write_all(format!("{:?}\n", log).as_bytes()) .map_err(|err| LogError::ExportFailed(Box::new(Error::from(err))))?; } } From 10c69c26d865fbabed45caa37d4684670e1e6bba Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Tue, 15 Mar 2022 14:46:32 +0530 Subject: [PATCH 14/51] LogEmitterProvider: Enable Clone. Add shutdown, try_shutdown. --- opentelemetry-sdk/src/log/log_emitter.rs | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/opentelemetry-sdk/src/log/log_emitter.rs b/opentelemetry-sdk/src/log/log_emitter.rs index 3d25f4a0a7..3efa19f0f8 100644 --- a/opentelemetry-sdk/src/log/log_emitter.rs +++ b/opentelemetry-sdk/src/log/log_emitter.rs @@ -11,7 +11,7 @@ use std::{ time::Duration, }; -#[derive(Debug)] +#[derive(Debug, Clone)] /// Creator for `LogEmitter` instances. 
pub struct LogEmitterProvider { inner: Arc, @@ -73,6 +73,24 @@ impl LogEmitterProvider { .map(|processor| processor.force_flush()) .collect() } + + /// Shuts down this `LogEmitterProvider`, panicking on failure. + pub fn shutdown(&mut self) -> Vec> { + self.try_shutdown() + .expect("canont shutdown LogEmitterProvider when child LogEmitters are still active") + } + + /// Attempts to shutdown this `LogEmitterProvider`, succeeding only when + /// all cloned `LogEmitterProvider` values have been dropped. + pub fn try_shutdown(&mut self) -> Option>> { + Arc::get_mut(&mut self.inner).map(|inner| { + inner + .processors + .iter_mut() + .map(|processor| processor.shutdown()) + .collect() + }) + } } #[derive(Debug)] From 2f0ae2c38c748448d855bd1e74698dd54a97f495 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Tue, 15 Mar 2022 18:28:38 +0530 Subject: [PATCH 15/51] Remove From implementation for serde_json values. --- opentelemetry-sdk/src/log/record.rs | 30 ----------------------------- 1 file changed, 30 deletions(-) diff --git a/opentelemetry-sdk/src/log/record.rs b/opentelemetry-sdk/src/log/record.rs index 98fbe19bbe..5a95bb3ea5 100644 --- a/opentelemetry-sdk/src/log/record.rs +++ b/opentelemetry-sdk/src/log/record.rs @@ -1,5 +1,4 @@ use opentelemetry_api::trace::{SpanContext, SpanId, TraceContextExt, TraceFlags, TraceId}; -use serde_json::{Number, Value}; use std::{borrow::Cow, collections::BTreeMap, time::SystemTime}; #[derive(Debug, Clone, Default)] @@ -104,35 +103,6 @@ impl> From> for Any { } } -impl From for Any { - fn from(val: Value) -> Any { - match val { - Value::Null => Any::String("".to_string()), - Value::Bool(b) => Any::Boolean(b), - Value::Number(number) => number.into(), - Value::String(string) => string.into(), - Value::Array(vec) => vec.into(), - Value::Object(map) => Any::Map( - map.into_iter() - .map(|(key, value)| (key.into(), value.into())) - .collect(), - ), - } - } -} - -impl From for Any { - fn from(val: Number) -> Any { - if val.is_u64() { - 
Any::Int(val.as_u64().unwrap() as i64) - } else if val.is_i64() { - Any::Int(val.as_i64().unwrap()) - } else { - Any::Double(val.as_f64().unwrap()) - } - } -} - /// A normalized severity value. #[derive(Debug, Copy, Clone)] pub enum Severity { From ecf13cf5bea67bcc00a84ff917614cf456e128ff Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Sat, 19 Mar 2022 13:20:11 +0530 Subject: [PATCH 16/51] Fix typo. --- opentelemetry-sdk/src/log/record.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opentelemetry-sdk/src/log/record.rs b/opentelemetry-sdk/src/log/record.rs index 5a95bb3ea5..8533998b84 100644 --- a/opentelemetry-sdk/src/log/record.rs +++ b/opentelemetry-sdk/src/log/record.rs @@ -3,7 +3,7 @@ use std::{borrow::Cow, collections::BTreeMap, time::SystemTime}; #[derive(Debug, Clone, Default)] #[non_exhaustive] -/// LogRecord represents all data carrier by a log record, and +/// LogRecord represents all data carried by a log record, and /// is provided to `LogExporter`s as input. pub struct LogRecord { /// Record timestamp From 5c1b0b757c894889cb69ab156f6a96162a925fa5 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Tue, 22 Mar 2022 11:25:06 +0530 Subject: [PATCH 17/51] Add Default impl for LogRecordBuilder. --- opentelemetry-sdk/src/log/record.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/opentelemetry-sdk/src/log/record.rs b/opentelemetry-sdk/src/log/record.rs index 8533998b84..1aff57126b 100644 --- a/opentelemetry-sdk/src/log/record.rs +++ b/opentelemetry-sdk/src/log/record.rs @@ -292,3 +292,9 @@ impl LogRecordBuilder { self.record } } + +impl Default for LogRecordBuilder { + fn default() -> Self { + Self::new() + } +} From 092376fe26ba8fd23091d65d03526b7f03fac50d Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Fri, 25 Mar 2022 12:28:36 +0530 Subject: [PATCH 18/51] Update to work with opentelemetry-proto v0.14.0. 
--- opentelemetry-otlp/src/log.rs | 4 +- opentelemetry-proto/src/proto/grpcio/logs.rs | 321 ++++++++++++++---- .../src/proto/grpcio/logs_service.rs | 5 +- opentelemetry-proto/src/transform/logs.rs | 45 ++- opentelemetry-sdk/src/export/log/mod.rs | 10 +- opentelemetry-sdk/src/export/log/stdout.rs | 4 +- opentelemetry-sdk/src/log/log_emitter.rs | 4 +- opentelemetry-sdk/src/log/log_processor.rs | 30 +- opentelemetry-sdk/src/log/record.rs | 32 +- 9 files changed, 329 insertions(+), 126 deletions(-) diff --git a/opentelemetry-otlp/src/log.rs b/opentelemetry-otlp/src/log.rs index 8cbeb2bfc1..2a9e873ae9 100644 --- a/opentelemetry-otlp/src/log.rs +++ b/opentelemetry-otlp/src/log.rs @@ -56,7 +56,7 @@ use std::{ use opentelemetry::{ log::LogError, - sdk::{self, export::log::ResourceLog, log::LogRuntime}, + sdk::{self, export::log::LogData, log::LogRuntime}, }; impl OtlpPipeline { @@ -292,7 +292,7 @@ impl LogExporter { #[async_trait] impl opentelemetry::sdk::export::log::LogExporter for LogExporter { - async fn export(&mut self, batch: Vec) -> opentelemetry::log::LogResult<()> { + async fn export(&mut self, batch: Vec) -> opentelemetry::log::LogResult<()> { match self { #[cfg(feature = "grpc-sys")] LogExporter::Grpcio { diff --git a/opentelemetry-proto/src/proto/grpcio/logs.rs b/opentelemetry-proto/src/proto/grpcio/logs.rs index ea9cf72748..f334dd1340 100644 --- a/opentelemetry-proto/src/proto/grpcio/logs.rs +++ b/opentelemetry-proto/src/proto/grpcio/logs.rs @@ -23,6 +23,175 @@ /// of protobuf runtime. 
// const _PROTOBUF_VERSION_CHECK: () = ::protobuf::VERSION_2_27_1; +#[derive(PartialEq,Clone,Default)] +#[cfg_attr(feature = "with-serde", derive(::serde::Serialize, ::serde::Deserialize))] +pub struct LogsData { + // message fields + pub resource_logs: ::protobuf::RepeatedField, + // special fields + #[cfg_attr(feature = "with-serde", serde(skip))] + pub unknown_fields: ::protobuf::UnknownFields, + #[cfg_attr(feature = "with-serde", serde(skip))] + pub cached_size: ::protobuf::CachedSize, +} + +impl<'a> ::std::default::Default for &'a LogsData { + fn default() -> &'a LogsData { + ::default_instance() + } +} + +impl LogsData { + pub fn new() -> LogsData { + ::std::default::Default::default() + } + + // repeated .opentelemetry.proto.logs.v1.ResourceLogs resource_logs = 1; + + + pub fn get_resource_logs(&self) -> &[ResourceLogs] { + &self.resource_logs + } + pub fn clear_resource_logs(&mut self) { + self.resource_logs.clear(); + } + + // Param is passed by value, moved + pub fn set_resource_logs(&mut self, v: ::protobuf::RepeatedField) { + self.resource_logs = v; + } + + // Mutable pointer to the field. + pub fn mut_resource_logs(&mut self) -> &mut ::protobuf::RepeatedField { + &mut self.resource_logs + } + + // Take field + pub fn take_resource_logs(&mut self) -> ::protobuf::RepeatedField { + ::std::mem::replace(&mut self.resource_logs, ::protobuf::RepeatedField::new()) + } +} + +impl ::protobuf::Message for LogsData { + fn is_initialized(&self) -> bool { + for v in &self.resource_logs { + if !v.is_initialized() { + return false; + } + }; + true + } + + fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> { + while !is.eof()? 
{ + let (field_number, wire_type) = is.read_tag_unpack()?; + match field_number { + 1 => { + ::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.resource_logs)?; + }, + _ => { + ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?; + }, + }; + } + ::std::result::Result::Ok(()) + } + + // Compute sizes of nested messages + #[allow(unused_variables)] + fn compute_size(&self) -> u32 { + let mut my_size = 0; + for value in &self.resource_logs { + let len = value.compute_size(); + my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len; + }; + my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields()); + self.cached_size.set(my_size); + my_size + } + + fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> { + for v in &self.resource_logs { + os.write_tag(1, ::protobuf::wire_format::WireTypeLengthDelimited)?; + os.write_raw_varint32(v.get_cached_size())?; + v.write_to_with_cached_sizes(os)?; + }; + os.write_unknown_fields(self.get_unknown_fields())?; + ::std::result::Result::Ok(()) + } + + fn get_cached_size(&self) -> u32 { + self.cached_size.get() + } + + fn get_unknown_fields(&self) -> &::protobuf::UnknownFields { + &self.unknown_fields + } + + fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields { + &mut self.unknown_fields + } + + fn as_any(&self) -> &dyn (::std::any::Any) { + self as &dyn (::std::any::Any) + } + fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) { + self as &mut dyn (::std::any::Any) + } + fn into_any(self: ::std::boxed::Box) -> ::std::boxed::Box { + self + } + + fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor { + Self::descriptor_static() + } + + fn new() -> LogsData { + LogsData::new() + } + + fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor { + static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = 
::protobuf::rt::LazyV2::INIT; + descriptor.get(|| { + let mut fields = ::std::vec::Vec::new(); + fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage>( + "resource_logs", + |m: &LogsData| { &m.resource_logs }, + |m: &mut LogsData| { &mut m.resource_logs }, + )); + ::protobuf::reflect::MessageDescriptor::new_pb_name::( + "LogsData", + fields, + file_descriptor_proto() + ) + }) + } + + fn default_instance() -> &'static LogsData { + static instance: ::protobuf::rt::LazyV2 = ::protobuf::rt::LazyV2::INIT; + instance.get(LogsData::new) + } +} + +impl ::protobuf::Clear for LogsData { + fn clear(&mut self) { + self.resource_logs.clear(); + self.unknown_fields.clear(); + } +} + +impl ::std::fmt::Debug for LogsData { + fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { + ::protobuf::text_format::fmt(self, f) + } +} + +impl ::protobuf::reflect::ProtobufValue for LogsData { + fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef { + ::protobuf::reflect::ReflectValueRef::Message(self) + } +} + #[derive(PartialEq,Clone,Default)] #[cfg_attr(feature = "with-serde", derive(::serde::Serialize, ::serde::Deserialize))] pub struct ResourceLogs { @@ -296,7 +465,7 @@ impl ::protobuf::reflect::ProtobufValue for ResourceLogs { pub struct InstrumentationLibraryLogs { // message fields pub instrumentation_library: ::protobuf::SingularPtrField, - pub logs: ::protobuf::RepeatedField, + pub log_records: ::protobuf::RepeatedField, pub schema_url: ::std::string::String, // special fields #[cfg_attr(feature = "with-serde", serde(skip))] @@ -349,29 +518,29 @@ impl InstrumentationLibraryLogs { self.instrumentation_library.take().unwrap_or_else(|| super::common::InstrumentationLibrary::new()) } - // repeated .opentelemetry.proto.logs.v1.LogRecord logs = 2; + // repeated .opentelemetry.proto.logs.v1.LogRecord log_records = 2; - pub fn get_logs(&self) -> &[LogRecord] { - &self.logs + pub fn get_log_records(&self) -> 
&[LogRecord] { + &self.log_records } - pub fn clear_logs(&mut self) { - self.logs.clear(); + pub fn clear_log_records(&mut self) { + self.log_records.clear(); } // Param is passed by value, moved - pub fn set_logs(&mut self, v: ::protobuf::RepeatedField) { - self.logs = v; + pub fn set_log_records(&mut self, v: ::protobuf::RepeatedField) { + self.log_records = v; } // Mutable pointer to the field. - pub fn mut_logs(&mut self) -> &mut ::protobuf::RepeatedField { - &mut self.logs + pub fn mut_log_records(&mut self) -> &mut ::protobuf::RepeatedField { + &mut self.log_records } // Take field - pub fn take_logs(&mut self) -> ::protobuf::RepeatedField { - ::std::mem::replace(&mut self.logs, ::protobuf::RepeatedField::new()) + pub fn take_log_records(&mut self) -> ::protobuf::RepeatedField { + ::std::mem::replace(&mut self.log_records, ::protobuf::RepeatedField::new()) } // string schema_url = 3; @@ -408,7 +577,7 @@ impl ::protobuf::Message for InstrumentationLibraryLogs { return false; } }; - for v in &self.logs { + for v in &self.log_records { if !v.is_initialized() { return false; } @@ -424,7 +593,7 @@ impl ::protobuf::Message for InstrumentationLibraryLogs { ::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.instrumentation_library)?; }, 2 => { - ::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.logs)?; + ::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.log_records)?; }, 3 => { ::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.schema_url)?; @@ -445,7 +614,7 @@ impl ::protobuf::Message for InstrumentationLibraryLogs { let len = v.compute_size(); my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len; } - for value in &self.logs { + for value in &self.log_records { let len = value.compute_size(); my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len; }; @@ -463,7 +632,7 @@ impl ::protobuf::Message for InstrumentationLibraryLogs { 
os.write_raw_varint32(v.get_cached_size())?; v.write_to_with_cached_sizes(os)?; } - for v in &self.logs { + for v in &self.log_records { os.write_tag(2, ::protobuf::wire_format::WireTypeLengthDelimited)?; os.write_raw_varint32(v.get_cached_size())?; v.write_to_with_cached_sizes(os)?; @@ -515,9 +684,9 @@ impl ::protobuf::Message for InstrumentationLibraryLogs { |m: &mut InstrumentationLibraryLogs| { &mut m.instrumentation_library }, )); fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage>( - "logs", - |m: &InstrumentationLibraryLogs| { &m.logs }, - |m: &mut InstrumentationLibraryLogs| { &mut m.logs }, + "log_records", + |m: &InstrumentationLibraryLogs| { &m.log_records }, + |m: &mut InstrumentationLibraryLogs| { &mut m.log_records }, )); fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>( "schema_url", @@ -541,7 +710,7 @@ impl ::protobuf::Message for InstrumentationLibraryLogs { impl ::protobuf::Clear for InstrumentationLibraryLogs { fn clear(&mut self) { self.instrumentation_library.clear(); - self.logs.clear(); + self.log_records.clear(); self.schema_url.clear(); self.unknown_fields.clear(); } @@ -564,6 +733,7 @@ impl ::protobuf::reflect::ProtobufValue for InstrumentationLibraryLogs { pub struct LogRecord { // message fields pub time_unix_nano: u64, + pub observed_time_unix_nano: u64, pub severity_number: SeverityNumber, pub severity_text: ::std::string::String, pub name: ::std::string::String, @@ -606,6 +776,21 @@ impl LogRecord { self.time_unix_nano = v; } + // fixed64 observed_time_unix_nano = 11; + + + pub fn get_observed_time_unix_nano(&self) -> u64 { + self.observed_time_unix_nano + } + pub fn clear_observed_time_unix_nano(&mut self) { + self.observed_time_unix_nano = 0; + } + + // Param is passed by value, moved + pub fn set_observed_time_unix_nano(&mut self, v: u64) { + self.observed_time_unix_nano = v; + } + // 
.opentelemetry.proto.logs.v1.SeverityNumber severity_number = 2; @@ -840,6 +1025,13 @@ impl ::protobuf::Message for LogRecord { let tmp = is.read_fixed64()?; self.time_unix_nano = tmp; }, + 11 => { + if wire_type != ::protobuf::wire_format::WireTypeFixed64 { + return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type)); + } + let tmp = is.read_fixed64()?; + self.observed_time_unix_nano = tmp; + }, 2 => { ::protobuf::rt::read_proto3_enum_with_unknown_fields_into(wire_type, is, &mut self.severity_number, 2, &mut self.unknown_fields)? }, @@ -890,6 +1082,9 @@ impl ::protobuf::Message for LogRecord { if self.time_unix_nano != 0 { my_size += 9; } + if self.observed_time_unix_nano != 0 { + my_size += 9; + } if self.severity_number != SeverityNumber::SEVERITY_NUMBER_UNSPECIFIED { my_size += ::protobuf::rt::enum_size(2, self.severity_number); } @@ -928,6 +1123,9 @@ impl ::protobuf::Message for LogRecord { if self.time_unix_nano != 0 { os.write_fixed64(1, self.time_unix_nano)?; } + if self.observed_time_unix_nano != 0 { + os.write_fixed64(11, self.observed_time_unix_nano)?; + } if self.severity_number != SeverityNumber::SEVERITY_NUMBER_UNSPECIFIED { os.write_enum(2, ::protobuf::ProtobufEnum::value(&self.severity_number))?; } @@ -1002,6 +1200,11 @@ impl ::protobuf::Message for LogRecord { |m: &LogRecord| { &m.time_unix_nano }, |m: &mut LogRecord| { &mut m.time_unix_nano }, )); + fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeFixed64>( + "observed_time_unix_nano", + |m: &LogRecord| { &m.observed_time_unix_nano }, + |m: &mut LogRecord| { &mut m.observed_time_unix_nano }, + )); fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeEnum>( "severity_number", |m: &LogRecord| { &m.severity_number }, @@ -1064,6 +1267,7 @@ impl ::protobuf::Message for LogRecord { impl ::protobuf::Clear for LogRecord { fn clear(&mut self) { self.time_unix_nano = 0; + 
self.observed_time_unix_nano = 0; self.severity_number = SeverityNumber::SEVERITY_NUMBER_UNSPECIFIED; self.severity_text.clear(); self.name.clear(); @@ -1263,45 +1467,48 @@ impl ::protobuf::reflect::ProtobufValue for LogRecordFlags { static file_descriptor_proto_data: &'static [u8] = b"\ \n&opentelemetry/proto/logs/v1/logs.proto\x12\x1bopentelemetry.proto.log\ s.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/pr\ - oto/resource/v1/resource.proto\"\xef\x01\n\x0cResourceLogs\x12E\n\x08res\ - ource\x18\x01\x20\x01(\x0b2).opentelemetry.proto.resource.v1.ResourceR\ - \x08resource\x12y\n\x1cinstrumentation_library_logs\x18\x02\x20\x03(\x0b\ - 27.opentelemetry.proto.logs.v1.InstrumentationLibraryLogsR\x1ainstrument\ - ationLibraryLogs\x12\x1d\n\nschema_url\x18\x03\x20\x01(\tR\tschemaUrl\"\ - \xe7\x01\n\x1aInstrumentationLibraryLogs\x12n\n\x17instrumentation_libra\ - ry\x18\x01\x20\x01(\x0b25.opentelemetry.proto.common.v1.InstrumentationL\ - ibraryR\x16instrumentationLibrary\x12:\n\x04logs\x18\x02\x20\x03(\x0b2&.\ - opentelemetry.proto.logs.v1.LogRecordR\x04logs\x12\x1d\n\nschema_url\x18\ - \x03\x20\x01(\tR\tschemaUrl\"\xca\x03\n\tLogRecord\x12$\n\x0etime_unix_n\ - ano\x18\x01\x20\x01(\x06R\x0ctimeUnixNano\x12T\n\x0fseverity_number\x18\ + oto/resource/v1/resource.proto\"Z\n\x08LogsData\x12N\n\rresource_logs\ + \x18\x01\x20\x03(\x0b2).opentelemetry.proto.logs.v1.ResourceLogsR\x0cres\ + ourceLogs\"\xef\x01\n\x0cResourceLogs\x12E\n\x08resource\x18\x01\x20\x01\ + (\x0b2).opentelemetry.proto.resource.v1.ResourceR\x08resource\x12y\n\x1c\ + instrumentation_library_logs\x18\x02\x20\x03(\x0b27.opentelemetry.proto.\ + logs.v1.InstrumentationLibraryLogsR\x1ainstrumentationLibraryLogs\x12\ + \x1d\n\nschema_url\x18\x03\x20\x01(\tR\tschemaUrl\"\xf4\x01\n\x1aInstrum\ + entationLibraryLogs\x12n\n\x17instrumentation_library\x18\x01\x20\x01(\ + \x0b25.opentelemetry.proto.common.v1.InstrumentationLibraryR\x16instrume\ + 
ntationLibrary\x12G\n\x0blog_records\x18\x02\x20\x03(\x0b2&.opentelemetr\ + y.proto.logs.v1.LogRecordR\nlogRecords\x12\x1d\n\nschema_url\x18\x03\x20\ + \x01(\tR\tschemaUrl\"\x85\x04\n\tLogRecord\x12$\n\x0etime_unix_nano\x18\ + \x01\x20\x01(\x06R\x0ctimeUnixNano\x125\n\x17observed_time_unix_nano\x18\ + \x0b\x20\x01(\x06R\x14observedTimeUnixNano\x12T\n\x0fseverity_number\x18\ \x02\x20\x01(\x0e2+.opentelemetry.proto.logs.v1.SeverityNumberR\x0esever\ ityNumber\x12#\n\rseverity_text\x18\x03\x20\x01(\tR\x0cseverityText\x12\ - \x12\n\x04name\x18\x04\x20\x01(\tR\x04name\x12;\n\x04body\x18\x05\x20\ - \x01(\x0b2'.opentelemetry.proto.common.v1.AnyValueR\x04body\x12G\n\nattr\ - ibutes\x18\x06\x20\x03(\x0b2'.opentelemetry.proto.common.v1.KeyValueR\na\ - ttributes\x128\n\x18dropped_attributes_count\x18\x07\x20\x01(\rR\x16drop\ - pedAttributesCount\x12\x14\n\x05flags\x18\x08\x20\x01(\x07R\x05flags\x12\ - \x19\n\x08trace_id\x18\t\x20\x01(\x0cR\x07traceId\x12\x17\n\x07span_id\ - \x18\n\x20\x01(\x0cR\x06spanId*\xc3\x05\n\x0eSeverityNumber\x12\x1f\n\ - \x1bSEVERITY_NUMBER_UNSPECIFIED\x10\0\x12\x19\n\x15SEVERITY_NUMBER_TRACE\ - \x10\x01\x12\x1a\n\x16SEVERITY_NUMBER_TRACE2\x10\x02\x12\x1a\n\x16SEVERI\ - TY_NUMBER_TRACE3\x10\x03\x12\x1a\n\x16SEVERITY_NUMBER_TRACE4\x10\x04\x12\ - \x19\n\x15SEVERITY_NUMBER_DEBUG\x10\x05\x12\x1a\n\x16SEVERITY_NUMBER_DEB\ - UG2\x10\x06\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG3\x10\x07\x12\x1a\n\x16SEV\ - ERITY_NUMBER_DEBUG4\x10\x08\x12\x18\n\x14SEVERITY_NUMBER_INFO\x10\t\x12\ - \x19\n\x15SEVERITY_NUMBER_INFO2\x10\n\x12\x19\n\x15SEVERITY_NUMBER_INFO3\ - \x10\x0b\x12\x19\n\x15SEVERITY_NUMBER_INFO4\x10\x0c\x12\x18\n\x14SEVERIT\ - Y_NUMBER_WARN\x10\r\x12\x19\n\x15SEVERITY_NUMBER_WARN2\x10\x0e\x12\x19\n\ - \x15SEVERITY_NUMBER_WARN3\x10\x0f\x12\x19\n\x15SEVERITY_NUMBER_WARN4\x10\ - \x10\x12\x19\n\x15SEVERITY_NUMBER_ERROR\x10\x11\x12\x1a\n\x16SEVERITY_NU\ - MBER_ERROR2\x10\x12\x12\x1a\n\x16SEVERITY_NUMBER_ERROR3\x10\x13\x12\x1a\ - 
\n\x16SEVERITY_NUMBER_ERROR4\x10\x14\x12\x19\n\x15SEVERITY_NUMBER_FATAL\ - \x10\x15\x12\x1a\n\x16SEVERITY_NUMBER_FATAL2\x10\x16\x12\x1a\n\x16SEVERI\ - TY_NUMBER_FATAL3\x10\x17\x12\x1a\n\x16SEVERITY_NUMBER_FATAL4\x10\x18*X\n\ - \x0eLogRecordFlags\x12\x1f\n\x1bLOG_RECORD_FLAG_UNSPECIFIED\x10\0\x12%\n\ - \x20LOG_RECORD_FLAG_TRACE_FLAGS_MASK\x10\xff\x01Bk\n\x1eio.opentelemetry\ - .proto.logs.v1B\tLogsProtoP\x01Z = ::protobuf::rt::LazyV2::INIT; diff --git a/opentelemetry-proto/src/proto/grpcio/logs_service.rs b/opentelemetry-proto/src/proto/grpcio/logs_service.rs index 365e7bb2e8..782eb25d5c 100644 --- a/opentelemetry-proto/src/proto/grpcio/logs_service.rs +++ b/opentelemetry-proto/src/proto/grpcio/logs_service.rs @@ -319,9 +319,8 @@ static file_descriptor_proto_data: &'static [u8] = b"\ \x19ExportLogsServiceResponse2\x9d\x01\n\x0bLogsService\x12\x8d\x01\n\ \x06Export\x12?.opentelemetry.proto.collector.logs.v1.ExportLogsServiceR\ equest\x1a@.opentelemetry.proto.collector.logs.v1.ExportLogsServiceRespo\ - nse\"\0B\x86\x01\n(io.opentelemetry.proto.collector.logs.v1B\x10LogsServ\ - iceProtoP\x01ZFgithub.com/open-telemetry/opentelemetry-proto/gen/go/coll\ - ector/logs/v1b\x06proto3\ + nse\"\0Bp\n(io.opentelemetry.proto.collector.logs.v1B\x10LogsServiceProt\ + oP\x01Z0go.opentelemetry.io/proto/otlp/collector/logs/v1b\x06proto3\ "; static file_descriptor_proto_lazy: ::protobuf::rt::LazyV2<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::rt::LazyV2::INIT; diff --git a/opentelemetry-proto/src/transform/logs.rs b/opentelemetry-proto/src/transform/logs.rs index ac8e7b47ab..11dc27c8de 100644 --- a/opentelemetry-proto/src/transform/logs.rs +++ b/opentelemetry-proto/src/transform/logs.rs @@ -23,7 +23,7 @@ pub mod tonic { match value { Any::Double(f) => Value::DoubleValue(f), Any::Int(i) => Value::IntValue(i), - Any::String(s) => Value::StringValue(s), + Any::String(s) => Value::StringValue(s.into()), Any::Boolean(b) => Value::BoolValue(b), Any::ListAny(v) => 
Value::ArrayValue(ArrayValue { values: v @@ -97,14 +97,17 @@ pub mod tonic { let trace_context = log_record.trace_context.as_ref(); let record = LogRecord { - time_unix_nano: log_record.timestamp.map(to_nanos).unwrap_or(0), + time_unix_nano: log_record.timestamp.map(to_nanos).unwrap_or_default(), + observed_time_unix_nano: log_record + .observed_timestamp + .map(to_nanos) + .unwrap_or_default(), severity_number: log_record .severity_number .map(SeverityNumber::from) .map(Into::into) .unwrap_or_default(), severity_text: log_record.severity_text.map(Into::into).unwrap_or_default(), - name: log_record.name.map(Into::into).unwrap_or(String::from("")), body: Some(AnyValue { value: log_record.body.map(Into::into), }), @@ -126,31 +129,30 @@ pub mod tonic { trace_id: trace_context .map(|ctx| ctx.trace_id.to_bytes().to_vec()) .unwrap_or_default(), + ..Default::default() }; record } } - impl From for ResourceLogs { - fn from(resource_log: opentelemetry::sdk::export::log::ResourceLog) -> Self { + impl From for ResourceLogs { + fn from(log_data: opentelemetry::sdk::export::log::LogData) -> Self { ResourceLogs { resource: Some(Resource { - attributes: resource_attributes( - resource_log.resource.as_ref().map(AsRef::as_ref), - ) - .0, + attributes: resource_attributes(log_data.resource.as_ref().map(AsRef::as_ref)) + .0, dropped_attributes_count: 0, }), schema_url: "".to_string(), instrumentation_library_logs: vec![InstrumentationLibraryLogs { - schema_url: resource_log + schema_url: log_data .instrumentation .schema_url .clone() .map(Into::into) .unwrap_or_default(), - instrumentation_library: Some(resource_log.instrumentation.into()), - logs: vec![resource_log.record.into()], + instrumentation_library: Some(log_data.instrumentation.into()), + log_records: vec![log_data.record.into()], }], } } @@ -179,7 +181,7 @@ pub mod grpcio { match value { Any::Double(f) => AnyValue_oneof_value::double_value(f), Any::Int(i) => AnyValue_oneof_value::int_value(i), - Any::String(s) => 
AnyValue_oneof_value::string_value(s), + Any::String(s) => AnyValue_oneof_value::string_value(s.into()), Any::Boolean(b) => AnyValue_oneof_value::bool_value(b), Any::ListAny(v) => AnyValue_oneof_value::array_value(ArrayValue { values: RepeatedField::from_vec( @@ -269,7 +271,6 @@ pub mod grpcio { .map(Into::into) .unwrap_or_default(), severity_text: log_record.severity_text.map(Into::into).unwrap_or_default(), - name: log_record.name.map(Into::into).unwrap_or(String::from("")), body: SingularPtrField::some(AnyValue { value: log_record.body.map(Into::into), ..Default::default() @@ -299,30 +300,28 @@ pub mod grpcio { } } - impl From for ResourceLogs { - fn from(resource_log: opentelemetry::sdk::export::log::ResourceLog) -> Self { + impl From for ResourceLogs { + fn from(log_data: opentelemetry::sdk::export::log::LogData) -> Self { ResourceLogs { resource: SingularPtrField::some(Resource { - attributes: resource_attributes( - resource_log.resource.as_ref().map(AsRef::as_ref), - ) - .0, + attributes: resource_attributes(log_data.resource.as_ref().map(AsRef::as_ref)) + .0, dropped_attributes_count: 0, ..Default::default() }), schema_url: "".to_string(), instrumentation_library_logs: RepeatedField::from_vec(vec![ InstrumentationLibraryLogs { - schema_url: resource_log + schema_url: log_data .instrumentation .schema_url .clone() .map(Into::into) .unwrap_or_default(), instrumentation_library: SingularPtrField::some( - resource_log.instrumentation.into(), + log_data.instrumentation.into(), ), - logs: RepeatedField::from_vec(vec![resource_log.record.into()]), + log_records: RepeatedField::from_vec(vec![log_data.record.into()]), ..Default::default() }, ]), diff --git a/opentelemetry-sdk/src/export/log/mod.rs b/opentelemetry-sdk/src/export/log/mod.rs index 53fa37debb..a82a0d893e 100644 --- a/opentelemetry-sdk/src/export/log/mod.rs +++ b/opentelemetry-sdk/src/export/log/mod.rs @@ -11,21 +11,21 @@ pub mod stdout; #[async_trait] pub trait LogExporter: Send + Debug { /// Exports a 
batch of `ResourceLogs`. - async fn export(&mut self, batch: Vec) -> LogResult<()>; + async fn export(&mut self, batch: Vec) -> LogResult<()>; /// Shuts down the expoter. fn shutdown(&mut self) {} } -/// `ResourceLog` associates a [`LogRecord`] with a [`Resource`] and +/// `LogData` associates a [`LogRecord`] with a [`Resource`] and /// [`InstrumentationLibrary`]. #[derive(Debug)] #[non_exhaustive] -pub struct ResourceLog { +pub struct LogData { /// Log record pub record: LogRecord, - /// Resource for the emitter who produced this `ResourceLog`. + /// Resource for the emitter who produced this `LogData`. pub resource: Option>, - /// Instrumentation details for the emitter who produced this `ResourceLog`. + /// Instrumentation details for the emitter who produced this `LogData`. pub instrumentation: InstrumentationLibrary, } diff --git a/opentelemetry-sdk/src/export/log/stdout.rs b/opentelemetry-sdk/src/export/log/stdout.rs index 81828d6b4f..590c7a2542 100644 --- a/opentelemetry-sdk/src/export/log/stdout.rs +++ b/opentelemetry-sdk/src/export/log/stdout.rs @@ -28,7 +28,7 @@ //! } //! 
``` use crate::export::{ - log::{ExportResult, LogExporter, ResourceLog}, + log::{ExportResult, LogData, LogExporter}, ExportError, }; use async_trait::async_trait; @@ -129,7 +129,7 @@ where W: Write + Debug + Send + 'static, { /// Export spans to stdout - async fn export(&mut self, batch: Vec) -> ExportResult { + async fn export(&mut self, batch: Vec) -> ExportResult { for log in batch { if self.pretty_print { self.writer diff --git a/opentelemetry-sdk/src/log/log_emitter.rs b/opentelemetry-sdk/src/log/log_emitter.rs index 3efa19f0f8..a9929cd92d 100644 --- a/opentelemetry-sdk/src/log/log_emitter.rs +++ b/opentelemetry-sdk/src/log/log_emitter.rs @@ -1,6 +1,6 @@ use super::{BatchLogProcessor, Config, LogProcessor, LogRecord, LogRuntime, SimpleLogProcessor}; use crate::{ - export::log::{LogExporter, ResourceLog}, + export::log::{LogData, LogExporter}, resource::{EnvResourceDetector, SdkProvidedResourceDetector}, Resource, }; @@ -225,7 +225,7 @@ impl LogEmitter { let config = provider.config(); for processor in provider.log_processors() { - let data = ResourceLog { + let data = LogData { record: record.clone(), resource: config.resource.clone(), instrumentation: self.instrumentation_lib.clone(), diff --git a/opentelemetry-sdk/src/log/log_processor.rs b/opentelemetry-sdk/src/log/log_processor.rs index 51ccf28783..7875ee5354 100644 --- a/opentelemetry-sdk/src/log/log_processor.rs +++ b/opentelemetry-sdk/src/log/log_processor.rs @@ -1,6 +1,6 @@ use super::LogRuntime; use crate::{ - export::log::{ExportResult, LogExporter, ResourceLog}, + export::log::{ExportResult, LogData, LogExporter}, log::TrySend, }; use futures_channel::oneshot; @@ -23,7 +23,7 @@ use std::{ /// [`LogEmitter`]: crate::log::LogEmitter pub trait LogProcessor: Send + Sync + Debug { /// Called when a log record is ready to processed and exported. - fn emit(&self, data: ResourceLog); + fn emit(&self, data: LogData); /// Force the logs lying in the cache to be exported. 
fn force_flush(&self) -> LogResult<()>; /// Shuts down the processor. @@ -38,7 +38,7 @@ pub trait LogProcessor: Send + Sync + Debug { /// emitted. If you find this limiting, consider the batch processor instead. #[derive(Debug)] pub struct SimpleLogProcessor { - sender: crossbeam_channel::Sender>, + sender: crossbeam_channel::Sender>, shutdown: crossbeam_channel::Receiver<()>, } @@ -74,7 +74,7 @@ impl SimpleLogProcessor { } impl LogProcessor for SimpleLogProcessor { - fn emit(&self, data: ResourceLog) { + fn emit(&self, data: LogData) { if let Err(err) = self.sender.send(Some(data)) { global::handle_error(LogError::from(format!("error processing log {:?}", err))); } @@ -114,7 +114,7 @@ impl Debug for BatchLogProcessor { } impl LogProcessor for BatchLogProcessor { - fn emit(&self, data: ResourceLog) { + fn emit(&self, data: LogData) { let result = self.message_sender.try_send(BatchMessage::ExportLog(data)); if let Err(err) = result { @@ -154,21 +154,21 @@ impl BatchLogProcessor { // Spawn worker process via user-defined spawn function. runtime.spawn(Box::pin(async move { - let mut spans = Vec::new(); + let mut logs = Vec::new(); let mut messages = Box::pin(stream::select(message_receiver, ticker)); while let Some(message) = messages.next().await { match message { // Span has finished, add to buffer of pending spans. - BatchMessage::ExportLog(span) => { - spans.push(span); + BatchMessage::ExportLog(log) => { + logs.push(log); - if spans.len() == config.max_export_batch_size { + if logs.len() == config.max_export_batch_size { let result = export_with_timeout( config.max_export_timeout, exporter.as_mut(), &timeout_runtime, - spans.split_off(0), + logs.split_off(0), ) .await; @@ -177,13 +177,13 @@ impl BatchLogProcessor { } } } - // Span batch interval time reached or a force flush has been invoked, export current spans. + // Log batch interval time reached or a force flush has been invoked, export current spans. 
BatchMessage::Flush(res_channel) => { let result = export_with_timeout( config.max_export_timeout, exporter.as_mut(), &timeout_runtime, - spans.split_off(0), + logs.split_off(0), ) .await; @@ -204,7 +204,7 @@ impl BatchLogProcessor { config.max_export_timeout, exporter.as_mut(), &timeout_runtime, - spans.split_off(0), + logs.split_off(0), ) .await; @@ -244,7 +244,7 @@ async fn export_with_timeout( time_out: Duration, exporter: &mut E, runtime: &R, - batch: Vec, + batch: Vec, ) -> ExportResult where R: LogRuntime, @@ -359,7 +359,7 @@ where #[derive(Debug)] pub enum BatchMessage { /// Export logs, usually called when the log is emitted. - ExportLog(ResourceLog), + ExportLog(LogData), /// Flush the current buffer to the backend, it can be triggered by /// pre configured interval or a call to `force_push` function. Flush(Option>), diff --git a/opentelemetry-sdk/src/log/record.rs b/opentelemetry-sdk/src/log/record.rs index 1aff57126b..c0958c6cf8 100644 --- a/opentelemetry-sdk/src/log/record.rs +++ b/opentelemetry-sdk/src/log/record.rs @@ -9,6 +9,9 @@ pub struct LogRecord { /// Record timestamp pub timestamp: Option, + /// Timestamp for when the record was observed by OpenTelemetry + pub observed_timestamp: Option, + /// Trace context for logs associated with spans pub trace_context: Option, @@ -17,8 +20,6 @@ pub struct LogRecord { /// The corresponding severity value, normalized pub severity_number: Option, - /// Record name - pub name: Option>, /// Record body pub body: Option, @@ -56,7 +57,7 @@ pub enum Any { /// A double value Double(f64), /// A string value - String(String), + String(Cow<'static, str>), /// A boolean value Boolean(bool), /// A byte array @@ -91,7 +92,7 @@ impl_trivial_from!(f32, Any::Double); impl_trivial_from!(String, Any::String); impl_trivial_from!(Cow<'static, str>, Any::String); -impl_trivial_from!(&str, Any::String); +impl_trivial_from!(&'static str, Any::String); impl_trivial_from!(bool, Any::Boolean); @@ -180,6 +181,16 @@ impl 
LogRecordBuilder { } } + /// Assign observed timestamp + pub fn with_observed_timestamp(self, timestamp: SystemTime) -> Self { + Self { + record: LogRecord { + observed_timestamp: Some(timestamp), + ..self.record + }, + } + } + /// Assign the record's [`TraceContext`] pub fn with_span_context(self, span_context: &SpanContext) -> Self { Self { @@ -229,19 +240,6 @@ impl LogRecordBuilder { } } - /// Assign name - pub fn with_name(self, name: T) -> Self - where - T: Into>, - { - Self { - record: LogRecord { - name: Some(name.into()), - ..self.record - }, - } - } - /// Assign body pub fn with_body(self, body: Any) -> Self { Self { From 3eb34cddc8f600a6e4c8583bf72fef5acb591529 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Mon, 28 Mar 2022 16:31:24 +0530 Subject: [PATCH 19/51] Remove tests. --- opentelemetry-sdk/src/log/runtime.rs | 167 --------------------------- 1 file changed, 167 deletions(-) diff --git a/opentelemetry-sdk/src/log/runtime.rs b/opentelemetry-sdk/src/log/runtime.rs index fb368f45da..edf9d59816 100644 --- a/opentelemetry-sdk/src/log/runtime.rs +++ b/opentelemetry-sdk/src/log/runtime.rs @@ -117,170 +117,3 @@ impl LogRuntime for AsyncStd { async_std::channel::bounded(capacity) } } - -#[cfg(test)] -// Note that all tests here should be marked as ignore so that it won't be picked up by default We -// need to run those tests one by one as the GlobalLogrProvider is a shared object between -// threads Use cargo test -- --ignored --test-threads=1 to run those tests. 
-mod tests { - #[cfg(any(feature = "rt-tokio", feature = "rt-tokio-current-thread"))] - use crate::log::LogRuntime; - #[cfg(any(feature = "rt-tokio", feature = "rt-tokio-current-thread"))] - use crate::runtime; - #[cfg(any(feature = "rt-tokio", feature = "rt-tokio-current-thread"))] - use opentelemetry_api::global::*; - use std::sync::Arc; - use std::{fmt::Debug, io::Write, sync::Mutex}; - - #[derive(Debug)] - struct AssertWriter { - buf: Arc>>, - } - - #[cfg(any(feature = "rt-tokio", feature = "rt-tokio-current-thread"))] - impl AssertWriter { - fn new() -> AssertWriter { - AssertWriter { - buf: Arc::new(Mutex::new(Vec::new())), - } - } - - fn len(&self) -> usize { - self.buf - .lock() - .expect("cannot acquire the lock of assert writer") - .len() - } - } - - impl Write for AssertWriter { - fn write(&mut self, buf: &[u8]) -> std::io::Result { - let mut buffer = self - .buf - .lock() - .expect("cannot acquire the lock of assert writer"); - buffer.write(buf) - } - - fn flush(&mut self) -> std::io::Result<()> { - let mut buffer = self - .buf - .lock() - .expect("cannot acquire the lock of assert writer"); - buffer.flush() - } - } - - impl Clone for AssertWriter { - fn clone(&self) -> Self { - AssertWriter { - buf: self.buf.clone(), - } - } - } - - #[cfg(any(feature = "rt-tokio", feature = "rt-tokio-current-thread"))] - fn build_batch_log_emitter_provider( - assert_writer: AssertWriter, - runtime: R, - ) -> crate::log::LogEmitterProvider { - use crate::log::LogEmitterProvider; - let exporter = crate::export::log::stdout::Exporter::new(assert_writer, true); - LogEmitterProvider::builder() - .with_batch_exporter(exporter, runtime) - .build() - } - - #[cfg(any(feature = "rt-tokio", feature = "rt-tokio-current-thread"))] - fn build_simple_log_emitter_provider( - assert_writer: AssertWriter, - ) -> crate::log::LogEmitterProvider { - use crate::log::LogEmitterProvider; - let exporter = crate::export::log::stdout::Exporter::new(assert_writer, true); - 
LogEmitterProvider::builder() - .with_simple_exporter(exporter) - .build() - } - - #[cfg(any(feature = "rt-tokio", feature = "rt-tokio-current-thread"))] - async fn test_create_provider_in_tokio(runtime: R) -> AssertWriter { - use crate::log::LogRecord; - - let buffer = AssertWriter::new(); - let provider = build_batch_log_emitter_provider(buffer.clone(), runtime); - let emitter = provider.log_emitter("opentelemetery"); - - emitter.emit(LogRecord::default()); - - buffer - } - - // When using `tokio::spawn` to spawn the worker task in batch processor - // - // multiple -> no shut down -> not export - // multiple -> shut down -> export - // single -> no shutdown -> not export - // single -> shutdown -> hang forever - - // When using |fut| tokio::task::spawn_blocking(|| futures::executor::block_on(fut)) - // to spawn the worker task in batch processor - // - // multiple -> no shutdown -> hang forever - // multiple -> shut down -> export - // single -> shut down -> export - // single -> no shutdown -> hang forever - - // Test if the multiple thread tokio runtime could exit successfully when not force flushing logs - #[tokio::test(flavor = "multi_thread", worker_threads = 2)] - #[ignore = "requires --test-threads=1"] - #[cfg(feature = "rt-tokio")] - async fn test_create_provider_multiple_thread_tokio() { - let assert_writer = test_create_provider_in_tokio(runtime::Tokio).await; - assert_eq!(assert_writer.len(), 0); - } - - // Test if the multiple thread tokio runtime could exit successfully when force flushing logs - #[tokio::test(flavor = "multi_thread", worker_threads = 2)] - #[ignore = "requires --test-threads=1"] - #[cfg(feature = "rt-tokio")] - async fn test_create_provider_multiple_thread_tokio_shutdown() { - let assert_writer = test_create_provider_in_tokio(runtime::Tokio).await; - assert!(assert_writer.len() > 0); - } - - // Test use simple processor in single thread tokio runtime. 
- // Expected to see the logs being exported to buffer - #[tokio::test] - #[ignore = "requires --test-threads=1"] - #[cfg(feature = "rt-tokio")] - async fn test_create_provider_single_thread_tokio_with_simple_processor() { - use crate::log::LogRecord; - - let assert_writer = AssertWriter::new(); - let provider = build_simple_log_emitter_provider(assert_writer.clone()); - let emitter = provider.log_emitter("opentelemetry"); - - emitter.emit(LogRecord::default()); - - assert!(assert_writer.len() > 0); - } - - // Test if the single thread tokio runtime could exit successfully when not force flushing logs - #[tokio::test] - #[ignore = "requires --test-threads=1"] - #[cfg(feature = "rt-tokio-current-thread")] - async fn test_create_provider_single_thread_tokio() { - let assert_writer = test_create_provider_in_tokio(runtime::TokioCurrentThread).await; - assert_eq!(assert_writer.len(), 0) - } - - // Test if the single thread tokio runtime could exit successfully when force flushing logs - #[tokio::test] - #[ignore = "requires --test-threads=1"] - #[cfg(feature = "rt-tokio-current-thread")] - async fn test_create_provider_single_thread_tokio_shutdown() { - let assert_writer = test_create_provider_in_tokio(runtime::TokioCurrentThread).await; - shutdown_tracer_provider(); - assert!(assert_writer.len() > 0); - } -} From 0e0bbbf8d2cf3dadeb0f59e7aa853f042d62a3af Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Fri, 1 Apr 2022 12:33:50 +0530 Subject: [PATCH 20/51] Avoid using wildcard imports. 
--- opentelemetry-sdk/src/log/log_processor.rs | 2 +- opentelemetry-sdk/src/log/mod.rs | 13 ++++++++----- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/opentelemetry-sdk/src/log/log_processor.rs b/opentelemetry-sdk/src/log/log_processor.rs index 7875ee5354..2a90c6e2c4 100644 --- a/opentelemetry-sdk/src/log/log_processor.rs +++ b/opentelemetry-sdk/src/log/log_processor.rs @@ -159,7 +159,7 @@ impl BatchLogProcessor { while let Some(message) = messages.next().await { match message { - // Span has finished, add to buffer of pending spans. + // Log has finished, add to buffer of pending logs. BatchMessage::ExportLog(log) => { logs.push(log); diff --git a/opentelemetry-sdk/src/log/mod.rs b/opentelemetry-sdk/src/log/mod.rs index d8a7ef3428..e47be2a63b 100644 --- a/opentelemetry-sdk/src/log/mod.rs +++ b/opentelemetry-sdk/src/log/mod.rs @@ -6,8 +6,11 @@ mod log_processor; mod record; mod runtime; -pub use config::*; -pub use log_emitter::*; -pub use log_processor::*; -pub use record::*; -pub use runtime::*; +pub use config::Config; +pub use log_emitter::{Builder, LogEmitter, LogEmitterProvider}; +pub use log_processor::{ + BatchConfig, BatchLogProcessor, BatchLogProcessorBuilder, BatchMessage, LogProcessor, + SimpleLogProcessor, +}; +pub use record::{Any, LogRecord, LogRecordBuilder, Severity, TraceContext}; +pub use runtime::{LogRuntime, TrySend}; From 22f5ab5e4cccfb26023657451b5ab4362600c0ea Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Sat, 7 May 2022 13:23:15 +0530 Subject: [PATCH 21/51] Rename feature/module "log" to "logs". 
--- opentelemetry-api/Cargo.toml | 2 +- opentelemetry-api/src/global/error_handler.rs | 12 ++++++------ opentelemetry-api/src/lib.rs | 6 +++--- opentelemetry-api/src/{log => logs}/mod.rs | 0 opentelemetry-otlp/src/lib.rs | 6 +++--- opentelemetry-otlp/src/{log.rs => logs.rs} | 0 opentelemetry-sdk/src/export/{log => logs}/mod.rs | 6 +++--- opentelemetry-sdk/src/export/{log => logs}/stdout.rs | 12 ++++++------ opentelemetry-sdk/src/export/mod.rs | 6 +++--- opentelemetry-sdk/src/lib.rs | 6 +++--- opentelemetry-sdk/src/{log => logs}/config.rs | 0 opentelemetry-sdk/src/{log => logs}/log_emitter.rs | 4 ++-- opentelemetry-sdk/src/{log => logs}/log_processor.rs | 6 +++--- opentelemetry-sdk/src/{log => logs}/mod.rs | 0 opentelemetry-sdk/src/{log => logs}/record.rs | 0 opentelemetry-sdk/src/{log => logs}/runtime.rs | 4 ++-- opentelemetry/Cargo.toml | 2 +- 17 files changed, 36 insertions(+), 36 deletions(-) rename opentelemetry-api/src/{log => logs}/mod.rs (100%) rename opentelemetry-otlp/src/{log.rs => logs.rs} (100%) rename opentelemetry-sdk/src/export/{log => logs}/mod.rs (86%) rename opentelemetry-sdk/src/export/{log => logs}/stdout.rs (91%) rename opentelemetry-sdk/src/{log => logs}/config.rs (100%) rename opentelemetry-sdk/src/{log => logs}/log_emitter.rs (98%) rename opentelemetry-sdk/src/{log => logs}/log_processor.rs (99%) rename opentelemetry-sdk/src/{log => logs}/mod.rs (100%) rename opentelemetry-sdk/src/{log => logs}/record.rs (100%) rename opentelemetry-sdk/src/{log => logs}/runtime.rs (98%) diff --git a/opentelemetry-api/Cargo.toml b/opentelemetry-api/Cargo.toml index 1837e5b78f..5c20e0fbc2 100644 --- a/opentelemetry-api/Cargo.toml +++ b/opentelemetry-api/Cargo.toml @@ -29,4 +29,4 @@ default = ["trace"] trace = ["pin-project-lite"] metrics = ["fnv"] testing = ["trace"] -log = [] \ No newline at end of file +logs = [] \ No newline at end of file diff --git a/opentelemetry-api/src/global/error_handler.rs b/opentelemetry-api/src/global/error_handler.rs index 
fd11510483..290a8a2043 100644 --- a/opentelemetry-api/src/global/error_handler.rs +++ b/opentelemetry-api/src/global/error_handler.rs @@ -1,8 +1,8 @@ use std::sync::PoisonError; use std::sync::RwLock; -#[cfg(feature = "log")] -use crate::log::LogError; +#[cfg(feature = "logs")] +use crate::logs::LogError; #[cfg(feature = "metrics")] use crate::metrics::MetricsError; #[cfg(feature = "trace")] @@ -26,8 +26,8 @@ pub enum Error { /// An issue raised by the metrics module. Metric(#[from] MetricsError), - #[cfg(feature = "log")] - #[cfg_attr(docsrs, doc(cfg(feature = "log")))] + #[cfg(feature = "logs")] + #[cfg_attr(docsrs, doc(cfg(feature = "logs")))] #[error(transparent)] /// Failed to export logs. Log(#[from] LogError), @@ -58,8 +58,8 @@ pub fn handle_error>(err: T) { #[cfg(feature = "trace")] #[cfg_attr(docsrs, doc(cfg(feature = "trace")))] Error::Trace(err) => eprintln!("OpenTelemetry trace error occurred. {}", err), - #[cfg(feature = "log")] - #[cfg_attr(docsrs, doc(cfg(feature = "log")))] + #[cfg(feature = "logs")] + #[cfg_attr(docsrs, doc(cfg(feature = "logs")))] Error::Log(err) => eprintln!("OpenTelemetry log error occurred. {}", err), Error::Other(err_msg) => eprintln!("OpenTelemetry error occurred. 
{}", err_msg), }, diff --git a/opentelemetry-api/src/lib.rs b/opentelemetry-api/src/lib.rs index ffed020fc0..45fecee53c 100644 --- a/opentelemetry-api/src/lib.rs +++ b/opentelemetry-api/src/lib.rs @@ -67,9 +67,9 @@ pub mod propagation; #[cfg_attr(docsrs, doc(cfg(feature = "trace")))] pub mod trace; -#[cfg(feature = "log")] -#[cfg_attr(docsrs, doc(cfg(feature = "log")))] -pub mod log; +#[cfg(feature = "logs")] +#[cfg_attr(docsrs, doc(cfg(feature = "logs")))] +pub mod logs; #[doc(hidden)] #[cfg(any(feature = "metrics", feature = "trace"))] diff --git a/opentelemetry-api/src/log/mod.rs b/opentelemetry-api/src/logs/mod.rs similarity index 100% rename from opentelemetry-api/src/log/mod.rs rename to opentelemetry-api/src/logs/mod.rs diff --git a/opentelemetry-otlp/src/lib.rs b/opentelemetry-otlp/src/lib.rs index 385870cb7a..adf094b8a0 100644 --- a/opentelemetry-otlp/src/lib.rs +++ b/opentelemetry-otlp/src/lib.rs @@ -181,8 +181,8 @@ #![cfg_attr(test, deny(warnings))] mod exporter; -#[cfg(feature = "log")] -mod log; +#[cfg(feature = "logs")] +mod logs; #[cfg(feature = "metrics")] mod metric; #[cfg(feature = "trace")] @@ -202,7 +202,7 @@ pub use crate::metric::{ OTEL_EXPORTER_OTLP_METRICS_ENDPOINT, OTEL_EXPORTER_OTLP_METRICS_TIMEOUT, }; -#[cfg(feature = "log")] +#[cfg(feature = "logs")] pub use crate::log::*; pub use crate::exporter::{ diff --git a/opentelemetry-otlp/src/log.rs b/opentelemetry-otlp/src/logs.rs similarity index 100% rename from opentelemetry-otlp/src/log.rs rename to opentelemetry-otlp/src/logs.rs diff --git a/opentelemetry-sdk/src/export/log/mod.rs b/opentelemetry-sdk/src/export/logs/mod.rs similarity index 86% rename from opentelemetry-sdk/src/export/log/mod.rs rename to opentelemetry-sdk/src/export/logs/mod.rs index a82a0d893e..2aa5640053 100644 --- a/opentelemetry-sdk/src/export/log/mod.rs +++ b/opentelemetry-sdk/src/export/logs/mod.rs @@ -1,8 +1,8 @@ //! 
Log exporters -use crate::{log::LogRecord, Resource}; +use crate::{logs::LogRecord, Resource}; use async_trait::async_trait; -use opentelemetry_api::log::LogError; -use opentelemetry_api::{log::LogResult, InstrumentationLibrary}; +use opentelemetry_api::logs::LogError; +use opentelemetry_api::{logs::LogResult, InstrumentationLibrary}; use std::{fmt::Debug, sync::Arc}; pub mod stdout; diff --git a/opentelemetry-sdk/src/export/log/stdout.rs b/opentelemetry-sdk/src/export/logs/stdout.rs similarity index 91% rename from opentelemetry-sdk/src/export/log/stdout.rs rename to opentelemetry-sdk/src/export/logs/stdout.rs index 590c7a2542..5996329595 100644 --- a/opentelemetry-sdk/src/export/log/stdout.rs +++ b/opentelemetry-sdk/src/export/logs/stdout.rs @@ -28,11 +28,11 @@ //! } //! ``` use crate::export::{ - log::{ExportResult, LogData, LogExporter}, + logs::{ExportResult, LogData, LogExporter}, ExportError, }; use async_trait::async_trait; -use opentelemetry_api::log::LogError; +use opentelemetry_api::logs::LogError; use std::fmt::Debug; use std::io::{stdout, Stdout, Write}; @@ -40,7 +40,7 @@ use std::io::{stdout, Stdout, Write}; #[derive(Debug)] pub struct PipelineBuilder { pretty_print: bool, - log_config: Option, + log_config: Option, writer: W, } @@ -68,7 +68,7 @@ impl PipelineBuilder { } /// Assign the SDK trace configuration. - pub fn with_trace_config(mut self, config: crate::log::Config) -> Self { + pub fn with_trace_config(mut self, config: crate::logs::Config) -> Self { self.log_config = Some(config); self } @@ -88,11 +88,11 @@ where W: Write + Debug + Send + 'static, { /// Install the stdout exporter pipeline with the recommended defaults. 
- pub fn install_simple(mut self) -> crate::log::LogEmitter { + pub fn install_simple(mut self) -> crate::logs::LogEmitter { let exporter = Exporter::new(self.writer, self.pretty_print); let mut provider_builder = - crate::log::LogEmitterProvider::builder().with_simple_exporter(exporter); + crate::logs::LogEmitterProvider::builder().with_simple_exporter(exporter); if let Some(config) = self.log_config.take() { provider_builder = provider_builder.with_config(config); } diff --git a/opentelemetry-sdk/src/export/mod.rs b/opentelemetry-sdk/src/export/mod.rs index 999c4c9919..8622bda739 100644 --- a/opentelemetry-sdk/src/export/mod.rs +++ b/opentelemetry-sdk/src/export/mod.rs @@ -1,8 +1,8 @@ //! Telemetry Export -#[cfg(feature = "log")] -#[cfg_attr(docsrs, doc(cfg(feature = "log")))] -pub mod log; +#[cfg(feature = "logs")] +#[cfg_attr(docsrs, doc(cfg(feature = "logs")))] +pub mod logs; #[cfg(feature = "trace")] #[cfg_attr(docsrs, doc(cfg(feature = "trace")))] diff --git a/opentelemetry-sdk/src/lib.rs b/opentelemetry-sdk/src/lib.rs index dba2a25282..5e202ed93c 100644 --- a/opentelemetry-sdk/src/lib.rs +++ b/opentelemetry-sdk/src/lib.rs @@ -103,9 +103,9 @@ pub mod export; mod instrumentation; -#[cfg(feature = "log")] -#[cfg_attr(docsrs, doc(cfg(feature = "log")))] -pub mod log; +#[cfg(feature = "logs")] +#[cfg_attr(docsrs, doc(cfg(feature = "logs")))] +pub mod logs; #[cfg(feature = "metrics")] #[cfg_attr(docsrs, doc(cfg(feature = "metrics")))] pub mod metrics; diff --git a/opentelemetry-sdk/src/log/config.rs b/opentelemetry-sdk/src/logs/config.rs similarity index 100% rename from opentelemetry-sdk/src/log/config.rs rename to opentelemetry-sdk/src/logs/config.rs diff --git a/opentelemetry-sdk/src/log/log_emitter.rs b/opentelemetry-sdk/src/logs/log_emitter.rs similarity index 98% rename from opentelemetry-sdk/src/log/log_emitter.rs rename to opentelemetry-sdk/src/logs/log_emitter.rs index a9929cd92d..aca1594fbe 100644 --- a/opentelemetry-sdk/src/log/log_emitter.rs +++ 
b/opentelemetry-sdk/src/logs/log_emitter.rs @@ -1,10 +1,10 @@ use super::{BatchLogProcessor, Config, LogProcessor, LogRecord, LogRuntime, SimpleLogProcessor}; use crate::{ - export::log::{LogData, LogExporter}, + export::logs::{LogData, LogExporter}, resource::{EnvResourceDetector, SdkProvidedResourceDetector}, Resource, }; -use opentelemetry_api::{log::LogResult, InstrumentationLibrary}; +use opentelemetry_api::{logs::LogResult, InstrumentationLibrary}; use std::{ borrow::Cow, sync::{Arc, Weak}, diff --git a/opentelemetry-sdk/src/log/log_processor.rs b/opentelemetry-sdk/src/logs/log_processor.rs similarity index 99% rename from opentelemetry-sdk/src/log/log_processor.rs rename to opentelemetry-sdk/src/logs/log_processor.rs index 2a90c6e2c4..bcf4cb6cc1 100644 --- a/opentelemetry-sdk/src/log/log_processor.rs +++ b/opentelemetry-sdk/src/logs/log_processor.rs @@ -1,7 +1,7 @@ use super::LogRuntime; use crate::{ - export::log::{ExportResult, LogData, LogExporter}, - log::TrySend, + export::logs::{ExportResult, LogData, LogExporter}, + logs::TrySend, }; use futures_channel::oneshot; use futures_util::{ @@ -10,7 +10,7 @@ use futures_util::{ }; use opentelemetry_api::{ global, - log::{LogError, LogResult}, + logs::{LogError, LogResult}, }; use std::thread; use std::{ diff --git a/opentelemetry-sdk/src/log/mod.rs b/opentelemetry-sdk/src/logs/mod.rs similarity index 100% rename from opentelemetry-sdk/src/log/mod.rs rename to opentelemetry-sdk/src/logs/mod.rs diff --git a/opentelemetry-sdk/src/log/record.rs b/opentelemetry-sdk/src/logs/record.rs similarity index 100% rename from opentelemetry-sdk/src/log/record.rs rename to opentelemetry-sdk/src/logs/record.rs diff --git a/opentelemetry-sdk/src/log/runtime.rs b/opentelemetry-sdk/src/logs/runtime.rs similarity index 98% rename from opentelemetry-sdk/src/log/runtime.rs rename to opentelemetry-sdk/src/logs/runtime.rs index edf9d59816..3d5ad32115 100644 --- a/opentelemetry-sdk/src/log/runtime.rs +++ 
b/opentelemetry-sdk/src/logs/runtime.rs @@ -4,7 +4,7 @@ //! //! [`BatchLogProcessor`]: crate::log::BatchLogProcessor //! [`Runtime`]: crate::runtime::Runtime -use crate::log::BatchMessage; +use crate::logs::BatchMessage; #[cfg(feature = "rt-async-std")] use crate::runtime::AsyncStd; use crate::runtime::Runtime; @@ -13,7 +13,7 @@ use crate::runtime::Tokio; #[cfg(feature = "rt-tokio-current-thread")] use crate::runtime::TokioCurrentThread; use futures_util::stream::Stream; -use opentelemetry_api::log::LogError; +use opentelemetry_api::logs::LogError; use std::fmt::Debug; #[cfg(any( diff --git a/opentelemetry/Cargo.toml b/opentelemetry/Cargo.toml index fc0ea962a3..79d0b7c55f 100644 --- a/opentelemetry/Cargo.toml +++ b/opentelemetry/Cargo.toml @@ -28,7 +28,7 @@ opentelemetry_sdk = { version = "0.19", path = "../opentelemetry-sdk" } default = ["trace"] trace = ["opentelemetry_api/trace", "opentelemetry_sdk/trace"] metrics = ["opentelemetry_api/metrics", "opentelemetry_sdk/metrics"] -log = ["opentelemetry_sdk/log"] +logs = ["opentelemetry_sdk/logs"] testing = ["opentelemetry_api/testing", "opentelemetry_sdk/testing"] rt-tokio = ["opentelemetry_sdk/rt-tokio"] rt-tokio-current-thread = ["opentelemetry_sdk/rt-tokio-current-thread"] From d3b7b1563dad9c8171aaa84beef3122560bdfe71 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Sat, 7 May 2022 13:53:14 +0530 Subject: [PATCH 22/51] Implement Drop for LogEmitterProvider. 
--- opentelemetry-sdk/src/logs/log_emitter.rs | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/opentelemetry-sdk/src/logs/log_emitter.rs b/opentelemetry-sdk/src/logs/log_emitter.rs index aca1594fbe..9e73e8d563 100644 --- a/opentelemetry-sdk/src/logs/log_emitter.rs +++ b/opentelemetry-sdk/src/logs/log_emitter.rs @@ -4,7 +4,11 @@ use crate::{ resource::{EnvResourceDetector, SdkProvidedResourceDetector}, Resource, }; -use opentelemetry_api::{logs::LogResult, InstrumentationLibrary}; +use opentelemetry_api::{ + global::{handle_error, Error}, + logs::LogResult, + InstrumentationLibrary, +}; use std::{ borrow::Cow, sync::{Arc, Weak}, @@ -93,6 +97,23 @@ impl LogEmitterProvider { } } +impl Drop for LogEmitterProvider { + fn drop(&mut self) { + match self.try_shutdown() { + None => handle_error(Error::Other( + "cannot shutdown LogEmitterProvider when child LogEmitters are still active".into(), + )), + Some(results) => { + for result in results { + if let Err(err) = result { + handle_error(err) + } + } + } + } + } +} + #[derive(Debug)] pub(crate) struct LogEmitterProviderInner { processors: Vec>, From aeb53ff4c61828315fe1c6f6a7a8236db84c4374 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Sat, 7 May 2022 13:53:14 +0530 Subject: [PATCH 23/51] Use std::convert::identity. 
--- opentelemetry-sdk/src/logs/log_processor.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/opentelemetry-sdk/src/logs/log_processor.rs b/opentelemetry-sdk/src/logs/log_processor.rs index bcf4cb6cc1..4b1117139c 100644 --- a/opentelemetry-sdk/src/logs/log_processor.rs +++ b/opentelemetry-sdk/src/logs/log_processor.rs @@ -129,7 +129,7 @@ impl LogProcessor for BatchLogProcessor { futures_executor::block_on(res_receiver) .map_err(|err| LogError::Other(err.into())) - .and_then(|identity| identity) + .and_then(std::convert::identity) } fn shutdown(&mut self) -> LogResult<()> { @@ -139,7 +139,7 @@ impl LogProcessor for BatchLogProcessor { futures_executor::block_on(res_receiver) .map_err(|err| LogError::Other(err.into())) - .and_then(|identity| identity) + .and_then(std::convert::identity) } } From 0593aa514e0fe19728ccb6aa66d7d685198f32f7 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Sat, 7 May 2022 19:16:04 +0530 Subject: [PATCH 24/51] Use opentelemetry-log-exporter as the thread name. --- opentelemetry-sdk/src/logs/log_processor.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opentelemetry-sdk/src/logs/log_processor.rs b/opentelemetry-sdk/src/logs/log_processor.rs index 4b1117139c..bf749ce896 100644 --- a/opentelemetry-sdk/src/logs/log_processor.rs +++ b/opentelemetry-sdk/src/logs/log_processor.rs @@ -48,7 +48,7 @@ impl SimpleLogProcessor { let (shutdown_tx, shutdown_rx) = crossbeam_channel::bounded(0); let _ = thread::Builder::new() - .name("opentelemetry-exporter".to_string()) + .name("opentelemetry-log-exporter".to_string()) .spawn(move || { while let Ok(Some(log)) = log_rx.recv() { if let Err(err) = futures_executor::block_on(exporter.export(vec![log])) { From fec1cdf8e566d6ba33cd7e92a7f3cd8f1e73abf6 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Sat, 7 May 2022 19:25:55 +0530 Subject: [PATCH 25/51] Use the correct module name. 
--- opentelemetry-proto/src/transform/logs.rs | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/opentelemetry-proto/src/transform/logs.rs b/opentelemetry-proto/src/transform/logs.rs index 11dc27c8de..a275d5c483 100644 --- a/opentelemetry-proto/src/transform/logs.rs +++ b/opentelemetry-proto/src/transform/logs.rs @@ -5,7 +5,7 @@ use std::borrow::Cow; pub mod tonic { use std::collections::BTreeMap; - use opentelemetry::sdk::log::{Any, Severity}; + use opentelemetry::sdk::logs::{Any, Severity}; use crate::{ tonic::{ @@ -92,8 +92,8 @@ pub mod tonic { .collect() } - impl From for LogRecord { - fn from(log_record: opentelemetry::sdk::log::LogRecord) -> Self { + impl From for LogRecord { + fn from(log_record: opentelemetry::sdk::logs::LogRecord) -> Self { let trace_context = log_record.trace_context.as_ref(); let record = LogRecord { @@ -135,8 +135,8 @@ pub mod tonic { } } - impl From for ResourceLogs { - fn from(log_data: opentelemetry::sdk::export::log::LogData) -> Self { + impl From for ResourceLogs { + fn from(log_data: opentelemetry::sdk::export::logs::LogData) -> Self { ResourceLogs { resource: Some(Resource { attributes: resource_attributes(log_data.resource.as_ref().map(AsRef::as_ref)) @@ -171,7 +171,7 @@ pub mod grpcio { }, transform::common::grpcio::resource_attributes, }; - use opentelemetry::sdk::log::{Any, Severity}; + use opentelemetry::sdk::logs::{Any, Severity}; use protobuf::{RepeatedField, SingularPtrField}; use super::*; @@ -259,8 +259,8 @@ pub mod grpcio { .collect() } - impl From for LogRecord { - fn from(log_record: opentelemetry::sdk::log::LogRecord) -> Self { + impl From for LogRecord { + fn from(log_record: opentelemetry::sdk::logs::LogRecord) -> Self { let trace_context = log_record.trace_context.as_ref(); LogRecord { @@ -300,8 +300,8 @@ pub mod grpcio { } } - impl From for ResourceLogs { - fn from(log_data: opentelemetry::sdk::export::log::LogData) -> Self { + impl From for ResourceLogs { + fn 
from(log_data: opentelemetry::sdk::export::logs::LogData) -> Self { ResourceLogs { resource: SingularPtrField::some(Resource { attributes: resource_attributes(log_data.resource.as_ref().map(AsRef::as_ref)) From e0fae82a4970dac23a542420b2411002a7859bec Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Sat, 7 May 2022 19:55:23 +0530 Subject: [PATCH 26/51] Remove From impls for SeverityNumber. --- opentelemetry-proto/src/transform/logs.rs | 128 ++++++++++------------ 1 file changed, 56 insertions(+), 72 deletions(-) diff --git a/opentelemetry-proto/src/transform/logs.rs b/opentelemetry-proto/src/transform/logs.rs index a275d5c483..0ba01caa9d 100644 --- a/opentelemetry-proto/src/transform/logs.rs +++ b/opentelemetry-proto/src/transform/logs.rs @@ -49,37 +49,6 @@ pub mod tonic { } } - impl From for SeverityNumber { - fn from(number: Severity) -> Self { - match number { - Severity::Trace => SeverityNumber::Trace, - Severity::Trace2 => SeverityNumber::Trace2, - Severity::Trace3 => SeverityNumber::Trace3, - Severity::Trace4 => SeverityNumber::Trace4, - Severity::Debug => SeverityNumber::Debug, - Severity::Debug2 => SeverityNumber::Debug2, - Severity::Debug3 => SeverityNumber::Debug3, - Severity::Debug4 => SeverityNumber::Debug4, - Severity::Info => SeverityNumber::Info, - Severity::Info2 => SeverityNumber::Info2, - Severity::Info3 => SeverityNumber::Info3, - Severity::Info4 => SeverityNumber::Info4, - Severity::Warn => SeverityNumber::Warn, - Severity::Warn2 => SeverityNumber::Warn2, - Severity::Warn3 => SeverityNumber::Warn3, - Severity::Warn4 => SeverityNumber::Warn4, - Severity::Error => SeverityNumber::Error, - Severity::Error2 => SeverityNumber::Error2, - Severity::Error3 => SeverityNumber::Error3, - Severity::Error4 => SeverityNumber::Error4, - Severity::Fatal => SeverityNumber::Fatal, - Severity::Fatal2 => SeverityNumber::Fatal2, - Severity::Fatal3 => SeverityNumber::Fatal3, - Severity::Fatal4 => SeverityNumber::Fatal4, - } - } - } - fn 
attributes_to_keyvalue(attributes: BTreeMap, Any>) -> Vec { attributes .into_iter() @@ -95,6 +64,33 @@ pub mod tonic { impl From for LogRecord { fn from(log_record: opentelemetry::sdk::logs::LogRecord) -> Self { let trace_context = log_record.trace_context.as_ref(); + let severity_number = match log_record.severity_number { + Some(Severity::Trace) => SeverityNumber::Trace, + Some(Severity::Trace2) => SeverityNumber::Trace2, + Some(Severity::Trace3) => SeverityNumber::Trace3, + Some(Severity::Trace4) => SeverityNumber::Trace4, + Some(Severity::Debug) => SeverityNumber::Debug, + Some(Severity::Debug2) => SeverityNumber::Debug2, + Some(Severity::Debug3) => SeverityNumber::Debug3, + Some(Severity::Debug4) => SeverityNumber::Debug4, + Some(Severity::Info) => SeverityNumber::Info, + Some(Severity::Info2) => SeverityNumber::Info2, + Some(Severity::Info3) => SeverityNumber::Info3, + Some(Severity::Info4) => SeverityNumber::Info4, + Some(Severity::Warn) => SeverityNumber::Warn, + Some(Severity::Warn2) => SeverityNumber::Warn2, + Some(Severity::Warn3) => SeverityNumber::Warn3, + Some(Severity::Warn4) => SeverityNumber::Warn4, + Some(Severity::Error) => SeverityNumber::Error, + Some(Severity::Error2) => SeverityNumber::Error2, + Some(Severity::Error3) => SeverityNumber::Error3, + Some(Severity::Error4) => SeverityNumber::Error4, + Some(Severity::Fatal) => SeverityNumber::Fatal, + Some(Severity::Fatal2) => SeverityNumber::Fatal2, + Some(Severity::Fatal3) => SeverityNumber::Fatal3, + Some(Severity::Fatal4) => SeverityNumber::Fatal4, + None => SeverityNumber::Unspecified + }; let record = LogRecord { time_unix_nano: log_record.timestamp.map(to_nanos).unwrap_or_default(), @@ -102,11 +98,7 @@ pub mod tonic { .observed_timestamp .map(to_nanos) .unwrap_or_default(), - severity_number: log_record - .severity_number - .map(SeverityNumber::from) - .map(Into::into) - .unwrap_or_default(), + severity_number: severity_number.into(), severity_text: 
log_record.severity_text.map(Into::into).unwrap_or_default(), body: Some(AnyValue { value: log_record.body.map(Into::into), @@ -214,37 +206,6 @@ pub mod grpcio { } } - impl From for SeverityNumber { - fn from(number: Severity) -> Self { - match number { - Severity::Trace => SeverityNumber::SEVERITY_NUMBER_TRACE, - Severity::Trace2 => SeverityNumber::SEVERITY_NUMBER_TRACE2, - Severity::Trace3 => SeverityNumber::SEVERITY_NUMBER_TRACE3, - Severity::Trace4 => SeverityNumber::SEVERITY_NUMBER_TRACE4, - Severity::Debug => SeverityNumber::SEVERITY_NUMBER_DEBUG, - Severity::Debug2 => SeverityNumber::SEVERITY_NUMBER_DEBUG2, - Severity::Debug3 => SeverityNumber::SEVERITY_NUMBER_DEBUG3, - Severity::Debug4 => SeverityNumber::SEVERITY_NUMBER_DEBUG4, - Severity::Info => SeverityNumber::SEVERITY_NUMBER_INFO, - Severity::Info2 => SeverityNumber::SEVERITY_NUMBER_INFO2, - Severity::Info3 => SeverityNumber::SEVERITY_NUMBER_INFO3, - Severity::Info4 => SeverityNumber::SEVERITY_NUMBER_INFO4, - Severity::Warn => SeverityNumber::SEVERITY_NUMBER_WARN, - Severity::Warn2 => SeverityNumber::SEVERITY_NUMBER_WARN2, - Severity::Warn3 => SeverityNumber::SEVERITY_NUMBER_WARN3, - Severity::Warn4 => SeverityNumber::SEVERITY_NUMBER_WARN4, - Severity::Error => SeverityNumber::SEVERITY_NUMBER_ERROR, - Severity::Error2 => SeverityNumber::SEVERITY_NUMBER_ERROR2, - Severity::Error3 => SeverityNumber::SEVERITY_NUMBER_ERROR3, - Severity::Error4 => SeverityNumber::SEVERITY_NUMBER_ERROR4, - Severity::Fatal => SeverityNumber::SEVERITY_NUMBER_FATAL, - Severity::Fatal2 => SeverityNumber::SEVERITY_NUMBER_FATAL2, - Severity::Fatal3 => SeverityNumber::SEVERITY_NUMBER_FATAL3, - Severity::Fatal4 => SeverityNumber::SEVERITY_NUMBER_FATAL4, - } - } - } - fn attributes_to_keyvalue(attributes: BTreeMap, Any>) -> Vec { attributes .into_iter() @@ -262,14 +223,37 @@ pub mod grpcio { impl From for LogRecord { fn from(log_record: opentelemetry::sdk::logs::LogRecord) -> Self { let trace_context = 
log_record.trace_context.as_ref(); + let severity_number = match log_record.severity_number { + Some(Severity::Trace) => SeverityNumber::SEVERITY_NUMBER_TRACE, + Some(Severity::Trace2) => SeverityNumber::SEVERITY_NUMBER_TRACE2, + Some(Severity::Trace3) => SeverityNumber::SEVERITY_NUMBER_TRACE3, + Some(Severity::Trace4) => SeverityNumber::SEVERITY_NUMBER_TRACE4, + Some(Severity::Debug) => SeverityNumber::SEVERITY_NUMBER_DEBUG, + Some(Severity::Debug2) => SeverityNumber::SEVERITY_NUMBER_DEBUG2, + Some(Severity::Debug3) => SeverityNumber::SEVERITY_NUMBER_DEBUG3, + Some(Severity::Debug4) => SeverityNumber::SEVERITY_NUMBER_DEBUG4, + Some(Severity::Info) => SeverityNumber::SEVERITY_NUMBER_INFO, + Some(Severity::Info2) => SeverityNumber::SEVERITY_NUMBER_INFO2, + Some(Severity::Info3) => SeverityNumber::SEVERITY_NUMBER_INFO3, + Some(Severity::Info4) => SeverityNumber::SEVERITY_NUMBER_INFO4, + Some(Severity::Warn) => SeverityNumber::SEVERITY_NUMBER_WARN, + Some(Severity::Warn2) => SeverityNumber::SEVERITY_NUMBER_WARN2, + Some(Severity::Warn3) => SeverityNumber::SEVERITY_NUMBER_WARN3, + Some(Severity::Warn4) => SeverityNumber::SEVERITY_NUMBER_WARN4, + Some(Severity::Error) => SeverityNumber::SEVERITY_NUMBER_ERROR, + Some(Severity::Error2) => SeverityNumber::SEVERITY_NUMBER_ERROR2, + Some(Severity::Error3) => SeverityNumber::SEVERITY_NUMBER_ERROR3, + Some(Severity::Error4) => SeverityNumber::SEVERITY_NUMBER_ERROR4, + Some(Severity::Fatal) => SeverityNumber::SEVERITY_NUMBER_FATAL, + Some(Severity::Fatal2) => SeverityNumber::SEVERITY_NUMBER_FATAL2, + Some(Severity::Fatal3) => SeverityNumber::SEVERITY_NUMBER_FATAL3, + Some(Severity::Fatal4) => SeverityNumber::SEVERITY_NUMBER_FATAL4, + None => SeverityNumber::SEVERITY_NUMBER_UNSPECIFIED, + }; LogRecord { time_unix_nano: log_record.timestamp.map(to_nanos).unwrap_or(0), - severity_number: log_record - .severity_number - .map(SeverityNumber::from) - .map(Into::into) - .unwrap_or_default(), + severity_number, severity_text: 
log_record.severity_text.map(Into::into).unwrap_or_default(), body: SingularPtrField::some(AnyValue { value: log_record.body.map(Into::into), From 4777062e2c026d26efd076b992e72c86c12075b3 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Sat, 7 May 2022 19:58:47 +0530 Subject: [PATCH 27/51] log_emitter: Set emitter version as None. --- opentelemetry-sdk/src/logs/log_emitter.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opentelemetry-sdk/src/logs/log_emitter.rs b/opentelemetry-sdk/src/logs/log_emitter.rs index 9e73e8d563..3b63b00c9b 100644 --- a/opentelemetry-sdk/src/logs/log_emitter.rs +++ b/opentelemetry-sdk/src/logs/log_emitter.rs @@ -37,7 +37,7 @@ impl LogEmitterProvider { /// Create a new `LogEmitter`. pub fn log_emitter(&self, name: impl Into>) -> LogEmitter { - self.versioned_log_emitter(name, Some(env!("CARGO_PKG_VERSION"))) + self.versioned_log_emitter(name, None) } /// Create a new version `LogEmitter` instance. From a8e64d6c2ff526510679166ef63c190e15306c33 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Sat, 7 May 2022 22:53:34 +0530 Subject: [PATCH 28/51] Rename attributes_to_keyvalue to attributes_to_key_value Co-authored-by: Zhongyang Wu --- opentelemetry-proto/src/transform/logs.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opentelemetry-proto/src/transform/logs.rs b/opentelemetry-proto/src/transform/logs.rs index 0ba01caa9d..53c641ae41 100644 --- a/opentelemetry-proto/src/transform/logs.rs +++ b/opentelemetry-proto/src/transform/logs.rs @@ -49,7 +49,7 @@ pub mod tonic { } } - fn attributes_to_keyvalue(attributes: BTreeMap, Any>) -> Vec { + fn attributes_to_key_value(attributes: BTreeMap, Any>) -> Vec { attributes .into_iter() .map(|(key, value)| KeyValue { From fbe8c4e204116a4517181fa960d734f8c3c40e5c Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Mon, 24 Apr 2023 16:50:24 +0530 Subject: [PATCH 29/51] Update logs --- opentelemetry-proto/src/proto/grpcio/logs.rs | 249 +++++++--------- 
.../src/proto/grpcio/logs_service.rs | 278 +++++++++++++++++- 2 files changed, 372 insertions(+), 155 deletions(-) diff --git a/opentelemetry-proto/src/proto/grpcio/logs.rs b/opentelemetry-proto/src/proto/grpcio/logs.rs index f334dd1340..2fa6bd55f3 100644 --- a/opentelemetry-proto/src/proto/grpcio/logs.rs +++ b/opentelemetry-proto/src/proto/grpcio/logs.rs @@ -1,4 +1,4 @@ -// This file is generated by rust-protobuf 2.27.1. Do not edit +// This file is generated by rust-protobuf 2.28.0. Do not edit // @generated // https://github.com/rust-lang/rust-clippy/issues/702 @@ -21,7 +21,7 @@ /// Generated files are compatible only with the same version /// of protobuf runtime. -// const _PROTOBUF_VERSION_CHECK: () = ::protobuf::VERSION_2_27_1; +// const _PROTOBUF_VERSION_CHECK: () = ::protobuf::VERSION_2_28_0; #[derive(PartialEq,Clone,Default)] #[cfg_attr(feature = "with-serde", derive(::serde::Serialize, ::serde::Deserialize))] @@ -197,7 +197,7 @@ impl ::protobuf::reflect::ProtobufValue for LogsData { pub struct ResourceLogs { // message fields pub resource: ::protobuf::SingularPtrField, - pub instrumentation_library_logs: ::protobuf::RepeatedField, + pub scope_logs: ::protobuf::RepeatedField, pub schema_url: ::std::string::String, // special fields #[cfg_attr(feature = "with-serde", serde(skip))] @@ -250,29 +250,29 @@ impl ResourceLogs { self.resource.take().unwrap_or_else(|| super::resource::Resource::new()) } - // repeated .opentelemetry.proto.logs.v1.InstrumentationLibraryLogs instrumentation_library_logs = 2; + // repeated .opentelemetry.proto.logs.v1.ScopeLogs scope_logs = 2; - pub fn get_instrumentation_library_logs(&self) -> &[InstrumentationLibraryLogs] { - &self.instrumentation_library_logs + pub fn get_scope_logs(&self) -> &[ScopeLogs] { + &self.scope_logs } - pub fn clear_instrumentation_library_logs(&mut self) { - self.instrumentation_library_logs.clear(); + pub fn clear_scope_logs(&mut self) { + self.scope_logs.clear(); } // Param is passed by value, 
moved - pub fn set_instrumentation_library_logs(&mut self, v: ::protobuf::RepeatedField) { - self.instrumentation_library_logs = v; + pub fn set_scope_logs(&mut self, v: ::protobuf::RepeatedField) { + self.scope_logs = v; } // Mutable pointer to the field. - pub fn mut_instrumentation_library_logs(&mut self) -> &mut ::protobuf::RepeatedField { - &mut self.instrumentation_library_logs + pub fn mut_scope_logs(&mut self) -> &mut ::protobuf::RepeatedField { + &mut self.scope_logs } // Take field - pub fn take_instrumentation_library_logs(&mut self) -> ::protobuf::RepeatedField { - ::std::mem::replace(&mut self.instrumentation_library_logs, ::protobuf::RepeatedField::new()) + pub fn take_scope_logs(&mut self) -> ::protobuf::RepeatedField { + ::std::mem::replace(&mut self.scope_logs, ::protobuf::RepeatedField::new()) } // string schema_url = 3; @@ -309,7 +309,7 @@ impl ::protobuf::Message for ResourceLogs { return false; } }; - for v in &self.instrumentation_library_logs { + for v in &self.scope_logs { if !v.is_initialized() { return false; } @@ -325,7 +325,7 @@ impl ::protobuf::Message for ResourceLogs { ::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.resource)?; }, 2 => { - ::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.instrumentation_library_logs)?; + ::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.scope_logs)?; }, 3 => { ::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.schema_url)?; @@ -346,7 +346,7 @@ impl ::protobuf::Message for ResourceLogs { let len = v.compute_size(); my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len; } - for value in &self.instrumentation_library_logs { + for value in &self.scope_logs { let len = value.compute_size(); my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len; }; @@ -364,7 +364,7 @@ impl ::protobuf::Message for ResourceLogs { os.write_raw_varint32(v.get_cached_size())?; v.write_to_with_cached_sizes(os)?; } - for v 
in &self.instrumentation_library_logs { + for v in &self.scope_logs { os.write_tag(2, ::protobuf::wire_format::WireTypeLengthDelimited)?; os.write_raw_varint32(v.get_cached_size())?; v.write_to_with_cached_sizes(os)?; @@ -415,10 +415,10 @@ impl ::protobuf::Message for ResourceLogs { |m: &ResourceLogs| { &m.resource }, |m: &mut ResourceLogs| { &mut m.resource }, )); - fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage>( - "instrumentation_library_logs", - |m: &ResourceLogs| { &m.instrumentation_library_logs }, - |m: &mut ResourceLogs| { &mut m.instrumentation_library_logs }, + fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage>( + "scope_logs", + |m: &ResourceLogs| { &m.scope_logs }, + |m: &mut ResourceLogs| { &mut m.scope_logs }, )); fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>( "schema_url", @@ -442,7 +442,7 @@ impl ::protobuf::Message for ResourceLogs { impl ::protobuf::Clear for ResourceLogs { fn clear(&mut self) { self.resource.clear(); - self.instrumentation_library_logs.clear(); + self.scope_logs.clear(); self.schema_url.clear(); self.unknown_fields.clear(); } @@ -462,9 +462,9 @@ impl ::protobuf::reflect::ProtobufValue for ResourceLogs { #[derive(PartialEq,Clone,Default)] #[cfg_attr(feature = "with-serde", derive(::serde::Serialize, ::serde::Deserialize))] -pub struct InstrumentationLibraryLogs { +pub struct ScopeLogs { // message fields - pub instrumentation_library: ::protobuf::SingularPtrField, + pub scope: ::protobuf::SingularPtrField, pub log_records: ::protobuf::RepeatedField, pub schema_url: ::std::string::String, // special fields @@ -474,48 +474,48 @@ pub struct InstrumentationLibraryLogs { pub cached_size: ::protobuf::CachedSize, } -impl<'a> ::std::default::Default for &'a InstrumentationLibraryLogs { - fn default() -> &'a InstrumentationLibraryLogs { - 
::default_instance() +impl<'a> ::std::default::Default for &'a ScopeLogs { + fn default() -> &'a ScopeLogs { + ::default_instance() } } -impl InstrumentationLibraryLogs { - pub fn new() -> InstrumentationLibraryLogs { +impl ScopeLogs { + pub fn new() -> ScopeLogs { ::std::default::Default::default() } - // .opentelemetry.proto.common.v1.InstrumentationLibrary instrumentation_library = 1; + // .opentelemetry.proto.common.v1.InstrumentationScope scope = 1; - pub fn get_instrumentation_library(&self) -> &super::common::InstrumentationLibrary { - self.instrumentation_library.as_ref().unwrap_or_else(|| ::default_instance()) + pub fn get_scope(&self) -> &super::common::InstrumentationScope { + self.scope.as_ref().unwrap_or_else(|| ::default_instance()) } - pub fn clear_instrumentation_library(&mut self) { - self.instrumentation_library.clear(); + pub fn clear_scope(&mut self) { + self.scope.clear(); } - pub fn has_instrumentation_library(&self) -> bool { - self.instrumentation_library.is_some() + pub fn has_scope(&self) -> bool { + self.scope.is_some() } // Param is passed by value, moved - pub fn set_instrumentation_library(&mut self, v: super::common::InstrumentationLibrary) { - self.instrumentation_library = ::protobuf::SingularPtrField::some(v); + pub fn set_scope(&mut self, v: super::common::InstrumentationScope) { + self.scope = ::protobuf::SingularPtrField::some(v); } // Mutable pointer to the field. // If field is not initialized, it is initialized with default value first. 
- pub fn mut_instrumentation_library(&mut self) -> &mut super::common::InstrumentationLibrary { - if self.instrumentation_library.is_none() { - self.instrumentation_library.set_default(); + pub fn mut_scope(&mut self) -> &mut super::common::InstrumentationScope { + if self.scope.is_none() { + self.scope.set_default(); } - self.instrumentation_library.as_mut().unwrap() + self.scope.as_mut().unwrap() } // Take field - pub fn take_instrumentation_library(&mut self) -> super::common::InstrumentationLibrary { - self.instrumentation_library.take().unwrap_or_else(|| super::common::InstrumentationLibrary::new()) + pub fn take_scope(&mut self) -> super::common::InstrumentationScope { + self.scope.take().unwrap_or_else(|| super::common::InstrumentationScope::new()) } // repeated .opentelemetry.proto.logs.v1.LogRecord log_records = 2; @@ -570,9 +570,9 @@ impl InstrumentationLibraryLogs { } } -impl ::protobuf::Message for InstrumentationLibraryLogs { +impl ::protobuf::Message for ScopeLogs { fn is_initialized(&self) -> bool { - for v in &self.instrumentation_library { + for v in &self.scope { if !v.is_initialized() { return false; } @@ -590,7 +590,7 @@ impl ::protobuf::Message for InstrumentationLibraryLogs { let (field_number, wire_type) = is.read_tag_unpack()?; match field_number { 1 => { - ::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.instrumentation_library)?; + ::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.scope)?; }, 2 => { ::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.log_records)?; @@ -610,7 +610,7 @@ impl ::protobuf::Message for InstrumentationLibraryLogs { #[allow(unused_variables)] fn compute_size(&self) -> u32 { let mut my_size = 0; - if let Some(ref v) = self.instrumentation_library.as_ref() { + if let Some(ref v) = self.scope.as_ref() { let len = v.compute_size(); my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len; } @@ -627,7 +627,7 @@ impl ::protobuf::Message for 
InstrumentationLibraryLogs { } fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> { - if let Some(ref v) = self.instrumentation_library.as_ref() { + if let Some(ref v) = self.scope.as_ref() { os.write_tag(1, ::protobuf::wire_format::WireTypeLengthDelimited)?; os.write_raw_varint32(v.get_cached_size())?; v.write_to_with_cached_sizes(os)?; @@ -670,59 +670,59 @@ impl ::protobuf::Message for InstrumentationLibraryLogs { Self::descriptor_static() } - fn new() -> InstrumentationLibraryLogs { - InstrumentationLibraryLogs::new() + fn new() -> ScopeLogs { + ScopeLogs::new() } fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor { static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT; descriptor.get(|| { let mut fields = ::std::vec::Vec::new(); - fields.push(::protobuf::reflect::accessor::make_singular_ptr_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage>( - "instrumentation_library", - |m: &InstrumentationLibraryLogs| { &m.instrumentation_library }, - |m: &mut InstrumentationLibraryLogs| { &mut m.instrumentation_library }, + fields.push(::protobuf::reflect::accessor::make_singular_ptr_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage>( + "scope", + |m: &ScopeLogs| { &m.scope }, + |m: &mut ScopeLogs| { &mut m.scope }, )); fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage>( "log_records", - |m: &InstrumentationLibraryLogs| { &m.log_records }, - |m: &mut InstrumentationLibraryLogs| { &mut m.log_records }, + |m: &ScopeLogs| { &m.log_records }, + |m: &mut ScopeLogs| { &mut m.log_records }, )); fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>( "schema_url", - |m: &InstrumentationLibraryLogs| { &m.schema_url }, - |m: &mut InstrumentationLibraryLogs| { &mut m.schema_url }, + |m: &ScopeLogs| { 
&m.schema_url }, + |m: &mut ScopeLogs| { &mut m.schema_url }, )); - ::protobuf::reflect::MessageDescriptor::new_pb_name::( - "InstrumentationLibraryLogs", + ::protobuf::reflect::MessageDescriptor::new_pb_name::( + "ScopeLogs", fields, file_descriptor_proto() ) }) } - fn default_instance() -> &'static InstrumentationLibraryLogs { - static instance: ::protobuf::rt::LazyV2 = ::protobuf::rt::LazyV2::INIT; - instance.get(InstrumentationLibraryLogs::new) + fn default_instance() -> &'static ScopeLogs { + static instance: ::protobuf::rt::LazyV2 = ::protobuf::rt::LazyV2::INIT; + instance.get(ScopeLogs::new) } } -impl ::protobuf::Clear for InstrumentationLibraryLogs { +impl ::protobuf::Clear for ScopeLogs { fn clear(&mut self) { - self.instrumentation_library.clear(); + self.scope.clear(); self.log_records.clear(); self.schema_url.clear(); self.unknown_fields.clear(); } } -impl ::std::fmt::Debug for InstrumentationLibraryLogs { +impl ::std::fmt::Debug for ScopeLogs { fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { ::protobuf::text_format::fmt(self, f) } } -impl ::protobuf::reflect::ProtobufValue for InstrumentationLibraryLogs { +impl ::protobuf::reflect::ProtobufValue for ScopeLogs { fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef { ::protobuf::reflect::ReflectValueRef::Message(self) } @@ -736,7 +736,6 @@ pub struct LogRecord { pub observed_time_unix_nano: u64, pub severity_number: SeverityNumber, pub severity_text: ::std::string::String, - pub name: ::std::string::String, pub body: ::protobuf::SingularPtrField, pub attributes: ::protobuf::RepeatedField, pub dropped_attributes_count: u32, @@ -832,32 +831,6 @@ impl LogRecord { ::std::mem::replace(&mut self.severity_text, ::std::string::String::new()) } - // string name = 4; - - - pub fn get_name(&self) -> &str { - &self.name - } - pub fn clear_name(&mut self) { - self.name.clear(); - } - - // Param is passed by value, moved - pub fn set_name(&mut self, v: ::std::string::String) { - self.name 
= v; - } - - // Mutable pointer to the field. - // If field is not initialized, it is initialized with default value first. - pub fn mut_name(&mut self) -> &mut ::std::string::String { - &mut self.name - } - - // Take field - pub fn take_name(&mut self) -> ::std::string::String { - ::std::mem::replace(&mut self.name, ::std::string::String::new()) - } - // .opentelemetry.proto.common.v1.AnyValue body = 5; @@ -1038,9 +1011,6 @@ impl ::protobuf::Message for LogRecord { 3 => { ::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.severity_text)?; }, - 4 => { - ::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.name)?; - }, 5 => { ::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.body)?; }, @@ -1091,9 +1061,6 @@ impl ::protobuf::Message for LogRecord { if !self.severity_text.is_empty() { my_size += ::protobuf::rt::string_size(3, &self.severity_text); } - if !self.name.is_empty() { - my_size += ::protobuf::rt::string_size(4, &self.name); - } if let Some(ref v) = self.body.as_ref() { let len = v.compute_size(); my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len; @@ -1132,9 +1099,6 @@ impl ::protobuf::Message for LogRecord { if !self.severity_text.is_empty() { os.write_string(3, &self.severity_text)?; } - if !self.name.is_empty() { - os.write_string(4, &self.name)?; - } if let Some(ref v) = self.body.as_ref() { os.write_tag(5, ::protobuf::wire_format::WireTypeLengthDelimited)?; os.write_raw_varint32(v.get_cached_size())?; @@ -1215,11 +1179,6 @@ impl ::protobuf::Message for LogRecord { |m: &LogRecord| { &m.severity_text }, |m: &mut LogRecord| { &mut m.severity_text }, )); - fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>( - "name", - |m: &LogRecord| { &m.name }, - |m: &mut LogRecord| { &mut m.name }, - )); fields.push(::protobuf::reflect::accessor::make_singular_ptr_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage>( 
"body", |m: &LogRecord| { &m.body }, @@ -1270,7 +1229,6 @@ impl ::protobuf::Clear for LogRecord { self.observed_time_unix_nano = 0; self.severity_number = SeverityNumber::SEVERITY_NUMBER_UNSPECIFIED; self.severity_text.clear(); - self.name.clear(); self.body.clear(); self.attributes.clear(); self.dropped_attributes_count = 0; @@ -1469,46 +1427,45 @@ static file_descriptor_proto_data: &'static [u8] = b"\ s.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/pr\ oto/resource/v1/resource.proto\"Z\n\x08LogsData\x12N\n\rresource_logs\ \x18\x01\x20\x03(\x0b2).opentelemetry.proto.logs.v1.ResourceLogsR\x0cres\ - ourceLogs\"\xef\x01\n\x0cResourceLogs\x12E\n\x08resource\x18\x01\x20\x01\ - (\x0b2).opentelemetry.proto.resource.v1.ResourceR\x08resource\x12y\n\x1c\ - instrumentation_library_logs\x18\x02\x20\x03(\x0b27.opentelemetry.proto.\ - logs.v1.InstrumentationLibraryLogsR\x1ainstrumentationLibraryLogs\x12\ - \x1d\n\nschema_url\x18\x03\x20\x01(\tR\tschemaUrl\"\xf4\x01\n\x1aInstrum\ - entationLibraryLogs\x12n\n\x17instrumentation_library\x18\x01\x20\x01(\ - \x0b25.opentelemetry.proto.common.v1.InstrumentationLibraryR\x16instrume\ - ntationLibrary\x12G\n\x0blog_records\x18\x02\x20\x03(\x0b2&.opentelemetr\ - y.proto.logs.v1.LogRecordR\nlogRecords\x12\x1d\n\nschema_url\x18\x03\x20\ - \x01(\tR\tschemaUrl\"\x85\x04\n\tLogRecord\x12$\n\x0etime_unix_nano\x18\ - \x01\x20\x01(\x06R\x0ctimeUnixNano\x125\n\x17observed_time_unix_nano\x18\ - \x0b\x20\x01(\x06R\x14observedTimeUnixNano\x12T\n\x0fseverity_number\x18\ - \x02\x20\x01(\x0e2+.opentelemetry.proto.logs.v1.SeverityNumberR\x0esever\ - ityNumber\x12#\n\rseverity_text\x18\x03\x20\x01(\tR\x0cseverityText\x12\ - \x16\n\x04name\x18\x04\x20\x01(\tR\x04nameB\x02\x18\x01\x12;\n\x04body\ + ourceLogs\"\xc3\x01\n\x0cResourceLogs\x12E\n\x08resource\x18\x01\x20\x01\ + (\x0b2).opentelemetry.proto.resource.v1.ResourceR\x08resource\x12E\n\nsc\ + ope_logs\x18\x02\x20\x03(\x0b2&.opentelemetry.proto.logs.v1.ScopeLogsR\t\ + 
scopeLogs\x12\x1d\n\nschema_url\x18\x03\x20\x01(\tR\tschemaUrlJ\x06\x08\ + \xe8\x07\x10\xe9\x07\"\xbe\x01\n\tScopeLogs\x12I\n\x05scope\x18\x01\x20\ + \x01(\x0b23.opentelemetry.proto.common.v1.InstrumentationScopeR\x05scope\ + \x12G\n\x0blog_records\x18\x02\x20\x03(\x0b2&.opentelemetry.proto.logs.v\ + 1.LogRecordR\nlogRecords\x12\x1d\n\nschema_url\x18\x03\x20\x01(\tR\tsche\ + maUrl\"\xf3\x03\n\tLogRecord\x12$\n\x0etime_unix_nano\x18\x01\x20\x01(\ + \x06R\x0ctimeUnixNano\x125\n\x17observed_time_unix_nano\x18\x0b\x20\x01(\ + \x06R\x14observedTimeUnixNano\x12T\n\x0fseverity_number\x18\x02\x20\x01(\ + \x0e2+.opentelemetry.proto.logs.v1.SeverityNumberR\x0eseverityNumber\x12\ + #\n\rseverity_text\x18\x03\x20\x01(\tR\x0cseverityText\x12;\n\x04body\ \x18\x05\x20\x01(\x0b2'.opentelemetry.proto.common.v1.AnyValueR\x04body\ \x12G\n\nattributes\x18\x06\x20\x03(\x0b2'.opentelemetry.proto.common.v1\ .KeyValueR\nattributes\x128\n\x18dropped_attributes_count\x18\x07\x20\ \x01(\rR\x16droppedAttributesCount\x12\x14\n\x05flags\x18\x08\x20\x01(\ \x07R\x05flags\x12\x19\n\x08trace_id\x18\t\x20\x01(\x0cR\x07traceId\x12\ - \x17\n\x07span_id\x18\n\x20\x01(\x0cR\x06spanId*\xc3\x05\n\x0eSeverityNu\ - mber\x12\x1f\n\x1bSEVERITY_NUMBER_UNSPECIFIED\x10\0\x12\x19\n\x15SEVERIT\ - Y_NUMBER_TRACE\x10\x01\x12\x1a\n\x16SEVERITY_NUMBER_TRACE2\x10\x02\x12\ - \x1a\n\x16SEVERITY_NUMBER_TRACE3\x10\x03\x12\x1a\n\x16SEVERITY_NUMBER_TR\ - ACE4\x10\x04\x12\x19\n\x15SEVERITY_NUMBER_DEBUG\x10\x05\x12\x1a\n\x16SEV\ - ERITY_NUMBER_DEBUG2\x10\x06\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG3\x10\x07\ - \x12\x1a\n\x16SEVERITY_NUMBER_DEBUG4\x10\x08\x12\x18\n\x14SEVERITY_NUMBE\ - R_INFO\x10\t\x12\x19\n\x15SEVERITY_NUMBER_INFO2\x10\n\x12\x19\n\x15SEVER\ - ITY_NUMBER_INFO3\x10\x0b\x12\x19\n\x15SEVERITY_NUMBER_INFO4\x10\x0c\x12\ - \x18\n\x14SEVERITY_NUMBER_WARN\x10\r\x12\x19\n\x15SEVERITY_NUMBER_WARN2\ - \x10\x0e\x12\x19\n\x15SEVERITY_NUMBER_WARN3\x10\x0f\x12\x19\n\x15SEVERIT\ - 
Y_NUMBER_WARN4\x10\x10\x12\x19\n\x15SEVERITY_NUMBER_ERROR\x10\x11\x12\ - \x1a\n\x16SEVERITY_NUMBER_ERROR2\x10\x12\x12\x1a\n\x16SEVERITY_NUMBER_ER\ - ROR3\x10\x13\x12\x1a\n\x16SEVERITY_NUMBER_ERROR4\x10\x14\x12\x19\n\x15SE\ - VERITY_NUMBER_FATAL\x10\x15\x12\x1a\n\x16SEVERITY_NUMBER_FATAL2\x10\x16\ - \x12\x1a\n\x16SEVERITY_NUMBER_FATAL3\x10\x17\x12\x1a\n\x16SEVERITY_NUMBE\ - R_FATAL4\x10\x18*X\n\x0eLogRecordFlags\x12\x1f\n\x1bLOG_RECORD_FLAG_UNSP\ - ECIFIED\x10\0\x12%\n\x20LOG_RECORD_FLAG_TRACE_FLAGS_MASK\x10\xff\x01BU\n\ - \x1eio.opentelemetry.proto.logs.v1B\tLogsProtoP\x01Z&go.opentelemetry.io\ - /proto/otlp/logs/v1b\x06proto3\ + \x17\n\x07span_id\x18\n\x20\x01(\x0cR\x06spanIdJ\x04\x08\x04\x10\x05*\ + \xc3\x05\n\x0eSeverityNumber\x12\x1f\n\x1bSEVERITY_NUMBER_UNSPECIFIED\ + \x10\0\x12\x19\n\x15SEVERITY_NUMBER_TRACE\x10\x01\x12\x1a\n\x16SEVERITY_\ + NUMBER_TRACE2\x10\x02\x12\x1a\n\x16SEVERITY_NUMBER_TRACE3\x10\x03\x12\ + \x1a\n\x16SEVERITY_NUMBER_TRACE4\x10\x04\x12\x19\n\x15SEVERITY_NUMBER_DE\ + BUG\x10\x05\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG2\x10\x06\x12\x1a\n\x16SEV\ + ERITY_NUMBER_DEBUG3\x10\x07\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG4\x10\x08\ + \x12\x18\n\x14SEVERITY_NUMBER_INFO\x10\t\x12\x19\n\x15SEVERITY_NUMBER_IN\ + FO2\x10\n\x12\x19\n\x15SEVERITY_NUMBER_INFO3\x10\x0b\x12\x19\n\x15SEVERI\ + TY_NUMBER_INFO4\x10\x0c\x12\x18\n\x14SEVERITY_NUMBER_WARN\x10\r\x12\x19\ + \n\x15SEVERITY_NUMBER_WARN2\x10\x0e\x12\x19\n\x15SEVERITY_NUMBER_WARN3\ + \x10\x0f\x12\x19\n\x15SEVERITY_NUMBER_WARN4\x10\x10\x12\x19\n\x15SEVERIT\ + Y_NUMBER_ERROR\x10\x11\x12\x1a\n\x16SEVERITY_NUMBER_ERROR2\x10\x12\x12\ + \x1a\n\x16SEVERITY_NUMBER_ERROR3\x10\x13\x12\x1a\n\x16SEVERITY_NUMBER_ER\ + ROR4\x10\x14\x12\x19\n\x15SEVERITY_NUMBER_FATAL\x10\x15\x12\x1a\n\x16SEV\ + ERITY_NUMBER_FATAL2\x10\x16\x12\x1a\n\x16SEVERITY_NUMBER_FATAL3\x10\x17\ + \x12\x1a\n\x16SEVERITY_NUMBER_FATAL4\x10\x18*X\n\x0eLogRecordFlags\x12\ + \x1f\n\x1bLOG_RECORD_FLAG_UNSPECIFIED\x10\0\x12%\n\x20LOG_RECORD_FLAG_TR\ + 
ACE_FLAGS_MASK\x10\xff\x01Bs\n\x1eio.opentelemetry.proto.logs.v1B\tLogsP\ + rotoP\x01Z&go.opentelemetry.io/proto/otlp/logs/v1\xaa\x02\x1bOpenTelemet\ + ry.Proto.Logs.V1b\x06proto3\ "; static file_descriptor_proto_lazy: ::protobuf::rt::LazyV2<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::rt::LazyV2::INIT; diff --git a/opentelemetry-proto/src/proto/grpcio/logs_service.rs b/opentelemetry-proto/src/proto/grpcio/logs_service.rs index 782eb25d5c..81010efb2c 100644 --- a/opentelemetry-proto/src/proto/grpcio/logs_service.rs +++ b/opentelemetry-proto/src/proto/grpcio/logs_service.rs @@ -1,4 +1,4 @@ -// This file is generated by rust-protobuf 2.27.1. Do not edit +// This file is generated by rust-protobuf 2.28.0. Do not edit // @generated // https://github.com/rust-lang/rust-clippy/issues/702 @@ -21,7 +21,7 @@ /// Generated files are compatible only with the same version /// of protobuf runtime. -// const _PROTOBUF_VERSION_CHECK: () = ::protobuf::VERSION_2_27_1; +// const _PROTOBUF_VERSION_CHECK: () = ::protobuf::VERSION_2_28_0; #[derive(PartialEq,Clone,Default)] #[cfg_attr(feature = "with-serde", derive(::serde::Serialize, ::serde::Deserialize))] @@ -195,6 +195,8 @@ impl ::protobuf::reflect::ProtobufValue for ExportLogsServiceRequest { #[derive(PartialEq,Clone,Default)] #[cfg_attr(feature = "with-serde", derive(::serde::Serialize, ::serde::Deserialize))] pub struct ExportLogsServiceResponse { + // message fields + pub partial_success: ::protobuf::SingularPtrField, // special fields #[cfg_attr(feature = "with-serde", serde(skip))] pub unknown_fields: ::protobuf::UnknownFields, @@ -212,10 +214,48 @@ impl ExportLogsServiceResponse { pub fn new() -> ExportLogsServiceResponse { ::std::default::Default::default() } + + // .opentelemetry.proto.collector.logs.v1.ExportLogsPartialSuccess partial_success = 1; + + + pub fn get_partial_success(&self) -> &ExportLogsPartialSuccess { + self.partial_success.as_ref().unwrap_or_else(|| ::default_instance()) + } + pub fn 
clear_partial_success(&mut self) { + self.partial_success.clear(); + } + + pub fn has_partial_success(&self) -> bool { + self.partial_success.is_some() + } + + // Param is passed by value, moved + pub fn set_partial_success(&mut self, v: ExportLogsPartialSuccess) { + self.partial_success = ::protobuf::SingularPtrField::some(v); + } + + // Mutable pointer to the field. + // If field is not initialized, it is initialized with default value first. + pub fn mut_partial_success(&mut self) -> &mut ExportLogsPartialSuccess { + if self.partial_success.is_none() { + self.partial_success.set_default(); + } + self.partial_success.as_mut().unwrap() + } + + // Take field + pub fn take_partial_success(&mut self) -> ExportLogsPartialSuccess { + self.partial_success.take().unwrap_or_else(|| ExportLogsPartialSuccess::new()) + } } impl ::protobuf::Message for ExportLogsServiceResponse { fn is_initialized(&self) -> bool { + for v in &self.partial_success { + if !v.is_initialized() { + return false; + } + }; true } @@ -223,6 +263,9 @@ impl ::protobuf::Message for ExportLogsServiceResponse { while !is.eof()? 
{ let (field_number, wire_type) = is.read_tag_unpack()?; match field_number { + 1 => { + ::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.partial_success)?; + }, _ => { ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?; }, @@ -235,12 +278,21 @@ impl ::protobuf::Message for ExportLogsServiceResponse { #[allow(unused_variables)] fn compute_size(&self) -> u32 { let mut my_size = 0; + if let Some(ref v) = self.partial_success.as_ref() { + let len = v.compute_size(); + my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len; + } my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields()); self.cached_size.set(my_size); my_size } fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> { + if let Some(ref v) = self.partial_success.as_ref() { + os.write_tag(1, ::protobuf::wire_format::WireTypeLengthDelimited)?; + os.write_raw_varint32(v.get_cached_size())?; + v.write_to_with_cached_sizes(os)?; + } os.write_unknown_fields(self.get_unknown_fields())?; ::std::result::Result::Ok(()) } @@ -278,7 +330,12 @@ impl ::protobuf::Message for ExportLogsServiceResponse { fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor { static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT; descriptor.get(|| { - let fields = ::std::vec::Vec::new(); + let mut fields = ::std::vec::Vec::new(); + fields.push(::protobuf::reflect::accessor::make_singular_ptr_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage>( + "partial_success", + |m: &ExportLogsServiceResponse| { &m.partial_success }, + |m: &mut ExportLogsServiceResponse| { &mut m.partial_success }, + )); ::protobuf::reflect::MessageDescriptor::new_pb_name::( "ExportLogsServiceResponse", fields, @@ -295,6 +352,7 @@ impl ::protobuf::Message for ExportLogsServiceResponse { impl ::protobuf::Clear for 
ExportLogsServiceResponse { fn clear(&mut self) { + self.partial_success.clear(); self.unknown_fields.clear(); } } @@ -311,16 +369,218 @@ impl ::protobuf::reflect::ProtobufValue for ExportLogsServiceResponse { } } +#[derive(PartialEq,Clone,Default)] +#[cfg_attr(feature = "with-serde", derive(::serde::Serialize, ::serde::Deserialize))] +pub struct ExportLogsPartialSuccess { + // message fields + pub rejected_log_records: i64, + pub error_message: ::std::string::String, + // special fields + #[cfg_attr(feature = "with-serde", serde(skip))] + pub unknown_fields: ::protobuf::UnknownFields, + #[cfg_attr(feature = "with-serde", serde(skip))] + pub cached_size: ::protobuf::CachedSize, +} + +impl<'a> ::std::default::Default for &'a ExportLogsPartialSuccess { + fn default() -> &'a ExportLogsPartialSuccess { + ::default_instance() + } +} + +impl ExportLogsPartialSuccess { + pub fn new() -> ExportLogsPartialSuccess { + ::std::default::Default::default() + } + + // int64 rejected_log_records = 1; + + + pub fn get_rejected_log_records(&self) -> i64 { + self.rejected_log_records + } + pub fn clear_rejected_log_records(&mut self) { + self.rejected_log_records = 0; + } + + // Param is passed by value, moved + pub fn set_rejected_log_records(&mut self, v: i64) { + self.rejected_log_records = v; + } + + // string error_message = 2; + + + pub fn get_error_message(&self) -> &str { + &self.error_message + } + pub fn clear_error_message(&mut self) { + self.error_message.clear(); + } + + // Param is passed by value, moved + pub fn set_error_message(&mut self, v: ::std::string::String) { + self.error_message = v; + } + + // Mutable pointer to the field. + // If field is not initialized, it is initialized with default value first. 
+ pub fn mut_error_message(&mut self) -> &mut ::std::string::String { + &mut self.error_message + } + + // Take field + pub fn take_error_message(&mut self) -> ::std::string::String { + ::std::mem::replace(&mut self.error_message, ::std::string::String::new()) + } +} + +impl ::protobuf::Message for ExportLogsPartialSuccess { + fn is_initialized(&self) -> bool { + true + } + + fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> { + while !is.eof()? { + let (field_number, wire_type) = is.read_tag_unpack()?; + match field_number { + 1 => { + if wire_type != ::protobuf::wire_format::WireTypeVarint { + return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type)); + } + let tmp = is.read_int64()?; + self.rejected_log_records = tmp; + }, + 2 => { + ::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.error_message)?; + }, + _ => { + ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?; + }, + }; + } + ::std::result::Result::Ok(()) + } + + // Compute sizes of nested messages + #[allow(unused_variables)] + fn compute_size(&self) -> u32 { + let mut my_size = 0; + if self.rejected_log_records != 0 { + my_size += ::protobuf::rt::value_size(1, self.rejected_log_records, ::protobuf::wire_format::WireTypeVarint); + } + if !self.error_message.is_empty() { + my_size += ::protobuf::rt::string_size(2, &self.error_message); + } + my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields()); + self.cached_size.set(my_size); + my_size + } + + fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> { + if self.rejected_log_records != 0 { + os.write_int64(1, self.rejected_log_records)?; + } + if !self.error_message.is_empty() { + os.write_string(2, &self.error_message)?; + } + os.write_unknown_fields(self.get_unknown_fields())?; + ::std::result::Result::Ok(()) + } + + fn 
get_cached_size(&self) -> u32 { + self.cached_size.get() + } + + fn get_unknown_fields(&self) -> &::protobuf::UnknownFields { + &self.unknown_fields + } + + fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields { + &mut self.unknown_fields + } + + fn as_any(&self) -> &dyn (::std::any::Any) { + self as &dyn (::std::any::Any) + } + fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) { + self as &mut dyn (::std::any::Any) + } + fn into_any(self: ::std::boxed::Box) -> ::std::boxed::Box { + self + } + + fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor { + Self::descriptor_static() + } + + fn new() -> ExportLogsPartialSuccess { + ExportLogsPartialSuccess::new() + } + + fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor { + static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT; + descriptor.get(|| { + let mut fields = ::std::vec::Vec::new(); + fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeInt64>( + "rejected_log_records", + |m: &ExportLogsPartialSuccess| { &m.rejected_log_records }, + |m: &mut ExportLogsPartialSuccess| { &mut m.rejected_log_records }, + )); + fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>( + "error_message", + |m: &ExportLogsPartialSuccess| { &m.error_message }, + |m: &mut ExportLogsPartialSuccess| { &mut m.error_message }, + )); + ::protobuf::reflect::MessageDescriptor::new_pb_name::( + "ExportLogsPartialSuccess", + fields, + file_descriptor_proto() + ) + }) + } + + fn default_instance() -> &'static ExportLogsPartialSuccess { + static instance: ::protobuf::rt::LazyV2 = ::protobuf::rt::LazyV2::INIT; + instance.get(ExportLogsPartialSuccess::new) + } +} + +impl ::protobuf::Clear for ExportLogsPartialSuccess { + fn clear(&mut self) { + self.rejected_log_records = 0; + self.error_message.clear(); + 
self.unknown_fields.clear(); + } +} + +impl ::std::fmt::Debug for ExportLogsPartialSuccess { + fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { + ::protobuf::text_format::fmt(self, f) + } +} + +impl ::protobuf::reflect::ProtobufValue for ExportLogsPartialSuccess { + fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef { + ::protobuf::reflect::ReflectValueRef::Message(self) + } +} + static file_descriptor_proto_data: &'static [u8] = b"\ \n8opentelemetry/proto/collector/logs/v1/logs_service.proto\x12%opentele\ metry.proto.collector.logs.v1\x1a&opentelemetry/proto/logs/v1/logs.proto\ \"j\n\x18ExportLogsServiceRequest\x12N\n\rresource_logs\x18\x01\x20\x03(\ - \x0b2).opentelemetry.proto.logs.v1.ResourceLogsR\x0cresourceLogs\"\x1b\n\ - \x19ExportLogsServiceResponse2\x9d\x01\n\x0bLogsService\x12\x8d\x01\n\ - \x06Export\x12?.opentelemetry.proto.collector.logs.v1.ExportLogsServiceR\ - equest\x1a@.opentelemetry.proto.collector.logs.v1.ExportLogsServiceRespo\ - nse\"\0Bp\n(io.opentelemetry.proto.collector.logs.v1B\x10LogsServiceProt\ - oP\x01Z0go.opentelemetry.io/proto/otlp/collector/logs/v1b\x06proto3\ + \x0b2).opentelemetry.proto.logs.v1.ResourceLogsR\x0cresourceLogs\"\x85\ + \x01\n\x19ExportLogsServiceResponse\x12h\n\x0fpartial_success\x18\x01\ + \x20\x01(\x0b2?.opentelemetry.proto.collector.logs.v1.ExportLogsPartialS\ + uccessR\x0epartialSuccess\"q\n\x18ExportLogsPartialSuccess\x120\n\x14rej\ + ected_log_records\x18\x01\x20\x01(\x03R\x12rejectedLogRecords\x12#\n\rer\ + ror_message\x18\x02\x20\x01(\tR\x0cerrorMessage2\x9d\x01\n\x0bLogsServic\ + e\x12\x8d\x01\n\x06Export\x12?.opentelemetry.proto.collector.logs.v1.Exp\ + ortLogsServiceRequest\x1a@.opentelemetry.proto.collector.logs.v1.ExportL\ + ogsServiceResponse\"\0B\x98\x01\n(io.opentelemetry.proto.collector.logs.\ + v1B\x10LogsServiceProtoP\x01Z0go.opentelemetry.io/proto/otlp/collector/l\ + ogs/v1\xaa\x02%OpenTelemetry.Proto.Collector.Logs.V1b\x06proto3\ "; static file_descriptor_proto_lazy: 
::protobuf::rt::LazyV2<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::rt::LazyV2::INIT; From d85295d38b806dc0df6d4c6118de5291a4ed1431 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Mon, 24 Apr 2023 17:00:29 +0530 Subject: [PATCH 30/51] Fix typos in feature names. --- opentelemetry-otlp/Cargo.toml | 2 +- opentelemetry-proto/Cargo.toml | 2 +- opentelemetry-sdk/Cargo.toml | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/opentelemetry-otlp/Cargo.toml b/opentelemetry-otlp/Cargo.toml index 221da61e65..cf61850479 100644 --- a/opentelemetry-otlp/Cargo.toml +++ b/opentelemetry-otlp/Cargo.toml @@ -62,7 +62,7 @@ tokio = { version = "1.0", features = ["macros", "rt-multi-thread"] } # telemetry pillars and functions trace = ["opentelemetry_api/trace", "opentelemetry_sdk/trace", "opentelemetry-proto/traces"] metrics = ["opentelemetry_api/metrics", "opentelemetry_sdk/metrics", "opentelemetry-proto/metrics", "grpc-tonic"] -log = ["opentelemetry/log", "opentelemetry-proto/logs"] +logs = ["opentelemetry_api/logs", "opentelemetry_sdk/logs", "opentelemetry-proto/logs"] # add ons serialize = ["serde"] diff --git a/opentelemetry-proto/Cargo.toml b/opentelemetry-proto/Cargo.toml index cb18ea6d41..14ca9a12a9 100644 --- a/opentelemetry-proto/Cargo.toml +++ b/opentelemetry-proto/Cargo.toml @@ -47,7 +47,7 @@ grpcio = { version = "0.12", optional = true } tonic = { version = "0.9.0", optional = true } prost = { version = "0.11.0", optional = true } protobuf = { version = "2.18", optional = true } # todo: update to 3.0 so we have docs for generated types. 
-opentelemetry = { version = "0.19", default-features = false, features = ["trace", "metrics", "log"], path = "../opentelemetry" } +opentelemetry = { version = "0.19", default-features = false, features = ["trace", "metrics", "logs"], path = "../opentelemetry" } futures = { version = "0.3", default-features = false, features = ["std"] } futures-util = { version = "0.3", default-features = false, features = ["std"] } serde = { version = "1.0", optional = true } diff --git a/opentelemetry-sdk/Cargo.toml b/opentelemetry-sdk/Cargo.toml index 64549fdec6..5ea6f23b39 100644 --- a/opentelemetry-sdk/Cargo.toml +++ b/opentelemetry-sdk/Cargo.toml @@ -46,8 +46,8 @@ default = ["trace"] trace = ["opentelemetry_api/trace", "crossbeam-channel", "rand", "async-trait", "percent-encoding"] jaeger_remote_sampler = ["trace", "opentelemetry-http", "http", "serde", "serde_json", "url"] metrics = ["opentelemetry_api/metrics", "regex", "ordered-float"] -log = ["opentelemetry_api/log", "crossbeam-channel", "async-trait", "serde_json"] -testing = ["opentelemetry_api/testing", "trace", "metrics", "log", "rt-async-std", "rt-tokio", "rt-tokio-current-thread", "tokio/macros", "tokio/rt-multi-thread"] +logs = ["opentelemetry_api/logs", "crossbeam-channel", "async-trait", "serde_json"] +testing = ["opentelemetry_api/testing", "trace", "metrics", "logs", "rt-async-std", "rt-tokio", "rt-tokio-current-thread", "tokio/macros", "tokio/rt-multi-thread"] rt-tokio = ["tokio", "tokio-stream"] rt-tokio-current-thread = ["tokio", "tokio-stream"] rt-async-std = ["async-std"] From b7dbee522ff5c0f078291506fbb7377848e9d581 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Mon, 24 Apr 2023 17:02:52 +0530 Subject: [PATCH 31/51] Add logs protobuf files to GRPCIO_PROTO_FILES. 
--- opentelemetry-proto/tests/grpc_build.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/opentelemetry-proto/tests/grpc_build.rs b/opentelemetry-proto/tests/grpc_build.rs index 1fe76e7d97..43f138957e 100644 --- a/opentelemetry-proto/tests/grpc_build.rs +++ b/opentelemetry-proto/tests/grpc_build.rs @@ -12,6 +12,8 @@ const GRPCIO_PROTO_FILES: &[&str] = &[ "src/proto/opentelemetry-proto/opentelemetry/proto/collector/trace/v1/trace_service.proto", "src/proto/opentelemetry-proto/opentelemetry/proto/metrics/v1/metrics.proto", "src/proto/opentelemetry-proto/opentelemetry/proto/collector/metrics/v1/metrics_service.proto", + "src/proto/opentelemetry-proto/opentelemetry/proto/logs/v1/logs.proto", + "src/proto/opentelemetry-proto/opentelemetry/proto/collector/logs/v1/logs_service.proto", "src/proto/tracez.proto", ]; const GRPCIO_INCLUDES: &[&str] = &["src/proto/opentelemetry-proto/", "src/proto"]; From 93bca4fd4c2e10e6ba107acf46ebbd70d1a188fb Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Mon, 24 Apr 2023 17:03:24 +0530 Subject: [PATCH 32/51] Update to opentelemetry-proto 0.19.0. 
--- opentelemetry-proto/src/transform/common.rs | 5 ++- opentelemetry-proto/src/transform/logs.rs | 39 ++++++++++----------- opentelemetry-proto/src/transform/traces.rs | 22 ++---------- 3 files changed, 25 insertions(+), 41 deletions(-) diff --git a/opentelemetry-proto/src/transform/common.rs b/opentelemetry-proto/src/transform/common.rs index 7c51dfba85..6fedfd2f59 100644 --- a/opentelemetry-proto/src/transform/common.rs +++ b/opentelemetry-proto/src/transform/common.rs @@ -122,7 +122,10 @@ pub mod tonic { #[cfg(feature = "gen-protoc")] pub mod grpcio { use crate::proto::grpcio::common::{AnyValue, ArrayValue, InstrumentationScope, KeyValue}; - use opentelemetry::{sdk::trace::EvictedHashMap, Array, Value}; + use opentelemetry::{ + sdk::{trace::EvictedHashMap, Resource}, + Array, Value, + }; use protobuf::RepeatedField; use std::borrow::Cow; diff --git a/opentelemetry-proto/src/transform/logs.rs b/opentelemetry-proto/src/transform/logs.rs index 53c641ae41..f5b48b9684 100644 --- a/opentelemetry-proto/src/transform/logs.rs +++ b/opentelemetry-proto/src/transform/logs.rs @@ -10,7 +10,7 @@ pub mod tonic { use crate::{ tonic::{ common::v1::{any_value::Value, AnyValue, ArrayValue, KeyValue, KeyValueList}, - logs::v1::{InstrumentationLibraryLogs, LogRecord, ResourceLogs, SeverityNumber}, + logs::v1::{LogRecord, ResourceLogs, ScopeLogs, SeverityNumber}, resource::v1::Resource, }, transform::common::tonic::resource_attributes, @@ -89,7 +89,7 @@ pub mod tonic { Some(Severity::Fatal2) => SeverityNumber::Fatal2, Some(Severity::Fatal3) => SeverityNumber::Fatal3, Some(Severity::Fatal4) => SeverityNumber::Fatal4, - None => SeverityNumber::Unspecified + None => SeverityNumber::Unspecified, }; let record = LogRecord { @@ -105,7 +105,7 @@ pub mod tonic { }), attributes: log_record .attributes - .map(attributes_to_keyvalue) + .map(attributes_to_key_value) .unwrap_or_default(), dropped_attributes_count: 0, flags: trace_context @@ -136,15 +136,16 @@ pub mod tonic { 
dropped_attributes_count: 0, }), schema_url: "".to_string(), - instrumentation_library_logs: vec![InstrumentationLibraryLogs { + scope_logs: vec![ScopeLogs { schema_url: log_data .instrumentation .schema_url .clone() .map(Into::into) .unwrap_or_default(), - instrumentation_library: Some(log_data.instrumentation.into()), + scope: Some(log_data.instrumentation.into()), log_records: vec![log_data.record.into()], + ..Default::default() }], } } @@ -158,7 +159,7 @@ pub mod grpcio { use crate::{ proto::grpcio::{ common::{AnyValue, AnyValue_oneof_value, ArrayValue, KeyValue, KeyValueList}, - logs::{InstrumentationLibraryLogs, LogRecord, ResourceLogs, SeverityNumber}, + logs::{LogRecord, ResourceLogs, ScopeLogs, SeverityNumber}, resource::Resource, }, transform::common::grpcio::resource_attributes, @@ -294,21 +295,17 @@ pub mod grpcio { ..Default::default() }), schema_url: "".to_string(), - instrumentation_library_logs: RepeatedField::from_vec(vec![ - InstrumentationLibraryLogs { - schema_url: log_data - .instrumentation - .schema_url - .clone() - .map(Into::into) - .unwrap_or_default(), - instrumentation_library: SingularPtrField::some( - log_data.instrumentation.into(), - ), - log_records: RepeatedField::from_vec(vec![log_data.record.into()]), - ..Default::default() - }, - ]), + scope_logs: RepeatedField::from_vec(vec![ScopeLogs { + schema_url: log_data + .instrumentation + .schema_url + .clone() + .map(Into::into) + .unwrap_or_default(), + scope: SingularPtrField::some(log_data.instrumentation.into()), + log_records: RepeatedField::from_vec(vec![log_data.record.into()]), + ..Default::default() + }]), ..Default::default() } } diff --git a/opentelemetry-proto/src/transform/traces.rs b/opentelemetry-proto/src/transform/traces.rs index d23ef02aca..94c4e9886f 100644 --- a/opentelemetry-proto/src/transform/traces.rs +++ b/opentelemetry-proto/src/transform/traces.rs @@ -7,7 +7,7 @@ pub mod tonic { use super::*; use crate::proto::tonic::resource::v1::Resource; use 
crate::proto::tonic::trace::v1::{span, status, ResourceSpans, ScopeSpans, Span, Status}; - use crate::transform::common::tonic::Attributes; + use crate::transform::common::tonic::{resource_attributes, Attributes}; use opentelemetry::trace; impl From for span::SpanKind { @@ -49,7 +49,7 @@ pub mod tonic { let span_kind: span::SpanKind = source_span.span_kind.into(); ResourceSpans { resource: Some(Resource { - attributes: resource_attributes(&source_span.resource).0, + attributes: resource_attributes(Some(&source_span.resource)).0, dropped_attributes_count: 0, }), schema_url: source_span @@ -107,14 +107,6 @@ pub mod tonic { } } } - - fn resource_attributes(resource: &sdk::Resource) -> Attributes { - resource - .iter() - .map(|(k, v)| opentelemetry::KeyValue::new(k.clone(), v.clone())) - .collect::>() - .into() - } } #[cfg(feature = "gen-protoc")] @@ -168,7 +160,7 @@ pub mod grpcio { fn from(source_span: SpanData) -> Self { ResourceSpans { resource: SingularPtrField::from(Some(Resource { - attributes: resource_attributes(&source_span.resource).0, + attributes: resource_attributes(Some(&source_span.resource)).0, dropped_attributes_count: 0, ..Default::default() })), @@ -231,12 +223,4 @@ pub mod grpcio { } } } - - fn resource_attributes(resource: &sdk::Resource) -> Attributes { - resource - .iter() - .map(|(k, v)| opentelemetry::KeyValue::new(k.clone(), v.clone())) - .collect::>() - .into() - } } From e20e697b50f4163b1ce0900d0ae3fffe2ee771b3 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Mon, 24 Apr 2023 17:18:37 +0530 Subject: [PATCH 33/51] Update crates/modules names. 
--- opentelemetry-proto/src/transform/common.rs | 6 +++--- opentelemetry-proto/src/transform/logs.rs | 20 ++++++++++---------- opentelemetry-sdk/src/export/logs/stdout.rs | 2 +- opentelemetry-sdk/src/logs/log_emitter.rs | 16 +++++++++------- 4 files changed, 23 insertions(+), 21 deletions(-) diff --git a/opentelemetry-proto/src/transform/common.rs b/opentelemetry-proto/src/transform/common.rs index 7819aa8396..8618e1ea74 100644 --- a/opentelemetry-proto/src/transform/common.rs +++ b/opentelemetry-proto/src/transform/common.rs @@ -109,7 +109,7 @@ pub mod tonic { resource .map(|res| { res.iter() - .map(|(k, v)| opentelemetry::KeyValue::new(k.clone(), v.clone())) + .map(|(k, v)| opentelemetry_api::KeyValue::new(k.clone(), v.clone())) .collect::>() }) .unwrap_or_default() @@ -121,7 +121,7 @@ pub mod tonic { pub mod grpcio { use crate::proto::grpcio::common::{AnyValue, ArrayValue, InstrumentationScope, KeyValue}; use opentelemetry_api::{Array, Value}; - use opentelemetry_sdk::trace::EvictedHashMap; + use opentelemetry_sdk::{trace::EvictedHashMap, Resource}; use protobuf::RepeatedField; use std::borrow::Cow; @@ -209,7 +209,7 @@ pub mod grpcio { .map(|resource| { resource .iter() - .map(|(k, v)| opentelemetry::KeyValue::new(k.clone(), v.clone())) + .map(|(k, v)| opentelemetry_api::KeyValue::new(k.clone(), v.clone())) .collect::>() }) .unwrap_or_default() diff --git a/opentelemetry-proto/src/transform/logs.rs b/opentelemetry-proto/src/transform/logs.rs index f5b48b9684..c4e9201ac6 100644 --- a/opentelemetry-proto/src/transform/logs.rs +++ b/opentelemetry-proto/src/transform/logs.rs @@ -5,7 +5,7 @@ use std::borrow::Cow; pub mod tonic { use std::collections::BTreeMap; - use opentelemetry::sdk::logs::{Any, Severity}; + use opentelemetry_sdk::logs::{Any, Severity}; use crate::{ tonic::{ @@ -61,8 +61,8 @@ pub mod tonic { .collect() } - impl From for LogRecord { - fn from(log_record: opentelemetry::sdk::logs::LogRecord) -> Self { + impl From for LogRecord { + fn from(log_record: 
opentelemetry_sdk::logs::LogRecord) -> Self { let trace_context = log_record.trace_context.as_ref(); let severity_number = match log_record.severity_number { Some(Severity::Trace) => SeverityNumber::Trace, @@ -127,8 +127,8 @@ pub mod tonic { } } - impl From for ResourceLogs { - fn from(log_data: opentelemetry::sdk::export::logs::LogData) -> Self { + impl From for ResourceLogs { + fn from(log_data: opentelemetry_sdk::export::logs::LogData) -> Self { ResourceLogs { resource: Some(Resource { attributes: resource_attributes(log_data.resource.as_ref().map(AsRef::as_ref)) @@ -164,7 +164,7 @@ pub mod grpcio { }, transform::common::grpcio::resource_attributes, }; - use opentelemetry::sdk::logs::{Any, Severity}; + use opentelemetry_sdk::logs::{Any, Severity}; use protobuf::{RepeatedField, SingularPtrField}; use super::*; @@ -221,8 +221,8 @@ pub mod grpcio { .collect() } - impl From for LogRecord { - fn from(log_record: opentelemetry::sdk::logs::LogRecord) -> Self { + impl From for LogRecord { + fn from(log_record: opentelemetry_sdk::logs::LogRecord) -> Self { let trace_context = log_record.trace_context.as_ref(); let severity_number = match log_record.severity_number { Some(Severity::Trace) => SeverityNumber::SEVERITY_NUMBER_TRACE, @@ -285,8 +285,8 @@ pub mod grpcio { } } - impl From for ResourceLogs { - fn from(log_data: opentelemetry::sdk::export::logs::LogData) -> Self { + impl From for ResourceLogs { + fn from(log_data: opentelemetry_sdk::export::logs::LogData) -> Self { ResourceLogs { resource: SingularPtrField::some(Resource { attributes: resource_attributes(log_data.resource.as_ref().map(AsRef::as_ref)) diff --git a/opentelemetry-sdk/src/export/logs/stdout.rs b/opentelemetry-sdk/src/export/logs/stdout.rs index 5996329595..98a5a60319 100644 --- a/opentelemetry-sdk/src/export/logs/stdout.rs +++ b/opentelemetry-sdk/src/export/logs/stdout.rs @@ -98,7 +98,7 @@ where } let provider = provider_builder.build(); - provider.versioned_log_emitter("opentelemetry", 
Some(env!("CARGO_PKG_VERSION"))) + provider.versioned_log_emitter("opentelemetry", Some(env!("CARGO_PKG_VERSION")), None, None) } } diff --git a/opentelemetry-sdk/src/logs/log_emitter.rs b/opentelemetry-sdk/src/logs/log_emitter.rs index 3b63b00c9b..1d6756f414 100644 --- a/opentelemetry-sdk/src/logs/log_emitter.rs +++ b/opentelemetry-sdk/src/logs/log_emitter.rs @@ -35,16 +35,13 @@ impl LogEmitterProvider { Builder::default() } - /// Create a new `LogEmitter`. - pub fn log_emitter(&self, name: impl Into>) -> LogEmitter { - self.versioned_log_emitter(name, None) - } - - /// Create a new version `LogEmitter` instance. + /// Create a new versioned `LogEmitter` instance. pub fn versioned_log_emitter( &self, name: impl Into>, version: Option<&'static str>, + schema_url: Option>, + attributes: Option>, ) -> LogEmitter { let name = name.into(); @@ -55,7 +52,12 @@ impl LogEmitterProvider { }; LogEmitter::new( - InstrumentationLibrary::new(component_name, version.map(Into::into), None), + InstrumentationLibrary::new( + component_name, + version.map(Into::into), + schema_url, + attributes, + ), Arc::downgrade(&self.inner), ) } From e77388a0ee4a2336a648b7995f1fa1ef370baa98 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Mon, 24 Apr 2023 17:23:26 +0530 Subject: [PATCH 34/51] Remove incorrect exporter example in docs. --- opentelemetry-sdk/src/export/logs/stdout.rs | 21 +-------------------- 1 file changed, 1 insertion(+), 20 deletions(-) diff --git a/opentelemetry-sdk/src/export/logs/stdout.rs b/opentelemetry-sdk/src/export/logs/stdout.rs index 98a5a60319..b2c59b53da 100644 --- a/opentelemetry-sdk/src/export/logs/stdout.rs +++ b/opentelemetry-sdk/src/export/logs/stdout.rs @@ -7,26 +7,7 @@ //! [`LogRecord`]: crate::log::LogRecord //! [`Write`]: std::io::Write //! [`Stdout`]: std::io::Stdout -//! -//! # Examples -//! -//! ```no_run -//! use opentelemetry_api::global::shutdown_tracer_provider; -//! use opentelemetry_api::trace::Tracer; -//! 
use opentelemetry_sdk::export::trace::stdout; -//! -//! fn main() { -//! let tracer = stdout::new_pipeline() -//! .with_pretty_print(true) -//! .install_simple(); -//! -//! tracer.in_span("doing_work", |cx| { -//! // Traced app logic here... -//! }); -//! -//! shutdown_tracer_provider(); // sending remaining spans -//! } -//! ``` +// TODO: Add an example for using this exporter. use crate::export::{ logs::{ExportResult, LogData, LogExporter}, ExportError, From 0d88043cbf14872900924084b5cabed280e6cebd Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Wed, 26 Apr 2023 16:27:12 +0530 Subject: [PATCH 35/51] Move stdout logs exporter to the opentelemetry-stdout crate. --- opentelemetry-sdk/src/export/logs/mod.rs | 2 -- opentelemetry-stdout/Cargo.toml | 2 ++ opentelemetry-stdout/src/lib.rs | 6 ++++ .../src/logs/mod.rs | 28 ++++++++++--------- 4 files changed, 23 insertions(+), 15 deletions(-) rename opentelemetry-sdk/src/export/logs/stdout.rs => opentelemetry-stdout/src/logs/mod.rs (83%) diff --git a/opentelemetry-sdk/src/export/logs/mod.rs b/opentelemetry-sdk/src/export/logs/mod.rs index 2aa5640053..a95e12965a 100644 --- a/opentelemetry-sdk/src/export/logs/mod.rs +++ b/opentelemetry-sdk/src/export/logs/mod.rs @@ -5,8 +5,6 @@ use opentelemetry_api::logs::LogError; use opentelemetry_api::{logs::LogResult, InstrumentationLibrary}; use std::{fmt::Debug, sync::Arc}; -pub mod stdout; - /// `LogExporter` defines the interface that log exporters should implement. 
#[async_trait] pub trait LogExporter: Send + Debug { diff --git a/opentelemetry-stdout/Cargo.toml b/opentelemetry-stdout/Cargo.toml index 3f16f5ec50..a2ea695913 100644 --- a/opentelemetry-stdout/Cargo.toml +++ b/opentelemetry-stdout/Cargo.toml @@ -18,9 +18,11 @@ rust-version = "1.60" [features] trace = ["opentelemetry_api/trace", "opentelemetry_sdk/trace", "futures-util"] metrics = ["async-trait", "opentelemetry_api/metrics", "opentelemetry_sdk/metrics"] +logs = ["opentelemetry_api/logs", "opentelemetry_sdk/logs", "async-trait", "thiserror"] [dependencies] async-trait = { version = "0.1", optional = true } +thiserror = { version = "1", optional = true } futures-util = { version = "0.3", optional = true, default-features = false } opentelemetry_api = { version = "0.19", path = "../opentelemetry-api", default_features = false } opentelemetry_sdk = { version = "0.19", path = "../opentelemetry-sdk", default_features = false } diff --git a/opentelemetry-stdout/src/lib.rs b/opentelemetry-stdout/src/lib.rs index 042463223a..7ff59d4a04 100644 --- a/opentelemetry-stdout/src/lib.rs +++ b/opentelemetry-stdout/src/lib.rs @@ -50,3 +50,9 @@ mod trace; #[cfg_attr(docsrs, doc(cfg(feature = "trace")))] #[cfg(feature = "trace")] pub use trace::*; + +#[cfg(feature = "logs")] +mod logs; +#[cfg_attr(docsrs, doc(cfg(feature = "logs")))] +#[cfg(feature = "logs")] +pub use logs::*; diff --git a/opentelemetry-sdk/src/export/logs/stdout.rs b/opentelemetry-stdout/src/logs/mod.rs similarity index 83% rename from opentelemetry-sdk/src/export/logs/stdout.rs rename to opentelemetry-stdout/src/logs/mod.rs index b2c59b53da..c3018d6b1a 100644 --- a/opentelemetry-sdk/src/export/logs/stdout.rs +++ b/opentelemetry-stdout/src/logs/mod.rs @@ -3,17 +3,20 @@ //! The stdout [`LogExporter`] writes debug printed [`LogRecord`]s to its configured //! [`Write`] instance. By default it will write to [`Stdout`]. //! -//! [`LogExporter`]: super::LogExporter -//! [`LogRecord`]: crate::log::LogRecord +//! 
[`LogExporter`]: opentelemetry_sdk::export::logs::LogExporter +//! [`LogRecord`]: crate::logs::LogRecord //! [`Write`]: std::io::Write //! [`Stdout`]: std::io::Stdout // TODO: Add an example for using this exporter. -use crate::export::{ - logs::{ExportResult, LogData, LogExporter}, - ExportError, -}; use async_trait::async_trait; use opentelemetry_api::logs::LogError; +use opentelemetry_sdk::{ + export::{ + logs::{ExportResult, LogData, LogExporter}, + ExportError, + }, + logs::{Config, LogEmitter, LogEmitterProvider}, +}; use std::fmt::Debug; use std::io::{stdout, Stdout, Write}; @@ -21,7 +24,7 @@ use std::io::{stdout, Stdout, Write}; #[derive(Debug)] pub struct PipelineBuilder { pretty_print: bool, - log_config: Option, + log_config: Option, writer: W, } @@ -48,8 +51,8 @@ impl PipelineBuilder { self } - /// Assign the SDK trace configuration. - pub fn with_trace_config(mut self, config: crate::logs::Config) -> Self { + /// Assign the SDK logs configuration. + pub fn with_logs_config(mut self, config: crate::logs::Config) -> Self { self.log_config = Some(config); self } @@ -69,11 +72,10 @@ where W: Write + Debug + Send + 'static, { /// Install the stdout exporter pipeline with the recommended defaults. - pub fn install_simple(mut self) -> crate::logs::LogEmitter { + pub fn install_simple(mut self) -> LogEmitter { let exporter = Exporter::new(self.writer, self.pretty_print); - let mut provider_builder = - crate::logs::LogEmitterProvider::builder().with_simple_exporter(exporter); + let mut provider_builder = LogEmitterProvider::builder().with_simple_exporter(exporter); if let Some(config) = self.log_config.take() { provider_builder = provider_builder.with_config(config); } @@ -85,7 +87,7 @@ where /// A [`LogExporter`] that writes to [`Stdout`] or other configured [`Write`]. 
/// -/// [`LogExporter`]: super::LogExporter +/// [`LogExporter`]: opentelemetry_sdk::export::logs::LogExporter /// [`Write`]: std::io::Write /// [`Stdout`]: std::io::Stdout #[derive(Debug)] From 3a34dea796ce28241a325619fef325b284de3310 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Thu, 27 Apr 2023 21:19:11 +0530 Subject: [PATCH 36/51] Store resource using Cow instead of Arc. --- opentelemetry-proto/src/transform/logs.rs | 105 +++++++++++++++++----- opentelemetry-sdk/src/export/logs/mod.rs | 4 +- opentelemetry-sdk/src/logs/config.rs | 4 +- opentelemetry-sdk/src/logs/log_emitter.rs | 48 +--------- 4 files changed, 92 insertions(+), 69 deletions(-) diff --git a/opentelemetry-proto/src/transform/logs.rs b/opentelemetry-proto/src/transform/logs.rs index c4e9201ac6..e8d4c3f79f 100644 --- a/opentelemetry-proto/src/transform/logs.rs +++ b/opentelemetry-proto/src/transform/logs.rs @@ -7,13 +7,10 @@ pub mod tonic { use opentelemetry_sdk::logs::{Any, Severity}; - use crate::{ - tonic::{ - common::v1::{any_value::Value, AnyValue, ArrayValue, KeyValue, KeyValueList}, - logs::v1::{LogRecord, ResourceLogs, ScopeLogs, SeverityNumber}, - resource::v1::Resource, - }, - transform::common::tonic::resource_attributes, + use crate::tonic::{ + common::v1::{any_value::Value, AnyValue, ArrayValue, KeyValue, KeyValueList}, + logs::v1::{LogRecord, ResourceLogs, ScopeLogs, SeverityNumber}, + resource::v1::Resource, }; use super::*; @@ -61,6 +58,38 @@ pub mod tonic { .collect() } + fn build_resource_key_values( + resource: &opentelemetry_sdk::Resource, + log_resource: Option, Any>>, + ) -> Vec { + let mut final_kv: Vec = Vec::new(); + + if let Some(log_resource) = log_resource { + for (key, value) in log_resource.into_iter() { + let api_key = opentelemetry_api::Key::from(key); + if resource.get(api_key.clone()).is_some() { + continue; + } + + final_kv.push(KeyValue { + key: api_key.into(), + value: Some(AnyValue { + value: Some(value.into()), + }), + }); + } + } + + for (key, value) in 
resource.iter() { + final_kv.push(KeyValue { + key: key.clone().into(), + value: Some(value.clone().into()), + }) + } + + final_kv + } + impl From for LogRecord { fn from(log_record: opentelemetry_sdk::logs::LogRecord) -> Self { let trace_context = log_record.trace_context.as_ref(); @@ -128,11 +157,13 @@ pub mod tonic { } impl From for ResourceLogs { - fn from(log_data: opentelemetry_sdk::export::logs::LogData) -> Self { + fn from(mut log_data: opentelemetry_sdk::export::logs::LogData) -> Self { ResourceLogs { resource: Some(Resource { - attributes: resource_attributes(log_data.resource.as_ref().map(AsRef::as_ref)) - .0, + attributes: build_resource_key_values( + log_data.resource.as_ref(), + log_data.record.resource.take(), + ), dropped_attributes_count: 0, }), schema_url: "".to_string(), @@ -156,13 +187,10 @@ pub mod tonic { pub mod grpcio { use std::collections::BTreeMap; - use crate::{ - proto::grpcio::{ - common::{AnyValue, AnyValue_oneof_value, ArrayValue, KeyValue, KeyValueList}, - logs::{LogRecord, ResourceLogs, ScopeLogs, SeverityNumber}, - resource::Resource, - }, - transform::common::grpcio::resource_attributes, + use crate::proto::grpcio::{ + common::{AnyValue, AnyValue_oneof_value, ArrayValue, KeyValue, KeyValueList}, + logs::{LogRecord, ResourceLogs, ScopeLogs, SeverityNumber}, + resource::Resource, }; use opentelemetry_sdk::logs::{Any, Severity}; use protobuf::{RepeatedField, SingularPtrField}; @@ -221,6 +249,41 @@ pub mod grpcio { .collect() } + fn build_resource_key_values( + resource: &opentelemetry_sdk::Resource, + log_resource: Option, Any>>, + ) -> Vec { + let mut final_kv: Vec = Vec::new(); + + if let Some(log_resource) = log_resource { + for (key, value) in log_resource.into_iter() { + let api_key = opentelemetry_api::Key::from(key); + if resource.get(api_key.clone()).is_some() { + continue; + } + + final_kv.push(KeyValue { + key: api_key.into(), + value: SingularPtrField::some(AnyValue { + value: Some(value.into()), + ..Default::default() + 
}), + ..Default::default() + }); + } + } + + for (key, value) in resource.iter() { + final_kv.push(KeyValue { + key: key.clone().into(), + value: SingularPtrField::some(value.clone().into()), + ..Default::default() + }) + } + + final_kv + } + impl From for LogRecord { fn from(log_record: opentelemetry_sdk::logs::LogRecord) -> Self { let trace_context = log_record.trace_context.as_ref(); @@ -286,11 +349,13 @@ pub mod grpcio { } impl From for ResourceLogs { - fn from(log_data: opentelemetry_sdk::export::logs::LogData) -> Self { + fn from(mut log_data: opentelemetry_sdk::export::logs::LogData) -> Self { ResourceLogs { resource: SingularPtrField::some(Resource { - attributes: resource_attributes(log_data.resource.as_ref().map(AsRef::as_ref)) - .0, + attributes: RepeatedField::from_vec(build_resource_key_values( + log_data.resource.as_ref(), + log_data.record.resource.take(), + )), dropped_attributes_count: 0, ..Default::default() }), diff --git a/opentelemetry-sdk/src/export/logs/mod.rs b/opentelemetry-sdk/src/export/logs/mod.rs index a95e12965a..4adcc6d59f 100644 --- a/opentelemetry-sdk/src/export/logs/mod.rs +++ b/opentelemetry-sdk/src/export/logs/mod.rs @@ -3,7 +3,7 @@ use crate::{logs::LogRecord, Resource}; use async_trait::async_trait; use opentelemetry_api::logs::LogError; use opentelemetry_api::{logs::LogResult, InstrumentationLibrary}; -use std::{fmt::Debug, sync::Arc}; +use std::{borrow::Cow, fmt::Debug}; /// `LogExporter` defines the interface that log exporters should implement. #[async_trait] @@ -22,7 +22,7 @@ pub struct LogData { /// Log record pub record: LogRecord, /// Resource for the emitter who produced this `LogData`. - pub resource: Option>, + pub resource: Cow<'static, Resource>, /// Instrumentation details for the emitter who produced this `LogData`. 
pub instrumentation: InstrumentationLibrary, } diff --git a/opentelemetry-sdk/src/logs/config.rs b/opentelemetry-sdk/src/logs/config.rs index 9ba655ae2a..19a3ce0572 100644 --- a/opentelemetry-sdk/src/logs/config.rs +++ b/opentelemetry-sdk/src/logs/config.rs @@ -1,8 +1,8 @@ -use std::sync::Arc; +use std::borrow::Cow; /// Log emitter configuration. #[derive(Debug, Default)] pub struct Config { /// Contains attributes representing an entity that produces telemetry. - pub resource: Option>, + pub resource: Cow<'static, crate::Resource>, } diff --git a/opentelemetry-sdk/src/logs/log_emitter.rs b/opentelemetry-sdk/src/logs/log_emitter.rs index 1d6756f414..d1d473e53b 100644 --- a/opentelemetry-sdk/src/logs/log_emitter.rs +++ b/opentelemetry-sdk/src/logs/log_emitter.rs @@ -1,9 +1,5 @@ use super::{BatchLogProcessor, Config, LogProcessor, LogRecord, LogRuntime, SimpleLogProcessor}; -use crate::{ - export::logs::{LogData, LogExporter}, - resource::{EnvResourceDetector, SdkProvidedResourceDetector}, - Resource, -}; +use crate::export::logs::{LogData, LogExporter}; use opentelemetry_api::{ global::{handle_error, Error}, logs::LogResult, @@ -12,7 +8,6 @@ use opentelemetry_api::{ use std::{ borrow::Cow, sync::{Arc, Weak}, - time::Duration, }; #[derive(Debug, Clone)] @@ -122,28 +117,11 @@ pub(crate) struct LogEmitterProviderInner { config: Config, } -#[derive(Debug)] +#[derive(Debug, Default)] /// Builder for provider attributes. pub struct Builder { processors: Vec>, config: Config, - sdk_provided_resource: Resource, -} - -impl Default for Builder { - fn default() -> Self { - Builder { - processors: Default::default(), - config: Default::default(), - sdk_provided_resource: Resource::from_detectors( - Duration::from_secs(0), - vec![ - Box::new(SdkProvidedResourceDetector), - Box::new(EnvResourceDetector::new()), - ], - ), - } - } } impl Builder { @@ -178,32 +156,12 @@ impl Builder { Builder { config, ..self } } - /// Return the clone of sdk provided resource. 
- /// - /// See - /// for details. - pub fn sdk_provided_resource(&self) -> Resource { - self.sdk_provided_resource.clone() - } - /// Create a new provider from this configuration. pub fn build(self) -> LogEmitterProvider { - let mut config = self.config; - config.resource = match config.resource { - None => Some(Arc::new(self.sdk_provided_resource)), - Some(resource) => { - if resource.is_empty() { - None - } else { - Some(Arc::new(self.sdk_provided_resource.merge(resource))) - } - } - }; - LogEmitterProvider { inner: Arc::new(LogEmitterProviderInner { processors: self.processors, - config, + config: self.config, }), } } From 5f75512afac8827ea517792f17ccfda208a1f8e9 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Thu, 27 Apr 2023 21:19:39 +0530 Subject: [PATCH 37/51] Add From> implementation for Key. --- opentelemetry-api/src/common.rs | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/opentelemetry-api/src/common.rs b/opentelemetry-api/src/common.rs index 987865cd3f..216a7f0df8 100644 --- a/opentelemetry-api/src/common.rs +++ b/opentelemetry-api/src/common.rs @@ -99,6 +99,16 @@ impl From> for Key { } } +impl From> for Key { + /// Convert a `Cow<'static, str>` to a `Key` + fn from(string: Cow<'static, str>) -> Self { + match string { + Cow::Borrowed(s) => Key(OtelString::Static(s)), + Cow::Owned(s) => Key(OtelString::Owned(s)), + } + } +} + impl fmt::Debug for Key { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { self.0.fmt(fmt) From 1656a9124375c221e84a5463904e5fd0a750f129 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Thu, 27 Apr 2023 23:49:05 +0530 Subject: [PATCH 38/51] Update logging SDK. 
--- opentelemetry-otlp/src/lib.rs | 2 +- opentelemetry-otlp/src/logs.rs | 71 ++++++++++++++++++++-------------- 2 files changed, 42 insertions(+), 31 deletions(-) diff --git a/opentelemetry-otlp/src/lib.rs b/opentelemetry-otlp/src/lib.rs index adf094b8a0..8fee1df2c0 100644 --- a/opentelemetry-otlp/src/lib.rs +++ b/opentelemetry-otlp/src/lib.rs @@ -203,7 +203,7 @@ pub use crate::metric::{ }; #[cfg(feature = "logs")] -pub use crate::log::*; +pub use crate::logs::*; pub use crate::exporter::{ HasExportConfig, WithExportConfig, OTEL_EXPORTER_OTLP_ENDPOINT, diff --git a/opentelemetry-otlp/src/logs.rs b/opentelemetry-otlp/src/logs.rs index 2a9e873ae9..7b7848487f 100644 --- a/opentelemetry-otlp/src/logs.rs +++ b/opentelemetry-otlp/src/logs.rs @@ -54,10 +54,8 @@ use std::{ time::Duration, }; -use opentelemetry::{ - log::LogError, - sdk::{self, export::log::LogData, log::LogRuntime}, -}; +use opentelemetry_api::logs::LogError; +use opentelemetry_sdk::{self, export::logs::LogData, logs::LogRuntime}; impl OtlpPipeline { /// Create a OTLP logging pipeline. 
@@ -162,7 +160,7 @@ pub enum LogExporter { /// The Collector URL collector_endpoint: Uri, /// The HTTP log exporter - log_exporter: Option>, + log_exporter: Option>, }, } @@ -253,17 +251,17 @@ impl LogExporter { } let channel: GrpcChannel = match (grpcio_config.credentials, grpcio_config.use_tls) { - (None, Some(true)) => builder.secure_connect( - config.endpoint.as_str(), - ChannelCredentialsBuilder::new().build(), - ), + (None, Some(true)) => builder + .set_credentials(ChannelCredentialsBuilder::new().build()) + .connect(config.endpoint.as_str()), (None, _) => builder.connect(config.endpoint.as_str()), - (Some(credentials), _) => builder.secure_connect( - config.endpoint.as_str(), - ChannelCredentialsBuilder::new() - .cert(credentials.cert.into(), credentials.key.into()) - .build(), - ), + (Some(credentials), _) => builder + .set_credentials( + ChannelCredentialsBuilder::new() + .cert(credentials.cert.into(), credentials.key.into()) + .build(), + ) + .connect(config.endpoint.as_str()), }; LogExporter::Grpcio { @@ -291,8 +289,8 @@ impl LogExporter { } #[async_trait] -impl opentelemetry::sdk::export::log::LogExporter for LogExporter { - async fn export(&mut self, batch: Vec) -> opentelemetry::log::LogResult<()> { +impl opentelemetry_sdk::export::logs::LogExporter for LogExporter { + async fn export(&mut self, batch: Vec) -> opentelemetry_api::logs::LogResult<()> { match self { #[cfg(feature = "grpc-sys")] LogExporter::Grpcio { @@ -405,7 +403,7 @@ impl opentelemetry::sdk::export::log::LogExporter for LogExporter { #[derive(Default, Debug)] pub struct OtlpLogPipeline { exporter_builder: Option, - log_config: Option, + log_config: Option, } impl OtlpLogPipeline { @@ -418,8 +416,8 @@ impl OtlpLogPipeline { /// Returns a [`LogEmitter`] with the name `opentelemetry-otlp` and the /// current crate version, using the configured log exporter. 
/// - /// [`LogEmitter`]: opentelemetry::sdk::log::LogEmitter - pub fn simple(self) -> Result { + /// [`LogEmitter`]: opentelemetry::opentelemetry_sdk::logs::LogEmitter + pub fn simple(self) -> Result { Ok(build_simple_with_exporter( self.exporter_builder .ok_or(crate::Error::NoExporterBuilder)? @@ -433,7 +431,10 @@ impl OtlpLogPipeline { /// batch log processor. /// /// [`LogEmitter`]: opentelemetry::log::LogEmitter - pub fn batch(self, runtime: R) -> Result { + pub fn batch( + self, + runtime: R, + ) -> Result { Ok(build_batch_with_exporter( self.exporter_builder .ok_or(crate::Error::NoExporterBuilder)? @@ -446,27 +447,37 @@ impl OtlpLogPipeline { fn build_simple_with_exporter( exporter: LogExporter, - log_config: Option, -) -> sdk::log::LogEmitter { + log_config: Option, +) -> opentelemetry_sdk::logs::LogEmitter { let mut provider_builder = - sdk::log::LogEmitterProvider::builder().with_simple_exporter(exporter); + opentelemetry_sdk::logs::LogEmitterProvider::builder().with_simple_exporter(exporter); if let Some(config) = log_config { provider_builder = provider_builder.with_config(config); } let provider = provider_builder.build(); - provider.versioned_log_emitter("opentelemetry-otlp", Some(env!("CARGO_PKG_VERSION"))) + provider.versioned_log_emitter( + "opentelemetry-otlp", + Some(env!("CARGO_PKG_VERSION")), + None, + None, + ) } fn build_batch_with_exporter( exporter: LogExporter, - log_config: Option, + log_config: Option, runtime: R, -) -> sdk::log::LogEmitter { - let mut provider_builder = - sdk::log::LogEmitterProvider::builder().with_batch_exporter(exporter, runtime); +) -> opentelemetry_sdk::logs::LogEmitter { + let mut provider_builder = opentelemetry_sdk::logs::LogEmitterProvider::builder() + .with_batch_exporter(exporter, runtime); if let Some(config) = log_config { provider_builder = provider_builder.with_config(config); } let provider = provider_builder.build(); - provider.versioned_log_emitter("opentelemetry-otlp", Some(env!("CARGO_PKG_VERSION"))) 
+ provider.versioned_log_emitter( + "opentelemetry-otlp", + Some(env!("CARGO_PKG_VERSION")), + None, + None, + ) } From a9b41aa4820897673d33d4b1e1899b2c98dced8b Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Thu, 27 Apr 2023 23:49:22 +0530 Subject: [PATCH 39/51] Add ordered-float dependency. --- opentelemetry-stdout/Cargo.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/opentelemetry-stdout/Cargo.toml b/opentelemetry-stdout/Cargo.toml index a2ea695913..fb36a73b33 100644 --- a/opentelemetry-stdout/Cargo.toml +++ b/opentelemetry-stdout/Cargo.toml @@ -28,6 +28,7 @@ opentelemetry_api = { version = "0.19", path = "../opentelemetry-api", default_f opentelemetry_sdk = { version = "0.19", path = "../opentelemetry-sdk", default_features = false } serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" +ordered-float = "3.4.0" [dev-dependencies] opentelemetry_api = { version = "0.19", path = "../opentelemetry-api", features = ["metrics"] } From 26b2b1e0675b0bdecf03817961d3b8e326f032c2 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Thu, 27 Apr 2023 23:49:39 +0530 Subject: [PATCH 40/51] Rewrite LogsExporter. 
--- opentelemetry-stdout/src/common.rs | 125 +++++++++++++++- opentelemetry-stdout/src/logs/exporter.rs | 115 +++++++++++++++ opentelemetry-stdout/src/logs/mod.rs | 134 +---------------- opentelemetry-stdout/src/logs/transform.rs | 153 ++++++++++++++++++++ opentelemetry-stdout/src/trace/transform.rs | 20 +-- 5 files changed, 393 insertions(+), 154 deletions(-) create mode 100644 opentelemetry-stdout/src/logs/exporter.rs create mode 100644 opentelemetry-stdout/src/logs/transform.rs diff --git a/opentelemetry-stdout/src/common.rs b/opentelemetry-stdout/src/common.rs index 7dec1cd7e5..d6098d36bb 100644 --- a/opentelemetry-stdout/src/common.rs +++ b/opentelemetry-stdout/src/common.rs @@ -1,9 +1,15 @@ -use std::{borrow::Cow, collections::BTreeMap}; +use std::{ + borrow::Cow, + collections::BTreeMap, + hash::{Hash, Hasher}, + time::{SystemTime, UNIX_EPOCH}, +}; -use serde::Serialize; +use ordered_float::OrderedFloat; +use serde::{Serialize, Serializer}; -#[derive(Debug, Serialize, Clone)] -pub(crate) struct AttributeSet(BTreeMap); +#[derive(Debug, Serialize, Clone, Hash, Eq, PartialEq)] +pub(crate) struct AttributeSet(pub BTreeMap); impl From<&opentelemetry_sdk::AttributeSet> for AttributeSet { fn from(value: &opentelemetry_sdk::AttributeSet) -> Self { @@ -16,6 +22,29 @@ impl From<&opentelemetry_sdk::AttributeSet> for AttributeSet { } } +impl From<&opentelemetry_sdk::Resource> for AttributeSet { + fn from(value: &opentelemetry_sdk::Resource) -> Self { + AttributeSet( + value + .iter() + .map(|(key, value)| (Key::from(key.clone()), Value::from(value.clone()))) + .collect(), + ) + } +} + +#[cfg(feature = "logs")] +impl From, opentelemetry_sdk::logs::Any>> for AttributeSet { + fn from(value: BTreeMap, opentelemetry_sdk::logs::Any>) -> Self { + AttributeSet( + value + .into_iter() + .map(|(key, value)| (key.into(), value.into())) + .collect(), + ) + } +} + #[derive(Debug, Clone, Serialize)] #[serde(rename_all = "camelCase")] pub(crate) struct Resource { @@ -43,16 +72,22 @@ 
impl From<&opentelemetry_sdk::Resource> for Resource { } } -#[derive(Debug, Clone, Serialize, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Debug, Clone, Serialize, Hash, PartialEq, Eq, Ord, PartialOrd)] pub(crate) struct Key(Cow<'static, str>); +impl From> for Key { + fn from(value: Cow<'static, str>) -> Self { + Key(value) + } +} + impl From for Key { fn from(value: opentelemetry_api::Key) -> Self { Key(value.as_str().to_string().into()) } } -#[derive(Debug, Serialize, PartialEq, Clone)] +#[derive(Debug, Serialize, Clone)] pub(crate) enum Value { #[serde(rename = "boolValue")] Bool(bool), @@ -64,6 +99,38 @@ pub(crate) enum Value { String(String), #[serde(rename = "arrayValue")] Array(Vec), + #[serde(rename = "kvListValue")] + KeyValues(Vec), + #[serde(rename = "bytesValue")] + BytesValue(Vec), +} + +impl PartialEq for Value { + fn eq(&self, other: &Self) -> bool { + match (&self, &other) { + (Value::Double(f), Value::Double(of)) => OrderedFloat(*f).eq(&OrderedFloat(*of)), + (non_double, other_non_double) => non_double.eq(other_non_double), + } + } +} + +impl Eq for Value {} + +impl Hash for Value { + fn hash(&self, state: &mut H) { + match &self { + Value::Bool(b) => b.hash(state), + Value::Int(i) => i.hash(state), + Value::Double(f) => OrderedFloat(*f).hash(state), + Value::String(s) => s.hash(state), + Value::Array(a) => a.iter().for_each(|v| v.hash(state)), + Value::KeyValues(kv) => kv.iter().for_each(|kv| { + kv.key.hash(state); + kv.value.hash(state); + }), + Value::BytesValue(b) => b.hash(state), + } + } } impl From for Value { @@ -91,6 +158,30 @@ impl From for Value { } } +#[cfg(feature = "logs")] +impl From for Value { + fn from(value: opentelemetry_sdk::logs::Any) -> Self { + match value { + opentelemetry_sdk::logs::Any::Boolean(b) => Value::Bool(b), + opentelemetry_sdk::logs::Any::Int(i) => Value::Int(i), + opentelemetry_sdk::logs::Any::Double(d) => Value::Double(d), + opentelemetry_sdk::logs::Any::String(s) => Value::String(s.into()), + 
opentelemetry_sdk::logs::Any::ListAny(a) => { + Value::Array(a.into_iter().map(Into::into).collect()) + } + opentelemetry_sdk::logs::Any::Map(m) => Value::KeyValues( + m.into_iter() + .map(|(key, value)| KeyValue { + key: Key(key), + value: value.into(), + }) + .collect(), + ), + opentelemetry_sdk::logs::Any::Bytes(b) => Value::BytesValue(b), + } + } +} + #[derive(Debug, Serialize, PartialEq, Clone)] #[serde(rename_all = "camelCase")] pub(crate) struct KeyValue { @@ -98,6 +189,16 @@ pub(crate) struct KeyValue { value: Value, } +#[cfg(feature = "logs")] +impl From<(Cow<'static, str>, opentelemetry_sdk::logs::Any)> for KeyValue { + fn from((key, value): (Cow<'static, str>, opentelemetry_sdk::logs::Any)) -> Self { + KeyValue { + key: key.into(), + value: value.into(), + } + } +} + impl From for KeyValue { fn from(value: opentelemetry_api::KeyValue) -> Self { KeyValue { @@ -148,3 +249,15 @@ impl From for Scope { } } } + +pub(crate) fn as_unix_nano(time: &SystemTime, serializer: S) -> Result +where + S: Serializer, +{ + let nanos = time + .duration_since(UNIX_EPOCH) + .unwrap_or_default() + .as_nanos(); + + serializer.serialize_u128(nanos) +} diff --git a/opentelemetry-stdout/src/logs/exporter.rs b/opentelemetry-stdout/src/logs/exporter.rs new file mode 100644 index 0000000000..2e7d107698 --- /dev/null +++ b/opentelemetry-stdout/src/logs/exporter.rs @@ -0,0 +1,115 @@ +use core::fmt; +use std::io::{stdout, Write}; + +use async_trait::async_trait; +use opentelemetry_api::{ + logs::{LogError, LogResult}, + ExportError, +}; +use opentelemetry_sdk::export::logs::{ExportResult, LogData}; + +type Encoder = + Box LogResult<()> + Send + Sync>; + +/// A [`LogExporter`] that writes to [`Stdout`] or other configured [`Write`]. 
+/// +/// [`LogExporter`]: opentelemetry_sdk::export::logs::LogExporter +/// [`Write`]: std::io::Write +/// [`Stdout`]: std::io::Stdout +pub struct LogExporter { + writer: Option>, + encoder: Encoder, +} + +impl LogExporter { + pub fn builder() -> LogExporterBuilder { + Default::default() + } +} + +impl Default for LogExporter { + fn default() -> Self { + LogExporterBuilder::default().build() + } +} + +impl fmt::Debug for LogExporter { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str("LogsExporter") + } +} + +#[async_trait] +impl opentelemetry_sdk::export::logs::LogExporter for LogExporter { + /// Export spans to stdout + async fn export(&mut self, batch: Vec) -> ExportResult { + let res = if let Some(writer) = &mut self.writer { + (self.encoder)(writer, crate::logs::LogData::from(batch)) + .and_then(|_| writer.write_all(b"\n").map_err(|e| Error(e).into())) + } else { + Err("exporter is shut down".into()) + }; + + res + } + + fn shutdown(&mut self) { + self.writer.take(); + } +} + +/// Stdout exporter's error +#[derive(thiserror::Error, Debug)] +#[error(transparent)] +struct Error(#[from] std::io::Error); + +impl ExportError for Error { + fn exporter_name(&self) -> &'static str { + "stdout" + } +} + +#[derive(Default)] +pub struct LogExporterBuilder { + writer: Option>, + encoder: Option, +} + +impl fmt::Debug for LogExporterBuilder { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str("LogExporterBuilder") + } +} + +impl LogExporterBuilder { + pub fn with_writer(mut self, writer: W) -> Self + where + W: Write + Send + Sync + 'static, + { + self.writer = Some(Box::new(writer)); + self + } + + pub fn with_exporter(mut self, encoder: E) -> Self + where + E: Fn(&mut dyn Write, crate::logs::transform::LogData) -> LogResult<()> + + Send + + Sync + + 'static, + { + self.encoder = Some(Box::new(encoder)); + self + } + + pub fn build(self) -> LogExporter { + LogExporter { + writer: Some(self.writer.unwrap_or_else(|| 
Box::new(stdout()))), + encoder: self.encoder.unwrap_or_else(|| { + Box::new(|writer, logs| { + serde_json::to_writer(writer, &logs) + .map_err(|err| LogError::Other(Box::new(err))) + }) + }), + } + } +} diff --git a/opentelemetry-stdout/src/logs/mod.rs b/opentelemetry-stdout/src/logs/mod.rs index c3018d6b1a..87a88bc756 100644 --- a/opentelemetry-stdout/src/logs/mod.rs +++ b/opentelemetry-stdout/src/logs/mod.rs @@ -8,134 +8,8 @@ //! [`Write`]: std::io::Write //! [`Stdout`]: std::io::Stdout // TODO: Add an example for using this exporter. -use async_trait::async_trait; -use opentelemetry_api::logs::LogError; -use opentelemetry_sdk::{ - export::{ - logs::{ExportResult, LogData, LogExporter}, - ExportError, - }, - logs::{Config, LogEmitter, LogEmitterProvider}, -}; -use std::fmt::Debug; -use std::io::{stdout, Stdout, Write}; +mod exporter; +mod transform; -/// Pipeline builder -#[derive(Debug)] -pub struct PipelineBuilder { - pretty_print: bool, - log_config: Option, - writer: W, -} - -/// Create a new stdout exporter pipeline builder. -pub fn new_pipeline() -> PipelineBuilder { - PipelineBuilder::default() -} - -impl Default for PipelineBuilder { - /// Return the default pipeline builder. - fn default() -> Self { - Self { - pretty_print: false, - log_config: None, - writer: stdout(), - } - } -} - -impl PipelineBuilder { - /// Specify the pretty print setting. - pub fn with_pretty_print(mut self, pretty_print: bool) -> Self { - self.pretty_print = pretty_print; - self - } - - /// Assign the SDK logs configuration. - pub fn with_logs_config(mut self, config: crate::logs::Config) -> Self { - self.log_config = Some(config); - self - } - - /// Specify the writer to use. 
- pub fn with_writer(self, writer: T) -> PipelineBuilder { - PipelineBuilder { - pretty_print: self.pretty_print, - log_config: self.log_config, - writer, - } - } -} - -impl PipelineBuilder -where - W: Write + Debug + Send + 'static, -{ - /// Install the stdout exporter pipeline with the recommended defaults. - pub fn install_simple(mut self) -> LogEmitter { - let exporter = Exporter::new(self.writer, self.pretty_print); - - let mut provider_builder = LogEmitterProvider::builder().with_simple_exporter(exporter); - if let Some(config) = self.log_config.take() { - provider_builder = provider_builder.with_config(config); - } - let provider = provider_builder.build(); - - provider.versioned_log_emitter("opentelemetry", Some(env!("CARGO_PKG_VERSION")), None, None) - } -} - -/// A [`LogExporter`] that writes to [`Stdout`] or other configured [`Write`]. -/// -/// [`LogExporter`]: opentelemetry_sdk::export::logs::LogExporter -/// [`Write`]: std::io::Write -/// [`Stdout`]: std::io::Stdout -#[derive(Debug)] -pub struct Exporter { - writer: W, - pretty_print: bool, -} - -impl Exporter { - /// Create a new stdout `Exporter`. 
- pub fn new(writer: W, pretty_print: bool) -> Self { - Self { - writer, - pretty_print, - } - } -} - -#[async_trait] -impl LogExporter for Exporter -where - W: Write + Debug + Send + 'static, -{ - /// Export spans to stdout - async fn export(&mut self, batch: Vec) -> ExportResult { - for log in batch { - if self.pretty_print { - self.writer - .write_all(format!("{:#?}\n", log).as_bytes()) - .map_err(|err| LogError::ExportFailed(Box::new(Error::from(err))))?; - } else { - self.writer - .write_all(format!("{:?}\n", log).as_bytes()) - .map_err(|err| LogError::ExportFailed(Box::new(Error::from(err))))?; - } - } - - Ok(()) - } -} - -/// Stdout exporter's error -#[derive(thiserror::Error, Debug)] -#[error(transparent)] -struct Error(#[from] std::io::Error); - -impl ExportError for Error { - fn exporter_name(&self) -> &'static str { - "stdout" - } -} +pub use exporter::*; +pub use transform::*; diff --git a/opentelemetry-stdout/src/logs/transform.rs b/opentelemetry-stdout/src/logs/transform.rs new file mode 100644 index 0000000000..1c6bdc74f2 --- /dev/null +++ b/opentelemetry-stdout/src/logs/transform.rs @@ -0,0 +1,153 @@ +use std::{borrow::Cow, collections::HashMap, time::SystemTime}; + +use crate::common::{as_unix_nano, AttributeSet, KeyValue, Resource, Scope, Value}; +use serde::{Serialize, Serializer}; + +/// Transformed logs data that can be serialized. 
+#[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct LogData { + #[serde(skip_serializing_if = "Vec::is_empty")] + resource_logs: Vec, +} + +impl From> for LogData { + fn from(sdk_logs: Vec) -> LogData { + let mut resource_logs = HashMap::::new(); + + for mut sdk_log in sdk_logs { + let resource_schema_url = sdk_log.resource.schema_url().map(|s| s.to_string().into()); + let schema_url = sdk_log.instrumentation.schema_url.clone(); + let scope: Scope = sdk_log.instrumentation.clone().into(); + let resource: Resource = sdk_log.resource.as_ref().into(); + + let mut attrs: AttributeSet = sdk_log.resource.as_ref().into(); + if let Some(record_resource) = sdk_log.record.resource.take() { + for (key, value) in record_resource.into_iter() { + let key = key.into(); + if !attrs.0.contains_key(&key) { + attrs.0.insert(key, value.into()); + } + } + } + + let rl = resource_logs + .entry(attrs) + .or_insert_with(move || ResourceLogs { + resource, + scope_logs: Vec::with_capacity(1), + schema_url: resource_schema_url, + }); + + match rl.scope_logs.iter_mut().find(|sl| sl.scope == scope) { + Some(sl) => sl.log_records.push(sdk_log.into()), + None => rl.scope_logs.push(ScopeLogs { + scope, + log_records: vec![sdk_log.into()], + schema_url, + }), + } + } + + LogData { + resource_logs: resource_logs.into_values().collect(), + } + } +} + +#[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] +struct ResourceLogs { + resource: Resource, + #[serde(skip_serializing_if = "Vec::is_empty")] + scope_logs: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + schema_url: Option>, +} + +#[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] +struct ScopeLogs { + scope: Scope, + #[serde(skip_serializing_if = "Vec::is_empty")] + log_records: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + schema_url: Option>, +} + +#[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] +struct LogRecord { + #[serde( + skip_serializing_if = 
"Option::is_none", + serialize_with = "opt_as_unix_nano" + )] + time_unix_nano: Option, + #[serde( + skip_serializing_if = "Option::is_none", + serialize_with = "opt_as_unix_nano" + )] + observed_time_unix_nano: Option, + severity_number: u32, + #[serde(skip_serializing_if = "Option::is_none")] + severity_text: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + body: Option, + attributes: Vec, + dropped_attributes_count: u32, + #[serde(skip_serializing_if = "Option::is_none")] + flags: Option, + #[serde(skip_serializing_if = "Option::is_none")] + span_id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + trace_id: Option, +} + +impl From for LogRecord { + fn from(value: opentelemetry_sdk::export::logs::LogData) -> Self { + LogRecord { + trace_id: value + .record + .trace_context + .as_ref() + .map(|c| format!("{:x}", c.trace_id)), + span_id: value + .record + .trace_context + .as_ref() + .map(|c| format!("{:x}", c.span_id)), + flags: value + .record + .trace_context + .map(|c| c.trace_flags.map(|f| f.to_u8())) + .unwrap_or_default(), + time_unix_nano: value.record.timestamp, + observed_time_unix_nano: value.record.observed_timestamp, + severity_number: value + .record + .severity_number + .map(|u| u as u32) + .unwrap_or_default(), + attributes: value + .record + .attributes + .map(|attrs| { + attrs + .into_iter() + .map(|(key, value)| (key, value).into()) + .collect() + }) + .unwrap_or_default(), + dropped_attributes_count: 0, + severity_text: value.record.severity_text, + body: value.record.body.map(|a| a.into()), + } + } +} + +fn opt_as_unix_nano(time: &Option, serializer: S) -> Result +where + S: Serializer, +{ + as_unix_nano(time.as_ref().unwrap(), serializer) +} diff --git a/opentelemetry-stdout/src/trace/transform.rs b/opentelemetry-stdout/src/trace/transform.rs index 1397d7ecc3..34b8b1e167 100644 --- a/opentelemetry-stdout/src/trace/transform.rs +++ b/opentelemetry-stdout/src/trace/transform.rs @@ -1,13 +1,9 @@ -use std::{ - 
borrow::Cow, - collections::HashMap, - time::{SystemTime, UNIX_EPOCH}, -}; +use std::{borrow::Cow, collections::HashMap, time::SystemTime}; use opentelemetry_sdk::AttributeSet; use serde::{Serialize, Serializer}; -use crate::common::{KeyValue, Resource, Scope}; +use crate::common::{as_unix_nano, KeyValue, Resource, Scope}; /// Transformed trace data that can be serialized #[derive(Debug, Serialize)] @@ -92,18 +88,6 @@ struct Span { status: Status, } -fn as_unix_nano(time: &SystemTime, serializer: S) -> Result -where - S: Serializer, -{ - let nanos = time - .duration_since(UNIX_EPOCH) - .unwrap_or_default() - .as_nanos(); - - serializer.serialize_u128(nanos) -} - impl From for Span { fn from(value: opentelemetry_sdk::export::trace::SpanData) -> Self { Span { From 283f9321112ebc35f01a78bb1d9c0e1f412aecb9 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Sat, 29 Apr 2023 20:20:17 +0530 Subject: [PATCH 41/51] Move LogRecord to api crate, simplify resources. --- opentelemetry-api/src/logs/mod.rs | 4 + .../src/logs/record.rs | 110 +++++++--- opentelemetry-otlp/src/logs.rs | 22 +- opentelemetry-proto/src/transform/common.rs | 56 +++-- opentelemetry-proto/src/transform/logs.rs | 195 +++++------------- opentelemetry-proto/src/transform/traces.rs | 4 +- opentelemetry-sdk/src/export/logs/mod.rs | 8 +- opentelemetry-sdk/src/logs/log_emitter.rs | 42 ++-- opentelemetry-sdk/src/logs/log_processor.rs | 4 +- opentelemetry-sdk/src/logs/mod.rs | 4 +- opentelemetry-stdout/src/common.rs | 36 ++-- opentelemetry-stdout/src/logs/transform.rs | 17 +- 12 files changed, 241 insertions(+), 261 deletions(-) rename {opentelemetry-sdk => opentelemetry-api}/src/logs/record.rs (67%) diff --git a/opentelemetry-api/src/logs/mod.rs b/opentelemetry-api/src/logs/mod.rs index 15002ce30e..4e956ca20d 100644 --- a/opentelemetry-api/src/logs/mod.rs +++ b/opentelemetry-api/src/logs/mod.rs @@ -5,6 +5,10 @@ use futures_channel::{mpsc::TrySendError, oneshot::Canceled}; use std::time::Duration; use 
thiserror::Error; +mod record; + +pub use record::{Any, LogRecord, LogRecordBuilder, Severity, TraceContext}; + /// Describe the result of operations in log SDK. pub type LogResult = Result; diff --git a/opentelemetry-sdk/src/logs/record.rs b/opentelemetry-api/src/logs/record.rs similarity index 67% rename from opentelemetry-sdk/src/logs/record.rs rename to opentelemetry-api/src/logs/record.rs index c0958c6cf8..6ac38c2528 100644 --- a/opentelemetry-sdk/src/logs/record.rs +++ b/opentelemetry-api/src/logs/record.rs @@ -1,5 +1,8 @@ -use opentelemetry_api::trace::{SpanContext, SpanId, TraceContextExt, TraceFlags, TraceId}; -use std::{borrow::Cow, collections::BTreeMap, time::SystemTime}; +use crate::{ + trace::{OrderMap, SpanContext, SpanId, TraceContextExt, TraceFlags, TraceId}, + Array, Key, StringValue, Value, +}; +use std::{borrow::Cow, time::SystemTime}; #[derive(Debug, Clone, Default)] #[non_exhaustive] @@ -23,10 +26,8 @@ pub struct LogRecord { /// Record body pub body: Option, - /// Resource attributes for the entity that produced this record - pub resource: Option, Any>>, /// Additional attributes associated with this record - pub attributes: Option, Any>>, + pub attributes: Option>, } impl LogRecord { @@ -57,7 +58,7 @@ pub enum Any { /// A double value Double(f64), /// A string value - String(Cow<'static, str>), + String(StringValue), /// A boolean value Boolean(bool), /// A byte array @@ -65,7 +66,7 @@ pub enum Any { /// An array of `Any` values ListAny(Vec), /// A map of string keys to `Any` values, arbitrarily nested. - Map(BTreeMap, Any>), + Map(OrderMap), } macro_rules! 
impl_trivial_from { @@ -93,19 +94,46 @@ impl_trivial_from!(f32, Any::Double); impl_trivial_from!(String, Any::String); impl_trivial_from!(Cow<'static, str>, Any::String); impl_trivial_from!(&'static str, Any::String); +impl_trivial_from!(StringValue, Any::String); impl_trivial_from!(bool, Any::Boolean); -impl> From> for Any { - /// Converts a list of `Into` values into a [`Any::ListAny`] - /// value. - fn from(val: Vec) -> Any { - Any::ListAny(val.into_iter().map(Into::into).collect()) +impl> FromIterator for Any { + /// Creates an [`Any::ListAny`] value from a sequence of `Into` values. + fn from_iter>(iter: I) -> Self { + Any::ListAny(iter.into_iter().map(Into::into).collect()) + } +} + +impl, V: Into> FromIterator<(K, V)> for Any { + /// Creates an [`Any::Map`] value from a sequence of key-value pairs + /// that can be converted into a `Key` and `Any` respectively. + fn from_iter>(iter: I) -> Self { + Any::Map(OrderMap::from_iter( + iter.into_iter().map(|(k, v)| (k.into(), v.into())), + )) + } +} + +impl From for Any { + fn from(value: Value) -> Self { + match value { + Value::Bool(b) => b.into(), + Value::I64(i) => i.into(), + Value::F64(f) => f.into(), + Value::String(s) => s.into(), + Value::Array(a) => match a { + Array::Bool(b) => Any::from_iter(b), + Array::F64(f) => Any::from_iter(f), + Array::I64(i) => Any::from_iter(i), + Array::String(s) => Any::from_iter(s), + }, + } } } /// A normalized severity value. -#[derive(Debug, Copy, Clone)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Ord, PartialOrd)] pub enum Severity { /// TRACE Trace = 1, @@ -157,6 +185,44 @@ pub enum Severity { Fatal4 = 24, } +impl Severity { + /// Return the string representing the short name for the `Severity` + /// value as specified by the OpenTelemetry logs data model. 
+ pub const fn name(&self) -> &'static str { + match &self { + Severity::Trace => "TRACE", + Severity::Trace2 => "TRACE2", + Severity::Trace3 => "TRACE3", + Severity::Trace4 => "TRACE4", + + Severity::Debug => "DEBUG", + Severity::Debug2 => "DEBUG2", + Severity::Debug3 => "DEBUG3", + Severity::Debug4 => "DEBUG4", + + Severity::Info => "INFO", + Severity::Info2 => "INFO2", + Severity::Info3 => "INFO3", + Severity::Info4 => "INFO4", + + Severity::Warn => "WARN", + Severity::Warn2 => "WARN2", + Severity::Warn3 => "WARN3", + Severity::Warn4 => "WARN4", + + Severity::Error => "ERROR", + Severity::Error2 => "ERROR2", + Severity::Error3 => "ERROR3", + Severity::Error4 => "ERROR4", + + Severity::Fatal => "FATAL", + Severity::Fatal2 => "FATAL2", + Severity::Fatal3 => "FATAL3", + Severity::Fatal4 => "FATAL4", + } + } +} + /// A builder for [`LogRecord`] values. #[derive(Debug, Clone)] pub struct LogRecordBuilder { @@ -250,18 +316,8 @@ impl LogRecordBuilder { } } - /// Assign resource - pub fn with_resource(self, resource: BTreeMap, Any>) -> Self { - Self { - record: LogRecord { - resource: Some(resource), - ..self.record - }, - } - } - /// Assign attributes, overriding previously set attributes - pub fn with_attributes(self, attributes: BTreeMap, Any>) -> Self { + pub fn with_attributes(self, attributes: OrderMap) -> Self { Self { record: LogRecord { attributes: Some(attributes), @@ -273,13 +329,15 @@ impl LogRecordBuilder { /// Set a single attribute for this record pub fn with_attribute(mut self, key: K, value: V) -> Self where - K: Into>, + K: Into, V: Into, { if let Some(ref mut map) = self.record.attributes { map.insert(key.into(), value.into()); } else { - self.record.attributes = Some(BTreeMap::from([(key.into(), value.into())])); + let mut map = OrderMap::with_capacity(1); + map.insert(key.into(), value.into()); + self.record.attributes = Some(map); } self diff --git a/opentelemetry-otlp/src/logs.rs b/opentelemetry-otlp/src/logs.rs index 7b7848487f..39965f8d96 100644 
--- a/opentelemetry-otlp/src/logs.rs +++ b/opentelemetry-otlp/src/logs.rs @@ -413,11 +413,11 @@ impl OtlpLogPipeline { self } - /// Returns a [`LogEmitter`] with the name `opentelemetry-otlp` and the + /// Returns a [`Logger`] with the name `opentelemetry-otlp` and the /// current crate version, using the configured log exporter. /// - /// [`LogEmitter`]: opentelemetry::opentelemetry_sdk::logs::LogEmitter - pub fn simple(self) -> Result { + /// [`Logger`]: opentelemetry::opentelemetry_sdk::logs::Logger + pub fn simple(self) -> Result { Ok(build_simple_with_exporter( self.exporter_builder .ok_or(crate::Error::NoExporterBuilder)? @@ -426,15 +426,15 @@ impl OtlpLogPipeline { )) } - /// Returns a [`LogEmitter`] with the name `opentelemetry-otlp` and the + /// Returns a [`Logger`] with the name `opentelemetry-otlp` and the /// current crate version, using the configured log exporter and a /// batch log processor. /// - /// [`LogEmitter`]: opentelemetry::log::LogEmitter + /// [`Logger`]: opentelemetry::log::Logger pub fn batch( self, runtime: R, - ) -> Result { + ) -> Result { Ok(build_batch_with_exporter( self.exporter_builder .ok_or(crate::Error::NoExporterBuilder)? 
@@ -448,9 +448,9 @@ impl OtlpLogPipeline { fn build_simple_with_exporter( exporter: LogExporter, log_config: Option, -) -> opentelemetry_sdk::logs::LogEmitter { +) -> opentelemetry_sdk::logs::Logger { let mut provider_builder = - opentelemetry_sdk::logs::LogEmitterProvider::builder().with_simple_exporter(exporter); + opentelemetry_sdk::logs::LoggerProvider::builder().with_simple_exporter(exporter); if let Some(config) = log_config { provider_builder = provider_builder.with_config(config); } @@ -467,9 +467,9 @@ fn build_batch_with_exporter( exporter: LogExporter, log_config: Option, runtime: R, -) -> opentelemetry_sdk::logs::LogEmitter { - let mut provider_builder = opentelemetry_sdk::logs::LogEmitterProvider::builder() - .with_batch_exporter(exporter, runtime); +) -> opentelemetry_sdk::logs::Logger { + let mut provider_builder = + opentelemetry_sdk::logs::LoggerProvider::builder().with_batch_exporter(exporter, runtime); if let Some(config) = log_config { provider_builder = provider_builder.with_config(config); } diff --git a/opentelemetry-proto/src/transform/common.rs b/opentelemetry-proto/src/transform/common.rs index 8618e1ea74..0050af12cc 100644 --- a/opentelemetry-proto/src/transform/common.rs +++ b/opentelemetry-proto/src/transform/common.rs @@ -44,6 +44,7 @@ pub mod tonic { } /// Wrapper type for Vec<[`KeyValue`](crate::proto::tonic::common::v1::KeyValue)> + #[derive(Default)] pub struct Attributes(pub ::std::vec::Vec); impl From for Attributes { @@ -73,6 +74,20 @@ pub mod tonic { } } + #[cfg(feature = "logs")] + impl, V: Into> FromIterator<(K, V)> for Attributes { + fn from_iter>(iter: T) -> Self { + Attributes( + iter.into_iter() + .map(|(k, v)| KeyValue { + key: k.into(), + value: Some(v.into()), + }) + .collect(), + ) + } + } + impl From for AnyValue { fn from(value: Value) -> Self { AnyValue { @@ -105,14 +120,11 @@ pub mod tonic { } #[cfg(any(feature = "traces", feature = "logs"))] - pub(crate) fn resource_attributes(resource: Option<&Resource>) -> 
Attributes { + pub(crate) fn resource_attributes(resource: &Resource) -> Attributes { resource - .map(|res| { - res.iter() - .map(|(k, v)| opentelemetry_api::KeyValue::new(k.clone(), v.clone())) - .collect::>() - }) - .unwrap_or_default() + .iter() + .map(|(k, v)| opentelemetry_api::KeyValue::new(k.clone(), v.clone())) + .collect::>() .into() } } @@ -122,7 +134,7 @@ pub mod grpcio { use crate::proto::grpcio::common::{AnyValue, ArrayValue, InstrumentationScope, KeyValue}; use opentelemetry_api::{Array, Value}; use opentelemetry_sdk::{trace::EvictedHashMap, Resource}; - use protobuf::RepeatedField; + use protobuf::{RepeatedField, SingularPtrField}; use std::borrow::Cow; impl From for InstrumentationScope { @@ -135,6 +147,7 @@ pub mod grpcio { } } + #[derive(Default)] pub struct Attributes(pub ::protobuf::RepeatedField); impl From for Attributes { @@ -168,6 +181,21 @@ pub mod grpcio { } } + #[cfg(feature = "logs")] + impl, V: Into> FromIterator<(K, V)> for Attributes { + fn from_iter>(iter: T) -> Self { + Attributes(RepeatedField::from_vec( + iter.into_iter() + .map(|(k, v)| KeyValue { + key: k.into(), + value: SingularPtrField::some(v.into()), + ..Default::default() + }) + .collect(), + )) + } + } + impl From for AnyValue { fn from(value: Value) -> Self { let mut any_value = AnyValue::new(); @@ -204,15 +232,11 @@ pub mod grpcio { } #[cfg(any(feature = "traces", feature = "logs"))] - pub(crate) fn resource_attributes(resource: Option<&Resource>) -> Attributes { + pub(crate) fn resource_attributes(resource: &Resource) -> Attributes { resource - .map(|resource| { - resource - .iter() - .map(|(k, v)| opentelemetry_api::KeyValue::new(k.clone(), v.clone())) - .collect::>() - }) - .unwrap_or_default() + .iter() + .map(|(k, v)| opentelemetry_api::KeyValue::new(k.clone(), v.clone())) + .collect::>() .into() } } diff --git a/opentelemetry-proto/src/transform/logs.rs b/opentelemetry-proto/src/transform/logs.rs index e8d4c3f79f..938d5cdb2a 100644 --- 
a/opentelemetry-proto/src/transform/logs.rs +++ b/opentelemetry-proto/src/transform/logs.rs @@ -1,20 +1,28 @@ use crate::transform::common::to_nanos; -use std::borrow::Cow; #[cfg(feature = "gen-tonic")] pub mod tonic { - use std::collections::BTreeMap; - - use opentelemetry_sdk::logs::{Any, Severity}; - - use crate::tonic::{ - common::v1::{any_value::Value, AnyValue, ArrayValue, KeyValue, KeyValueList}, - logs::v1::{LogRecord, ResourceLogs, ScopeLogs, SeverityNumber}, - resource::v1::Resource, + use crate::{ + tonic::{ + common::v1::{any_value::Value, AnyValue, ArrayValue, KeyValue, KeyValueList}, + logs::v1::{LogRecord, ResourceLogs, ScopeLogs, SeverityNumber}, + resource::v1::Resource, + Attributes, + }, + transform::common::tonic::resource_attributes, }; + use opentelemetry_api::logs::{Any, Severity}; use super::*; + impl From for AnyValue { + fn from(value: Any) -> Self { + AnyValue { + value: Some(value.into()), + } + } + } + impl From for Value { fn from(value: Any) -> Self { match value { @@ -34,7 +42,7 @@ pub mod tonic { values: m .into_iter() .map(|(key, value)| KeyValue { - key: key.into_owned(), + key: key.into(), value: Some(AnyValue { value: Some(value.into()), }), @@ -46,52 +54,8 @@ pub mod tonic { } } - fn attributes_to_key_value(attributes: BTreeMap, Any>) -> Vec { - attributes - .into_iter() - .map(|(key, value)| KeyValue { - key: key.into_owned(), - value: Some(AnyValue { - value: Some(value.into()), - }), - }) - .collect() - } - - fn build_resource_key_values( - resource: &opentelemetry_sdk::Resource, - log_resource: Option, Any>>, - ) -> Vec { - let mut final_kv: Vec = Vec::new(); - - if let Some(log_resource) = log_resource { - for (key, value) in log_resource.into_iter() { - let api_key = opentelemetry_api::Key::from(key); - if resource.get(api_key.clone()).is_some() { - continue; - } - - final_kv.push(KeyValue { - key: api_key.into(), - value: Some(AnyValue { - value: Some(value.into()), - }), - }); - } - } - - for (key, value) in 
resource.iter() { - final_kv.push(KeyValue { - key: key.clone().into(), - value: Some(value.clone().into()), - }) - } - - final_kv - } - - impl From for LogRecord { - fn from(log_record: opentelemetry_sdk::logs::LogRecord) -> Self { + impl From for LogRecord { + fn from(log_record: opentelemetry_api::logs::LogRecord) -> Self { let trace_context = log_record.trace_context.as_ref(); let severity_number = match log_record.severity_number { Some(Severity::Trace) => SeverityNumber::Trace, @@ -129,13 +93,12 @@ pub mod tonic { .unwrap_or_default(), severity_number: severity_number.into(), severity_text: log_record.severity_text.map(Into::into).unwrap_or_default(), - body: Some(AnyValue { - value: log_record.body.map(Into::into), - }), + body: log_record.body.map(Into::into), attributes: log_record .attributes - .map(attributes_to_key_value) - .unwrap_or_default(), + .map(|attrs| Attributes::from_iter(attrs.into_iter())) + .unwrap_or_default() + .0, dropped_attributes_count: 0, flags: trace_context .map(|ctx| { @@ -157,13 +120,10 @@ pub mod tonic { } impl From for ResourceLogs { - fn from(mut log_data: opentelemetry_sdk::export::logs::LogData) -> Self { + fn from(log_data: opentelemetry_sdk::export::logs::LogData) -> Self { ResourceLogs { resource: Some(Resource { - attributes: build_resource_key_values( - log_data.resource.as_ref(), - log_data.record.resource.take(), - ), + attributes: resource_attributes(&log_data.resource).0, dropped_attributes_count: 0, }), schema_url: "".to_string(), @@ -185,18 +145,29 @@ pub mod tonic { #[cfg(feature = "gen-protoc")] pub mod grpcio { - use std::collections::BTreeMap; - - use crate::proto::grpcio::{ - common::{AnyValue, AnyValue_oneof_value, ArrayValue, KeyValue, KeyValueList}, - logs::{LogRecord, ResourceLogs, ScopeLogs, SeverityNumber}, - resource::Resource, + use crate::{ + grpcio::Attributes, + proto::grpcio::{ + common::{AnyValue, AnyValue_oneof_value, ArrayValue, KeyValue, KeyValueList}, + logs::{LogRecord, ResourceLogs, 
ScopeLogs, SeverityNumber}, + resource::Resource, + }, + transform::common::grpcio::resource_attributes, }; - use opentelemetry_sdk::logs::{Any, Severity}; + use opentelemetry_api::logs::{Any, Severity}; use protobuf::{RepeatedField, SingularPtrField}; use super::*; + impl From for AnyValue { + fn from(value: Any) -> Self { + AnyValue { + value: Some(value.into()), + ..Default::default() + } + } + } + impl From for AnyValue_oneof_value { fn from(value: Any) -> Self { match value { @@ -219,7 +190,7 @@ pub mod grpcio { values: RepeatedField::from_vec( m.into_iter() .map(|(key, value)| KeyValue { - key: key.into_owned(), + key: key.into(), value: SingularPtrField::some(AnyValue { value: Some(value.into()), ..Default::default() @@ -235,57 +206,8 @@ pub mod grpcio { } } - fn attributes_to_keyvalue(attributes: BTreeMap, Any>) -> Vec { - attributes - .into_iter() - .map(|(key, value)| KeyValue { - key: key.into_owned(), - value: SingularPtrField::some(AnyValue { - value: Some(value.into()), - ..Default::default() - }), - ..Default::default() - }) - .collect() - } - - fn build_resource_key_values( - resource: &opentelemetry_sdk::Resource, - log_resource: Option, Any>>, - ) -> Vec { - let mut final_kv: Vec = Vec::new(); - - if let Some(log_resource) = log_resource { - for (key, value) in log_resource.into_iter() { - let api_key = opentelemetry_api::Key::from(key); - if resource.get(api_key.clone()).is_some() { - continue; - } - - final_kv.push(KeyValue { - key: api_key.into(), - value: SingularPtrField::some(AnyValue { - value: Some(value.into()), - ..Default::default() - }), - ..Default::default() - }); - } - } - - for (key, value) in resource.iter() { - final_kv.push(KeyValue { - key: key.clone().into(), - value: SingularPtrField::some(value.clone().into()), - ..Default::default() - }) - } - - final_kv - } - - impl From for LogRecord { - fn from(log_record: opentelemetry_sdk::logs::LogRecord) -> Self { + impl From for LogRecord { + fn from(log_record: 
opentelemetry_api::logs::LogRecord) -> Self { let trace_context = log_record.trace_context.as_ref(); let severity_number = match log_record.severity_number { Some(Severity::Trace) => SeverityNumber::SEVERITY_NUMBER_TRACE, @@ -319,16 +241,12 @@ pub mod grpcio { time_unix_nano: log_record.timestamp.map(to_nanos).unwrap_or(0), severity_number, severity_text: log_record.severity_text.map(Into::into).unwrap_or_default(), - body: SingularPtrField::some(AnyValue { - value: log_record.body.map(Into::into), - ..Default::default() - }), - attributes: RepeatedField::from_vec( - log_record - .attributes - .map(attributes_to_keyvalue) - .unwrap_or_default(), - ), + body: log_record.body.map(Into::into).into(), + attributes: log_record + .attributes + .map(|attrs| Attributes::from_iter(attrs.into_iter())) + .unwrap_or_default() + .0, dropped_attributes_count: 0, flags: trace_context .map(|ctx| { @@ -349,13 +267,10 @@ pub mod grpcio { } impl From for ResourceLogs { - fn from(mut log_data: opentelemetry_sdk::export::logs::LogData) -> Self { + fn from(log_data: opentelemetry_sdk::export::logs::LogData) -> Self { ResourceLogs { resource: SingularPtrField::some(Resource { - attributes: RepeatedField::from_vec(build_resource_key_values( - log_data.resource.as_ref(), - log_data.record.resource.take(), - )), + attributes: resource_attributes(&log_data.resource).0, dropped_attributes_count: 0, ..Default::default() }), diff --git a/opentelemetry-proto/src/transform/traces.rs b/opentelemetry-proto/src/transform/traces.rs index 5b6fd2e705..939aca647b 100644 --- a/opentelemetry-proto/src/transform/traces.rs +++ b/opentelemetry-proto/src/transform/traces.rs @@ -49,7 +49,7 @@ pub mod tonic { let span_kind: span::SpanKind = source_span.span_kind.into(); ResourceSpans { resource: Some(Resource { - attributes: resource_attributes(Some(&source_span.resource)).0, + attributes: resource_attributes(&source_span.resource).0, dropped_attributes_count: 0, }), schema_url: source_span @@ -160,7 +160,7 @@ 
pub mod grpcio { fn from(source_span: SpanData) -> Self { ResourceSpans { resource: SingularPtrField::from(Some(Resource { - attributes: resource_attributes(Some(&source_span.resource)).0, + attributes: resource_attributes(&source_span.resource).0, dropped_attributes_count: 0, ..Default::default() })), diff --git a/opentelemetry-sdk/src/export/logs/mod.rs b/opentelemetry-sdk/src/export/logs/mod.rs index 4adcc6d59f..3fc13fa8b5 100644 --- a/opentelemetry-sdk/src/export/logs/mod.rs +++ b/opentelemetry-sdk/src/export/logs/mod.rs @@ -1,8 +1,10 @@ //! Log exporters -use crate::{logs::LogRecord, Resource}; +use crate::Resource; use async_trait::async_trait; -use opentelemetry_api::logs::LogError; -use opentelemetry_api::{logs::LogResult, InstrumentationLibrary}; +use opentelemetry_api::{ + logs::{LogError, LogRecord, LogResult}, + InstrumentationLibrary, +}; use std::{borrow::Cow, fmt::Debug}; /// `LogExporter` defines the interface that log exporters should implement. diff --git a/opentelemetry-sdk/src/logs/log_emitter.rs b/opentelemetry-sdk/src/logs/log_emitter.rs index d1d473e53b..e349df3896 100644 --- a/opentelemetry-sdk/src/logs/log_emitter.rs +++ b/opentelemetry-sdk/src/logs/log_emitter.rs @@ -1,8 +1,8 @@ -use super::{BatchLogProcessor, Config, LogProcessor, LogRecord, LogRuntime, SimpleLogProcessor}; +use super::{BatchLogProcessor, Config, LogProcessor, LogRuntime, SimpleLogProcessor}; use crate::export::logs::{LogData, LogExporter}; use opentelemetry_api::{ global::{handle_error, Error}, - logs::LogResult, + logs::{LogRecord, LogResult}, InstrumentationLibrary, }; use std::{ @@ -12,17 +12,17 @@ use std::{ #[derive(Debug, Clone)] /// Creator for `LogEmitter` instances. -pub struct LogEmitterProvider { - inner: Arc, +pub struct LoggerProvider { + inner: Arc, } /// Default log emitter name if empty string is provided. 
const DEFAULT_COMPONENT_NAME: &str = "rust.opentelemetry.io/sdk/logemitter"; -impl LogEmitterProvider { +impl LoggerProvider { /// Build a new log emitter provider. - pub(crate) fn new(inner: Arc) -> Self { - LogEmitterProvider { inner } + pub(crate) fn new(inner: Arc) -> Self { + LoggerProvider { inner } } /// Create a new `LogEmitterProvider` builder. @@ -37,7 +37,7 @@ impl LogEmitterProvider { version: Option<&'static str>, schema_url: Option>, attributes: Option>, - ) -> LogEmitter { + ) -> Logger { let name = name.into(); let component_name = if name.is_empty() { @@ -46,7 +46,7 @@ impl LogEmitterProvider { name }; - LogEmitter::new( + Logger::new( InstrumentationLibrary::new( component_name, version.map(Into::into), @@ -94,7 +94,7 @@ impl LogEmitterProvider { } } -impl Drop for LogEmitterProvider { +impl Drop for LoggerProvider { fn drop(&mut self) { match self.try_shutdown() { None => handle_error(Error::Other( @@ -112,7 +112,7 @@ impl Drop for LogEmitterProvider { } #[derive(Debug)] -pub(crate) struct LogEmitterProviderInner { +pub(crate) struct LoggerProviderInner { processors: Vec>, config: Config, } @@ -157,9 +157,9 @@ impl Builder { } /// Create a new provider from this configuration. - pub fn build(self) -> LogEmitterProvider { - LogEmitterProvider { - inner: Arc::new(LogEmitterProviderInner { + pub fn build(self) -> LoggerProvider { + LoggerProvider { + inner: Arc::new(LoggerProviderInner { processors: self.processors, config: self.config, }), @@ -171,25 +171,25 @@ impl Builder { /// The object for emitting [`LogRecord`]s. /// /// [`LogRecord`]: crate::log::LogRecord -pub struct LogEmitter { +pub struct Logger { instrumentation_lib: InstrumentationLibrary, - provider: Weak, + provider: Weak, } -impl LogEmitter { +impl Logger { pub(crate) fn new( instrumentation_lib: InstrumentationLibrary, - provider: Weak, + provider: Weak, ) -> Self { - LogEmitter { + Logger { instrumentation_lib, provider, } } /// LogEmitterProvider associated with this tracer. 
- pub fn provider(&self) -> Option { - self.provider.upgrade().map(LogEmitterProvider::new) + pub fn provider(&self) -> Option { + self.provider.upgrade().map(LoggerProvider::new) } /// Instrumentation library information of this tracer. diff --git a/opentelemetry-sdk/src/logs/log_processor.rs b/opentelemetry-sdk/src/logs/log_processor.rs index bf749ce896..a6fea44596 100644 --- a/opentelemetry-sdk/src/logs/log_processor.rs +++ b/opentelemetry-sdk/src/logs/log_processor.rs @@ -272,7 +272,7 @@ pub struct BatchConfig { max_queue_size: usize, /// The delay interval in milliseconds between two consecutive processing - /// of batches. The default value is 5 seconds. + /// of batches. The default value is 1 second. scheduled_delay: Duration, /// The maximum number of logs to process in a single batch. If there are @@ -289,7 +289,7 @@ impl Default for BatchConfig { fn default() -> Self { BatchConfig { max_queue_size: 2_048, - scheduled_delay: Duration::from_millis(5_000), + scheduled_delay: Duration::from_millis(1_000), max_export_batch_size: 512, max_export_timeout: Duration::from_millis(30_000), } diff --git a/opentelemetry-sdk/src/logs/mod.rs b/opentelemetry-sdk/src/logs/mod.rs index e47be2a63b..3aaa607910 100644 --- a/opentelemetry-sdk/src/logs/mod.rs +++ b/opentelemetry-sdk/src/logs/mod.rs @@ -3,14 +3,12 @@ mod config; mod log_emitter; mod log_processor; -mod record; mod runtime; pub use config::Config; -pub use log_emitter::{Builder, LogEmitter, LogEmitterProvider}; +pub use log_emitter::{Builder, Logger, LoggerProvider}; pub use log_processor::{ BatchConfig, BatchLogProcessor, BatchLogProcessorBuilder, BatchMessage, LogProcessor, SimpleLogProcessor, }; -pub use record::{Any, LogRecord, LogRecordBuilder, Severity, TraceContext}; pub use runtime::{LogRuntime, TrySend}; diff --git a/opentelemetry-stdout/src/common.rs b/opentelemetry-stdout/src/common.rs index d6098d36bb..67e9570ce5 100644 --- a/opentelemetry-stdout/src/common.rs +++ b/opentelemetry-stdout/src/common.rs 
@@ -33,18 +33,6 @@ impl From<&opentelemetry_sdk::Resource> for AttributeSet { } } -#[cfg(feature = "logs")] -impl From, opentelemetry_sdk::logs::Any>> for AttributeSet { - fn from(value: BTreeMap, opentelemetry_sdk::logs::Any>) -> Self { - AttributeSet( - value - .into_iter() - .map(|(key, value)| (key.into(), value.into())) - .collect(), - ) - } -} - #[derive(Debug, Clone, Serialize)] #[serde(rename_all = "camelCase")] pub(crate) struct Resource { @@ -159,25 +147,25 @@ impl From for Value { } #[cfg(feature = "logs")] -impl From for Value { - fn from(value: opentelemetry_sdk::logs::Any) -> Self { +impl From for Value { + fn from(value: opentelemetry_api::logs::Any) -> Self { match value { - opentelemetry_sdk::logs::Any::Boolean(b) => Value::Bool(b), - opentelemetry_sdk::logs::Any::Int(i) => Value::Int(i), - opentelemetry_sdk::logs::Any::Double(d) => Value::Double(d), - opentelemetry_sdk::logs::Any::String(s) => Value::String(s.into()), - opentelemetry_sdk::logs::Any::ListAny(a) => { + opentelemetry_api::logs::Any::Boolean(b) => Value::Bool(b), + opentelemetry_api::logs::Any::Int(i) => Value::Int(i), + opentelemetry_api::logs::Any::Double(d) => Value::Double(d), + opentelemetry_api::logs::Any::String(s) => Value::String(s.into()), + opentelemetry_api::logs::Any::ListAny(a) => { Value::Array(a.into_iter().map(Into::into).collect()) } - opentelemetry_sdk::logs::Any::Map(m) => Value::KeyValues( + opentelemetry_api::logs::Any::Map(m) => Value::KeyValues( m.into_iter() .map(|(key, value)| KeyValue { - key: Key(key), + key: key.into(), value: value.into(), }) .collect(), ), - opentelemetry_sdk::logs::Any::Bytes(b) => Value::BytesValue(b), + opentelemetry_api::logs::Any::Bytes(b) => Value::BytesValue(b), } } } @@ -190,8 +178,8 @@ pub(crate) struct KeyValue { } #[cfg(feature = "logs")] -impl From<(Cow<'static, str>, opentelemetry_sdk::logs::Any)> for KeyValue { - fn from((key, value): (Cow<'static, str>, opentelemetry_sdk::logs::Any)) -> Self { +impl 
From<(opentelemetry_api::Key, opentelemetry_api::logs::Any)> for KeyValue { + fn from((key, value): (opentelemetry_api::Key, opentelemetry_api::logs::Any)) -> Self { KeyValue { key: key.into(), value: value.into(), diff --git a/opentelemetry-stdout/src/logs/transform.rs b/opentelemetry-stdout/src/logs/transform.rs index 1c6bdc74f2..ba0a0ceb78 100644 --- a/opentelemetry-stdout/src/logs/transform.rs +++ b/opentelemetry-stdout/src/logs/transform.rs @@ -1,6 +1,7 @@ use std::{borrow::Cow, collections::HashMap, time::SystemTime}; -use crate::common::{as_unix_nano, AttributeSet, KeyValue, Resource, Scope, Value}; +use crate::common::{as_unix_nano, KeyValue, Resource, Scope, Value}; +use opentelemetry_sdk::AttributeSet; use serde::{Serialize, Serializer}; /// Transformed logs data that can be serialized. @@ -15,24 +16,14 @@ impl From> for LogData { fn from(sdk_logs: Vec) -> LogData { let mut resource_logs = HashMap::::new(); - for mut sdk_log in sdk_logs { + for sdk_log in sdk_logs { let resource_schema_url = sdk_log.resource.schema_url().map(|s| s.to_string().into()); let schema_url = sdk_log.instrumentation.schema_url.clone(); let scope: Scope = sdk_log.instrumentation.clone().into(); let resource: Resource = sdk_log.resource.as_ref().into(); - let mut attrs: AttributeSet = sdk_log.resource.as_ref().into(); - if let Some(record_resource) = sdk_log.record.resource.take() { - for (key, value) in record_resource.into_iter() { - let key = key.into(); - if !attrs.0.contains_key(&key) { - attrs.0.insert(key, value.into()); - } - } - } - let rl = resource_logs - .entry(attrs) + .entry(sdk_log.resource.as_ref().into()) .or_insert_with(move || ResourceLogs { resource, scope_logs: Vec::with_capacity(1), From 322da85df837b236c1fc40fcd86289cb6f5d371e Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Mon, 1 May 2023 00:23:22 +0530 Subject: [PATCH 42/51] Add API traits for logs. 
--- opentelemetry-api/src/logs/logger.rs | 23 +++++++++++++++++++++++ opentelemetry-api/src/logs/mod.rs | 4 ++++ 2 files changed, 27 insertions(+) create mode 100644 opentelemetry-api/src/logs/logger.rs diff --git a/opentelemetry-api/src/logs/logger.rs b/opentelemetry-api/src/logs/logger.rs new file mode 100644 index 0000000000..bb5be76381 --- /dev/null +++ b/opentelemetry-api/src/logs/logger.rs @@ -0,0 +1,23 @@ +use std::borrow::Cow; + +use crate::{logs::LogRecord, KeyValue}; + +pub trait Logger { + fn emit(&self, record: LogRecord); +} + +pub trait LoggerProvider { + type Logger: Logger; + + fn versioned_logger( + &self, + name: Cow<'static, str>, + version: Option>, + schema_url: Option>, + attributes: Option>, + ) -> Self::Logger; + + fn logger(&self, name: Cow<'static, str>) -> Self::Logger { + self.versioned_logger(name, None, None, None) + } +} diff --git a/opentelemetry-api/src/logs/mod.rs b/opentelemetry-api/src/logs/mod.rs index 4e956ca20d..d862b69042 100644 --- a/opentelemetry-api/src/logs/mod.rs +++ b/opentelemetry-api/src/logs/mod.rs @@ -5,8 +5,12 @@ use futures_channel::{mpsc::TrySendError, oneshot::Canceled}; use std::time::Duration; use thiserror::Error; +mod logger; +mod noop; mod record; +pub use logger::{Logger, LoggerProvider}; +pub use noop::NoopLoggerProvider; pub use record::{Any, LogRecord, LogRecordBuilder, Severity, TraceContext}; /// Describe the result of operations in log SDK. From 93f5acd63aa3516e5386336b96279ba16077156e Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Mon, 1 May 2023 00:24:36 +0530 Subject: [PATCH 43/51] Add api trait impls. 
--- opentelemetry-sdk/src/logs/log_emitter.rs | 43 +++++++++++------------ 1 file changed, 21 insertions(+), 22 deletions(-) diff --git a/opentelemetry-sdk/src/logs/log_emitter.rs b/opentelemetry-sdk/src/logs/log_emitter.rs index e349df3896..5d6b97ca7b 100644 --- a/opentelemetry-sdk/src/logs/log_emitter.rs +++ b/opentelemetry-sdk/src/logs/log_emitter.rs @@ -19,27 +19,17 @@ pub struct LoggerProvider { /// Default log emitter name if empty string is provided. const DEFAULT_COMPONENT_NAME: &str = "rust.opentelemetry.io/sdk/logemitter"; -impl LoggerProvider { - /// Build a new log emitter provider. - pub(crate) fn new(inner: Arc) -> Self { - LoggerProvider { inner } - } - - /// Create a new `LogEmitterProvider` builder. - pub fn builder() -> Builder { - Builder::default() - } +impl opentelemetry_api::logs::LoggerProvider for LoggerProvider { + type Logger = Logger; /// Create a new versioned `LogEmitter` instance. - pub fn versioned_log_emitter( + fn versioned_logger( &self, - name: impl Into>, - version: Option<&'static str>, + name: Cow<'static, str>, + version: Option>, schema_url: Option>, attributes: Option>, ) -> Logger { - let name = name.into(); - let component_name = if name.is_empty() { Cow::Borrowed(DEFAULT_COMPONENT_NAME) } else { @@ -47,15 +37,22 @@ impl LoggerProvider { }; Logger::new( - InstrumentationLibrary::new( - component_name, - version.map(Into::into), - schema_url, - attributes, - ), + InstrumentationLibrary::new(component_name, version, schema_url, attributes), Arc::downgrade(&self.inner), ) } +} + +impl LoggerProvider { + /// Build a new log emitter provider. + pub(crate) fn new(inner: Arc) -> Self { + LoggerProvider { inner } + } + + /// Create a new `LogEmitterProvider` builder. + pub fn builder() -> Builder { + Builder::default() + } /// Config associated with this provider. 
pub fn config(&self) -> &Config { @@ -196,9 +193,11 @@ impl Logger { pub fn instrumentation_library(&self) -> &InstrumentationLibrary { &self.instrumentation_lib } +} +impl opentelemetry_api::logs::Logger for Logger { /// Emit a `LogRecord`. - pub fn emit(&self, record: LogRecord) { + fn emit(&self, record: LogRecord) { let provider = match self.provider() { Some(provider) => provider, None => return, From 070b496cb0448d443d79a9f299ac85809c171d82 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Mon, 1 May 2023 00:25:22 +0530 Subject: [PATCH 44/51] Add no-op impl for Logger and LoggerProvider. --- opentelemetry-api/src/logs/noop.rs | 33 ++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 opentelemetry-api/src/logs/noop.rs diff --git a/opentelemetry-api/src/logs/noop.rs b/opentelemetry-api/src/logs/noop.rs new file mode 100644 index 0000000000..5cb6dcfc41 --- /dev/null +++ b/opentelemetry-api/src/logs/noop.rs @@ -0,0 +1,33 @@ +use crate::logs::{LogRecord, Logger, LoggerProvider}; + +/// A no-op implementation of a [`LoggerProvider`]. +#[derive(Clone, Debug, Default)] +pub struct NoopLoggerProvider(()); + +impl NoopLoggerProvider { + pub fn new() -> Self { + NoopLoggerProvider(()) + } +} + +impl LoggerProvider for NoopLoggerProvider { + type Logger = NoopLogger; + + fn versioned_logger( + &self, + _name: std::borrow::Cow<'static, str>, + _version: Option>, + _schema_url: Option>, + _attributes: Option>, + ) -> Self::Logger { + NoopLogger(()) + } +} + +/// A no-op implementation of a [`Logger`] +#[derive(Clone, Debug)] +pub struct NoopLogger(()); + +impl Logger for NoopLogger { + fn emit(&self, _record: LogRecord) {} +} From 173fc3d167df87a6819c6d66f309bf181db05a3e Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Mon, 1 May 2023 00:25:34 +0530 Subject: [PATCH 45/51] Use api traits. 
--- opentelemetry-otlp/src/logs.rs | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/opentelemetry-otlp/src/logs.rs b/opentelemetry-otlp/src/logs.rs index 39965f8d96..1f18e83de3 100644 --- a/opentelemetry-otlp/src/logs.rs +++ b/opentelemetry-otlp/src/logs.rs @@ -50,11 +50,12 @@ use crate::exporter::ExportConfig; use crate::OtlpPipeline; use async_trait::async_trait; use std::{ + borrow::Cow, fmt::{self, Debug}, time::Duration, }; -use opentelemetry_api::logs::LogError; +use opentelemetry_api::logs::{LogError, LoggerProvider}; use opentelemetry_sdk::{self, export::logs::LogData, logs::LogRuntime}; impl OtlpPipeline { @@ -455,9 +456,9 @@ fn build_simple_with_exporter( provider_builder = provider_builder.with_config(config); } let provider = provider_builder.build(); - provider.versioned_log_emitter( - "opentelemetry-otlp", - Some(env!("CARGO_PKG_VERSION")), + provider.versioned_logger( + Cow::Borrowed("opentelemetry-otlp"), + Some(Cow::Borrowed(env!("CARGO_PKG_VERSION"))), None, None, ) @@ -474,9 +475,9 @@ fn build_batch_with_exporter( provider_builder = provider_builder.with_config(config); } let provider = provider_builder.build(); - provider.versioned_log_emitter( - "opentelemetry-otlp", - Some(env!("CARGO_PKG_VERSION")), + provider.versioned_logger( + Cow::Borrowed("opentelemetry-otlp"), + Some(Cow::Borrowed("CARGO_PKG_VERSION")), None, None, ) From b2fc857edcaad5bbe925099b64e05ee046dcd969 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Mon, 1 May 2023 00:25:43 +0530 Subject: [PATCH 46/51] Add global logger/loggerproviders. 
--- opentelemetry-api/src/global/logs.rs | 122 +++++++++++++++++++++++++++ opentelemetry-api/src/global/mod.rs | 8 ++ 2 files changed, 130 insertions(+) create mode 100644 opentelemetry-api/src/global/logs.rs diff --git a/opentelemetry-api/src/global/logs.rs b/opentelemetry-api/src/global/logs.rs new file mode 100644 index 0000000000..3bf7610e83 --- /dev/null +++ b/opentelemetry-api/src/global/logs.rs @@ -0,0 +1,122 @@ +use std::{ + borrow::Cow, + fmt, mem, + sync::{Arc, RwLock}, +}; + +use once_cell::sync::Lazy; + +use crate::{ + logs::{Logger, LoggerProvider, NoopLoggerProvider}, + KeyValue, +}; + +pub trait ObjectSafeLoggerProvider { + fn versioned_logger_boxed( + &self, + name: Cow<'static, str>, + version: Option>, + schema_url: Option>, + attributes: Option>, + ) -> Box; +} + +impl ObjectSafeLoggerProvider for P +where + L: Logger + Send + Sync + 'static, + P: LoggerProvider, +{ + fn versioned_logger_boxed( + &self, + name: Cow<'static, str>, + version: Option>, + schema_url: Option>, + attributes: Option>, + ) -> Box { + Box::new(self.versioned_logger(name, version, schema_url, attributes)) + } +} + +pub struct BoxedLogger(Box); + +impl fmt::Debug for BoxedLogger { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str("BoxedLogger") + } +} + +impl Logger for BoxedLogger { + fn emit(&self, record: crate::logs::LogRecord) { + self.0.emit(record) + } +} + +#[derive(Clone)] +pub struct GlobalLoggerProvider { + provider: Arc, +} + +impl fmt::Debug for GlobalLoggerProvider { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str("GlobalLoggerProvider") + } +} + +impl GlobalLoggerProvider { + pub fn new< + L: Logger + Send + Sync + 'static, + P: LoggerProvider + Send + Sync + 'static, + >( + provider: P, + ) -> Self { + GlobalLoggerProvider { + provider: Arc::new(provider), + } + } +} + +impl LoggerProvider for GlobalLoggerProvider { + type Logger = BoxedLogger; + + fn versioned_logger( + &self, + name: Cow<'static, str>, + 
version: Option>, + schema_url: Option>, + attributes: Option>, + ) -> Self::Logger { + BoxedLogger( + self.provider + .versioned_logger_boxed(name, version, schema_url, attributes), + ) + } +} + +static GLOBAL_LOGGER_PROVIDER: Lazy> = + Lazy::new(|| RwLock::new(GlobalLoggerProvider::new(NoopLoggerProvider::new()))); + +pub fn logger_provider() -> GlobalLoggerProvider { + GLOBAL_LOGGER_PROVIDER + .read() + .expect("GLOBAL_LOGGER_PROVIDER RwLock poisoned") + .clone() +} + +pub fn logger(name: Cow<'static, str>) -> BoxedLogger { + logger_provider().logger(name) +} + +pub fn set_logger_provider(new_provider: P) -> GlobalLoggerProvider +where + L: Logger + Send + Sync + 'static, + P: LoggerProvider + Send + Sync + 'static, +{ + let mut provider = GLOBAL_LOGGER_PROVIDER + .write() + .expect("GLOBAL_LOGGER_PROVIDER RwLock poisoned"); + mem::replace(&mut *provider, GlobalLoggerProvider::new(new_provider)) +} + +pub fn shutdown_logger_provider() { + let _ = set_logger_provider(NoopLoggerProvider::new()); +} diff --git a/opentelemetry-api/src/global/mod.rs b/opentelemetry-api/src/global/mod.rs index ec32099c6b..966f532637 100644 --- a/opentelemetry-api/src/global/mod.rs +++ b/opentelemetry-api/src/global/mod.rs @@ -142,6 +142,8 @@ //! 
[`set_meter_provider`]: crate::global::set_meter_provider mod error_handler; +#[cfg(feature = "logs")] +mod logs; #[cfg(feature = "metrics")] mod metrics; #[cfg(feature = "trace")] @@ -150,6 +152,12 @@ mod propagation; mod trace; pub use error_handler::{handle_error, set_error_handler, Error}; +#[cfg(feature = "logs")] +#[cfg_attr(docsrs, doc(cfg(feature = "logs")))] +pub use logs::{ + logger, logger_provider, set_logger_provider, shutdown_logger_provider, GlobalLoggerProvider, + ObjectSafeLoggerProvider, +}; #[cfg(feature = "metrics")] #[cfg_attr(docsrs, doc(cfg(feature = "metrics")))] pub use metrics::{ From 47731ca4030310e48bec472f47a2cb56eedd4b8c Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Sat, 6 May 2023 20:56:59 +0530 Subject: [PATCH 47/51] Add include_trace_context, make the component name param ergonomic. --- opentelemetry-api/src/global/logs.rs | 24 +++++++++++++++++------ opentelemetry-api/src/logs/logger.rs | 7 ++++--- opentelemetry-api/src/logs/noop.rs | 16 ++++++++++----- opentelemetry-api/src/logs/record.rs | 10 ++++++++++ opentelemetry-otlp/src/logs.rs | 12 +++++++++++- opentelemetry-sdk/src/logs/log_emitter.rs | 22 ++++++++++++++++++--- 6 files changed, 73 insertions(+), 18 deletions(-) diff --git a/opentelemetry-api/src/global/logs.rs b/opentelemetry-api/src/global/logs.rs index 3bf7610e83..ce80885ef8 100644 --- a/opentelemetry-api/src/global/logs.rs +++ b/opentelemetry-api/src/global/logs.rs @@ -18,6 +18,7 @@ pub trait ObjectSafeLoggerProvider { version: Option>, schema_url: Option>, attributes: Option>, + include_trace_context: bool, ) -> Box; } @@ -32,8 +33,15 @@ where version: Option>, schema_url: Option>, attributes: Option>, + include_trace_context: bool, ) -> Box { - Box::new(self.versioned_logger(name, version, schema_url, attributes)) + Box::new(self.versioned_logger( + name, + version, + schema_url, + attributes, + include_trace_context, + )) } } @@ -80,15 +88,19 @@ impl LoggerProvider for GlobalLoggerProvider { fn versioned_logger( 
&self, - name: Cow<'static, str>, + name: impl Into>, version: Option>, schema_url: Option>, attributes: Option>, + include_trace_context: bool, ) -> Self::Logger { - BoxedLogger( - self.provider - .versioned_logger_boxed(name, version, schema_url, attributes), - ) + BoxedLogger(self.provider.versioned_logger_boxed( + name.into(), + version, + schema_url, + attributes, + include_trace_context, + )) } } diff --git a/opentelemetry-api/src/logs/logger.rs b/opentelemetry-api/src/logs/logger.rs index bb5be76381..47888987b6 100644 --- a/opentelemetry-api/src/logs/logger.rs +++ b/opentelemetry-api/src/logs/logger.rs @@ -11,13 +11,14 @@ pub trait LoggerProvider { fn versioned_logger( &self, - name: Cow<'static, str>, + name: impl Into>, version: Option>, schema_url: Option>, attributes: Option>, + include_trace_context: bool, ) -> Self::Logger; - fn logger(&self, name: Cow<'static, str>) -> Self::Logger { - self.versioned_logger(name, None, None, None) + fn logger(&self, name: impl Into>) -> Self::Logger { + self.versioned_logger(name, None, None, None, true) } } diff --git a/opentelemetry-api/src/logs/noop.rs b/opentelemetry-api/src/logs/noop.rs index 5cb6dcfc41..db5708de1f 100644 --- a/opentelemetry-api/src/logs/noop.rs +++ b/opentelemetry-api/src/logs/noop.rs @@ -1,4 +1,9 @@ -use crate::logs::{LogRecord, Logger, LoggerProvider}; +use std::borrow::Cow; + +use crate::{ + logs::{LogRecord, Logger, LoggerProvider}, + KeyValue, +}; /// A no-op implementation of a [`LoggerProvider`]. 
#[derive(Clone, Debug, Default)] @@ -15,10 +20,11 @@ impl LoggerProvider for NoopLoggerProvider { fn versioned_logger( &self, - _name: std::borrow::Cow<'static, str>, - _version: Option>, - _schema_url: Option>, - _attributes: Option>, + _name: impl Into>, + _version: Option>, + _schema_url: Option>, + _attributes: Option>, + _include_trace_context: bool, ) -> Self::Logger { NoopLogger(()) } diff --git a/opentelemetry-api/src/logs/record.rs b/opentelemetry-api/src/logs/record.rs index 6ac38c2528..13eb923fc7 100644 --- a/opentelemetry-api/src/logs/record.rs +++ b/opentelemetry-api/src/logs/record.rs @@ -50,6 +50,16 @@ pub struct TraceContext { pub trace_flags: Option, } +impl From<&SpanContext> for TraceContext { + fn from(span_context: &SpanContext) -> Self { + TraceContext { + trace_id: span_context.trace_id(), + span_id: span_context.span_id(), + trace_flags: Some(span_context.trace_flags()), + } + } +} + /// Value types for representing arbitrary values in a log record. #[derive(Debug, Clone)] pub enum Any { diff --git a/opentelemetry-otlp/src/logs.rs b/opentelemetry-otlp/src/logs.rs index 1f18e83de3..da15b8db44 100644 --- a/opentelemetry-otlp/src/logs.rs +++ b/opentelemetry-otlp/src/logs.rs @@ -418,12 +418,16 @@ impl OtlpLogPipeline { /// current crate version, using the configured log exporter. /// /// [`Logger`]: opentelemetry::opentelemetry_sdk::logs::Logger - pub fn simple(self) -> Result { + pub fn simple( + self, + include_trace_context: bool, + ) -> Result { Ok(build_simple_with_exporter( self.exporter_builder .ok_or(crate::Error::NoExporterBuilder)? 
.build_log_exporter()?, self.log_config, + include_trace_context, )) } @@ -435,6 +439,7 @@ impl OtlpLogPipeline { pub fn batch( self, runtime: R, + include_trace_context: bool, ) -> Result { Ok(build_batch_with_exporter( self.exporter_builder @@ -442,6 +447,7 @@ impl OtlpLogPipeline { .build_log_exporter()?, self.log_config, runtime, + include_trace_context, )) } } @@ -449,6 +455,7 @@ impl OtlpLogPipeline { fn build_simple_with_exporter( exporter: LogExporter, log_config: Option, + include_trace_context: bool, ) -> opentelemetry_sdk::logs::Logger { let mut provider_builder = opentelemetry_sdk::logs::LoggerProvider::builder().with_simple_exporter(exporter); @@ -461,6 +468,7 @@ fn build_simple_with_exporter( Some(Cow::Borrowed(env!("CARGO_PKG_VERSION"))), None, None, + include_trace_context, ) } @@ -468,6 +476,7 @@ fn build_batch_with_exporter( exporter: LogExporter, log_config: Option, runtime: R, + include_trace_context: bool, ) -> opentelemetry_sdk::logs::Logger { let mut provider_builder = opentelemetry_sdk::logs::LoggerProvider::builder().with_batch_exporter(exporter, runtime); @@ -480,5 +489,6 @@ fn build_batch_with_exporter( Some(Cow::Borrowed("CARGO_PKG_VERSION")), None, None, + include_trace_context, ) } diff --git a/opentelemetry-sdk/src/logs/log_emitter.rs b/opentelemetry-sdk/src/logs/log_emitter.rs index 5d6b97ca7b..6b4501c1d3 100644 --- a/opentelemetry-sdk/src/logs/log_emitter.rs +++ b/opentelemetry-sdk/src/logs/log_emitter.rs @@ -3,7 +3,8 @@ use crate::export::logs::{LogData, LogExporter}; use opentelemetry_api::{ global::{handle_error, Error}, logs::{LogRecord, LogResult}, - InstrumentationLibrary, + trace::TraceContextExt, + Context, InstrumentationLibrary, }; use std::{ borrow::Cow, @@ -25,11 +26,14 @@ impl opentelemetry_api::logs::LoggerProvider for LoggerProvider { /// Create a new versioned `LogEmitter` instance. 
fn versioned_logger( &self, - name: Cow<'static, str>, + name: impl Into>, version: Option>, schema_url: Option>, attributes: Option>, + include_trace_context: bool, ) -> Logger { + let name = name.into(); + let component_name = if name.is_empty() { Cow::Borrowed(DEFAULT_COMPONENT_NAME) } else { @@ -39,6 +43,7 @@ impl opentelemetry_api::logs::LoggerProvider for LoggerProvider { Logger::new( InstrumentationLibrary::new(component_name, version, schema_url, attributes), Arc::downgrade(&self.inner), + include_trace_context, ) } } @@ -169,6 +174,7 @@ impl Builder { /// /// [`LogRecord`]: crate::log::LogRecord pub struct Logger { + include_trace_context: bool, instrumentation_lib: InstrumentationLibrary, provider: Weak, } @@ -177,8 +183,10 @@ impl Logger { pub(crate) fn new( instrumentation_lib: InstrumentationLibrary, provider: Weak, + include_trace_context: bool, ) -> Self { Logger { + include_trace_context, instrumentation_lib, provider, } @@ -205,8 +213,16 @@ impl opentelemetry_api::logs::Logger for Logger { let config = provider.config(); for processor in provider.log_processors() { + let mut record = record.clone(); + if self.include_trace_context { + let ctx = Context::current(); + if ctx.has_active_span() { + let span = ctx.span(); + record.trace_context = Some(span.span_context().into()); + } + } let data = LogData { - record: record.clone(), + record, resource: config.resource.clone(), instrumentation: self.instrumentation_lib.clone(), }; From a617fd54d06de4ac99cce280014e440665c976d9 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Sun, 7 May 2023 22:08:17 +0530 Subject: [PATCH 48/51] Update docs. 
--- opentelemetry-api/src/global/logs.rs | 26 ++++++++++++++++++- opentelemetry-api/src/logs/logger.rs | 28 +++++++++++++++++++++ opentelemetry-api/src/logs/noop.rs | 1 + opentelemetry-otlp/src/logs.rs | 4 +-- opentelemetry-sdk/src/logs/log_emitter.rs | 28 ++++++++++----------- opentelemetry-sdk/src/logs/log_processor.rs | 4 +-- opentelemetry-sdk/src/logs/runtime.rs | 4 +-- opentelemetry-stdout/src/logs/exporter.rs | 28 ++++++++++++++++++++- opentelemetry-stdout/src/logs/mod.rs | 2 +- 9 files changed, 102 insertions(+), 23 deletions(-) diff --git a/opentelemetry-api/src/global/logs.rs b/opentelemetry-api/src/global/logs.rs index ce80885ef8..986679a412 100644 --- a/opentelemetry-api/src/global/logs.rs +++ b/opentelemetry-api/src/global/logs.rs @@ -11,7 +11,16 @@ use crate::{ KeyValue, }; +/// Allows a specific [`LoggerProvider`] to be used generically, by mirroring +/// the interface, and boxing the returned types. +/// +/// [`LoggerProvider`]: crate::logs::LoggerProvider. pub trait ObjectSafeLoggerProvider { + /// Creates a versioned named [`Logger`] instance that is a trait object + /// through the underlying [`LoggerProvider`]. + /// + /// [`Logger`]: crate::logs::Logger + /// [`LoggerProvider`]: crate::logs::LoggerProvider fn versioned_logger_boxed( &self, name: Cow<'static, str>, @@ -60,6 +69,7 @@ impl Logger for BoxedLogger { } #[derive(Clone)] +/// Represents the globally configured [`LoggerProvider`] instance. pub struct GlobalLoggerProvider { provider: Arc, } @@ -71,7 +81,7 @@ impl fmt::Debug for GlobalLoggerProvider { } impl GlobalLoggerProvider { - pub fn new< + fn new< L: Logger + Send + Sync + 'static, P: LoggerProvider + Send + Sync + 'static, >( @@ -107,6 +117,10 @@ impl LoggerProvider for GlobalLoggerProvider { static GLOBAL_LOGGER_PROVIDER: Lazy> = Lazy::new(|| RwLock::new(GlobalLoggerProvider::new(NoopLoggerProvider::new()))); +/// Returns an instance of the currently configured global [`LoggerProvider`] +/// through [`GlobalLoggerProvider`]. 
+/// +/// [`LoggerProvider`]: crate::logs::LoggerProvider pub fn logger_provider() -> GlobalLoggerProvider { GLOBAL_LOGGER_PROVIDER .read() @@ -114,10 +128,19 @@ pub fn logger_provider() -> GlobalLoggerProvider { .clone() } +/// Creates a named instance of [`Logger`] via the configured +/// [`GlobalLoggerProvider`]. +/// +/// If `name` is an empty string, the provider will use a default name. +/// +/// [`Logger`]: crate::logs::Logger pub fn logger(name: Cow<'static, str>) -> BoxedLogger { logger_provider().logger(name) } +/// Sets the given [`LoggerProvider`] instance as the current global provider, +/// returning the [`LoggerProvider`] instance that was previously set as global +/// provider. pub fn set_logger_provider(new_provider: P) -> GlobalLoggerProvider where L: Logger + Send + Sync + 'static, @@ -129,6 +152,7 @@ where mem::replace(&mut *provider, GlobalLoggerProvider::new(new_provider)) } +/// Shut down the current global [`LoggerProvider`]. pub fn shutdown_logger_provider() { let _ = set_logger_provider(NoopLoggerProvider::new()); } diff --git a/opentelemetry-api/src/logs/logger.rs b/opentelemetry-api/src/logs/logger.rs index 47888987b6..04c68d334d 100644 --- a/opentelemetry-api/src/logs/logger.rs +++ b/opentelemetry-api/src/logs/logger.rs @@ -2,13 +2,36 @@ use std::borrow::Cow; use crate::{logs::LogRecord, KeyValue}; +/// The interface for emitting [`LogRecord`]s. pub trait Logger { + /// Emit a [`LogRecord`]. If this `Logger` was created with + /// `include_trace_context` set to `true`, the logger will set the record's + /// [`TraceContext`] to the active trace context, using the current thread's + /// [`Context`]. + /// + /// [`Context`]: crate::Context + /// [`TraceContext`]: crate::logs::TraceContext fn emit(&self, record: LogRecord); } +/// Interfaces that can create [`Logger`] instances. pub trait LoggerProvider { + /// The [`Logger`] type that this provider will return. type Logger: Logger; + /// Returns a new versioned logger with a given name. 
+ /// + /// The `name` should be the application name or the name of the library + /// providing instrumentation. If the name is empty, then an + /// implementation-defined default name may be used instead. + /// + /// If `include_trace_context` is `true`, the newly created [`Logger`] + /// should set the [`TraceContext`] associated with a record to the + /// current thread's active trace context, using [`Context`]. + /// + /// [`Context`]: crate::Context + /// [`TraceContext`]: crate::logs::TraceContext + fn versioned_logger( &self, name: impl Into>, @@ -18,6 +41,11 @@ pub trait LoggerProvider { include_trace_context: bool, ) -> Self::Logger; + /// Returns a new logger with the given name. + /// + /// The `name` should be the application name or the name of the library + /// providing instrumentation. If the name is empty, then an + /// implementation-defined default name may be used instead. fn logger(&self, name: impl Into>) -> Self::Logger { self.versioned_logger(name, None, None, None, true) } diff --git a/opentelemetry-api/src/logs/noop.rs b/opentelemetry-api/src/logs/noop.rs index db5708de1f..172b8ad3bb 100644 --- a/opentelemetry-api/src/logs/noop.rs +++ b/opentelemetry-api/src/logs/noop.rs @@ -10,6 +10,7 @@ use crate::{ pub struct NoopLoggerProvider(()); impl NoopLoggerProvider { + /// Create a new no-op logger provider. pub fn new() -> Self { NoopLoggerProvider(()) } diff --git a/opentelemetry-otlp/src/logs.rs b/opentelemetry-otlp/src/logs.rs index da15b8db44..8550704c33 100644 --- a/opentelemetry-otlp/src/logs.rs +++ b/opentelemetry-otlp/src/logs.rs @@ -417,7 +417,7 @@ impl OtlpLogPipeline { /// Returns a [`Logger`] with the name `opentelemetry-otlp` and the /// current crate version, using the configured log exporter. 
/// - /// [`Logger`]: opentelemetry::opentelemetry_sdk::logs::Logger + /// [`Logger`]: opentelemetry_sdk::logs::Logger pub fn simple( self, include_trace_context: bool, @@ -435,7 +435,7 @@ impl OtlpLogPipeline { /// current crate version, using the configured log exporter and a /// batch log processor. /// - /// [`Logger`]: opentelemetry::log::Logger + /// [`Logger`]: opentelemetry_sdk::logs::Logger pub fn batch( self, runtime: R, diff --git a/opentelemetry-sdk/src/logs/log_emitter.rs b/opentelemetry-sdk/src/logs/log_emitter.rs index 6b4501c1d3..432121a2d1 100644 --- a/opentelemetry-sdk/src/logs/log_emitter.rs +++ b/opentelemetry-sdk/src/logs/log_emitter.rs @@ -12,18 +12,18 @@ use std::{ }; #[derive(Debug, Clone)] -/// Creator for `LogEmitter` instances. +/// Creator for `Logger` instances. pub struct LoggerProvider { inner: Arc, } -/// Default log emitter name if empty string is provided. -const DEFAULT_COMPONENT_NAME: &str = "rust.opentelemetry.io/sdk/logemitter"; +/// Default logger name if empty string is provided. +const DEFAULT_COMPONENT_NAME: &str = "rust.opentelemetry.io/sdk/logger"; impl opentelemetry_api::logs::LoggerProvider for LoggerProvider { type Logger = Logger; - /// Create a new versioned `LogEmitter` instance. + /// Create a new versioned `Logger` instance. fn versioned_logger( &self, name: impl Into>, @@ -49,12 +49,12 @@ impl opentelemetry_api::logs::LoggerProvider for LoggerProvider { } impl LoggerProvider { - /// Build a new log emitter provider. + /// Build a new logger provider. pub(crate) fn new(inner: Arc) -> Self { LoggerProvider { inner } } - /// Create a new `LogEmitterProvider` builder. + /// Create a new `LoggerProvider` builder. pub fn builder() -> Builder { Builder::default() } @@ -77,14 +77,14 @@ impl LoggerProvider { .collect() } - /// Shuts down this `LogEmitterProvider`, panicking on failure. + /// Shuts down this `LoggerProvider`, panicking on failure. 
pub fn shutdown(&mut self) -> Vec> { self.try_shutdown() - .expect("canont shutdown LogEmitterProvider when child LogEmitters are still active") + .expect("canont shutdown LoggerProvider when child Loggers are still active") } - /// Attempts to shutdown this `LogEmitterProvider`, succeeding only when - /// all cloned `LogEmitterProvider` values have been dropped. + /// Attempts to shutdown this `LoggerProvider`, succeeding only when + /// all cloned `LoggerProvider` values have been dropped. pub fn try_shutdown(&mut self) -> Option>> { Arc::get_mut(&mut self.inner).map(|inner| { inner @@ -100,7 +100,7 @@ impl Drop for LoggerProvider { fn drop(&mut self) { match self.try_shutdown() { None => handle_error(Error::Other( - "canont shutdown LogEmitterProvider when child LogEmitters are still active".into(), + "canont shutdown LoggerProvider when child Loggers are still active".into(), )), Some(results) => { for result in results { @@ -172,7 +172,7 @@ impl Builder { #[derive(Debug)] /// The object for emitting [`LogRecord`]s. /// -/// [`LogRecord`]: crate::log::LogRecord +/// [`LogRecord`]: opentelemetry_api::logs::LogRecord pub struct Logger { include_trace_context: bool, instrumentation_lib: InstrumentationLibrary, @@ -192,12 +192,12 @@ impl Logger { } } - /// LogEmitterProvider associated with this tracer. + /// LoggerProvider associated with this logger. pub fn provider(&self) -> Option { self.provider.upgrade().map(LoggerProvider::new) } - /// Instrumentation library information of this tracer. + /// Instrumentation library information of this logger. 
pub fn instrumentation_library(&self) -> &InstrumentationLibrary { &self.instrumentation_lib } diff --git a/opentelemetry-sdk/src/logs/log_processor.rs b/opentelemetry-sdk/src/logs/log_processor.rs index a6fea44596..62c1c151de 100644 --- a/opentelemetry-sdk/src/logs/log_processor.rs +++ b/opentelemetry-sdk/src/logs/log_processor.rs @@ -18,9 +18,9 @@ use std::{ time::Duration, }; -/// The interface for plugging into a [`LogEmitter`]. +/// The interface for plugging into a [`Logger`]. /// -/// [`LogEmitter`]: crate::log::LogEmitter +/// [`Logger`]: crate::logs::Logger pub trait LogProcessor: Send + Sync + Debug { /// Called when a log record is ready to processed and exported. fn emit(&self, data: LogData); diff --git a/opentelemetry-sdk/src/logs/runtime.rs b/opentelemetry-sdk/src/logs/runtime.rs index 3d5ad32115..bc15b5cdf8 100644 --- a/opentelemetry-sdk/src/logs/runtime.rs +++ b/opentelemetry-sdk/src/logs/runtime.rs @@ -2,7 +2,7 @@ //! Log runtime is an extension to [`Runtime`]. Currently it provides a channel that used //! by [`BatchLogProcessor`]. //! -//! [`BatchLogProcessor`]: crate::log::BatchLogProcessor +//! [`BatchLogProcessor`]: crate::logs::BatchLogProcessor //! [`Runtime`]: crate::runtime::Runtime use crate::logs::BatchMessage; #[cfg(feature = "rt-async-std")] @@ -34,7 +34,7 @@ const CHANNEL_CLOSED_ERROR: &str = /// Log runtime is an extension to [`Runtime`]. Currently it provides a channel that used /// by [`BatchLogProcessor`]. /// -/// [`BatchLogProcessor`]: crate::log::BatchLogProcessor +/// [`BatchLogProcessor`]: crate::logs::BatchLogProcessor /// [`Runtime`]: crate::runtime::Runtime pub trait LogRuntime: Runtime { /// A future stream to receive the batch messages from channels. 
diff --git a/opentelemetry-stdout/src/logs/exporter.rs b/opentelemetry-stdout/src/logs/exporter.rs index 2e7d107698..2eefc9ea34 100644 --- a/opentelemetry-stdout/src/logs/exporter.rs +++ b/opentelemetry-stdout/src/logs/exporter.rs @@ -22,6 +22,7 @@ pub struct LogExporter { } impl LogExporter { + /// Create a builder to configure this exporter. pub fn builder() -> LogExporterBuilder { Default::default() } @@ -69,6 +70,7 @@ impl ExportError for Error { } } +/// Configuration for the stdout log exporter #[derive(Default)] pub struct LogExporterBuilder { writer: Option>, @@ -82,6 +84,16 @@ impl fmt::Debug for LogExporterBuilder { } impl LogExporterBuilder { + /// Set the writer that the exporter will write to + /// + /// # Examples + /// + /// ``` + /// use opentelemetry_stdout::LogExporterBuilder; + /// + /// let buffer = Vec::new(); // Any type that implements `Write` + /// let exporter = LogExporterBuilder::default().with_writer(buffer).build(); + /// ``` pub fn with_writer(mut self, writer: W) -> Self where W: Write + Send + Sync + 'static, @@ -90,7 +102,20 @@ impl LogExporterBuilder { self } - pub fn with_exporter(mut self, encoder: E) -> Self + /// Set the encoder that the exporter will use. + /// + /// # Examples + /// + /// ``` + /// use opentelemetry_stdout::LogExporterBuilder; + /// use serde_json; + /// + /// let exporter = LogExporterBuilder::default() + /// .with_encoder(|writer, data| + /// Ok(serde_json::to_writer_pretty(writer, &data).unwrap())) + /// .build(); + /// ``` + pub fn with_encoder(mut self, encoder: E) -> Self where E: Fn(&mut dyn Write, crate::logs::transform::LogData) -> LogResult<()> + Send @@ -101,6 +126,7 @@ impl LogExporterBuilder { self } + /// Create a log exporter with the current configuration. 
pub fn build(self) -> LogExporter { LogExporter { writer: Some(self.writer.unwrap_or_else(|| Box::new(stdout()))), diff --git a/opentelemetry-stdout/src/logs/mod.rs b/opentelemetry-stdout/src/logs/mod.rs index 87a88bc756..0d381e2cf0 100644 --- a/opentelemetry-stdout/src/logs/mod.rs +++ b/opentelemetry-stdout/src/logs/mod.rs @@ -4,7 +4,7 @@ //! [`Write`] instance. By default it will write to [`Stdout`]. //! //! [`LogExporter`]: opentelemetry_sdk::export::logs::LogExporter -//! [`LogRecord`]: crate::logs::LogRecord +//! [`LogRecord`]: opentelemetry_api::logs::LogRecord //! [`Write`]: std::io::Write //! [`Stdout`]: std::io::Stdout // TODO: Add an example for using this exporter. From ad676ace2d7abba65289981f9e0adbaf8bd0c83a Mon Sep 17 00:00:00 2001 From: Zhongyang Wu Date: Tue, 9 May 2023 19:40:54 -0700 Subject: [PATCH 49/51] fix: lint --- opentelemetry-proto/src/transform/common.rs | 9 +++++++-- opentelemetry-proto/src/transform/logs.rs | 7 ++----- opentelemetry-stdout/src/common.rs | 1 + opentelemetry-stdout/src/logs/exporter.rs | 12 +++++------- 4 files changed, 15 insertions(+), 14 deletions(-) diff --git a/opentelemetry-proto/src/transform/common.rs b/opentelemetry-proto/src/transform/common.rs index 0050af12cc..f7215fde2a 100644 --- a/opentelemetry-proto/src/transform/common.rs +++ b/opentelemetry-proto/src/transform/common.rs @@ -14,9 +14,12 @@ pub mod tonic { any_value, AnyValue, ArrayValue, InstrumentationScope, KeyValue, }; use opentelemetry_api::{Array, Value}; - use opentelemetry_sdk::{trace::EvictedHashMap, Resource}; + use opentelemetry_sdk::trace::EvictedHashMap; use std::borrow::Cow; + #[cfg(any(feature = "traces", feature = "logs"))] + use opentelemetry_sdk::Resource; + impl From for InstrumentationScope { fn from(library: opentelemetry_sdk::InstrumentationLibrary) -> Self { InstrumentationScope { @@ -134,7 +137,9 @@ pub mod grpcio { use crate::proto::grpcio::common::{AnyValue, ArrayValue, InstrumentationScope, KeyValue}; use 
opentelemetry_api::{Array, Value}; use opentelemetry_sdk::{trace::EvictedHashMap, Resource}; - use protobuf::{RepeatedField, SingularPtrField}; + use protobuf::RepeatedField; + #[cfg(feature = "logs")] + use protobuf::SingularPtrField; use std::borrow::Cow; impl From for InstrumentationScope { diff --git a/opentelemetry-proto/src/transform/logs.rs b/opentelemetry-proto/src/transform/logs.rs index 938d5cdb2a..c8319e1894 100644 --- a/opentelemetry-proto/src/transform/logs.rs +++ b/opentelemetry-proto/src/transform/logs.rs @@ -85,7 +85,7 @@ pub mod tonic { None => SeverityNumber::Unspecified, }; - let record = LogRecord { + LogRecord { time_unix_nano: log_record.timestamp.map(to_nanos).unwrap_or_default(), observed_time_unix_nano: log_record .observed_timestamp @@ -113,9 +113,7 @@ pub mod tonic { trace_id: trace_context .map(|ctx| ctx.trace_id.to_bytes().to_vec()) .unwrap_or_default(), - ..Default::default() - }; - record + } } } @@ -136,7 +134,6 @@ pub mod tonic { .unwrap_or_default(), scope: Some(log_data.instrumentation.into()), log_records: vec![log_data.record.into()], - ..Default::default() }], } } diff --git a/opentelemetry-stdout/src/common.rs b/opentelemetry-stdout/src/common.rs index 67e9570ce5..4573fc618c 100644 --- a/opentelemetry-stdout/src/common.rs +++ b/opentelemetry-stdout/src/common.rs @@ -76,6 +76,7 @@ impl From for Key { } #[derive(Debug, Serialize, Clone)] +#[allow(dead_code)] pub(crate) enum Value { #[serde(rename = "boolValue")] Bool(bool), diff --git a/opentelemetry-stdout/src/logs/exporter.rs b/opentelemetry-stdout/src/logs/exporter.rs index 2eefc9ea34..0ea58382f0 100644 --- a/opentelemetry-stdout/src/logs/exporter.rs +++ b/opentelemetry-stdout/src/logs/exporter.rs @@ -44,14 +44,12 @@ impl fmt::Debug for LogExporter { impl opentelemetry_sdk::export::logs::LogExporter for LogExporter { /// Export spans to stdout async fn export(&mut self, batch: Vec) -> ExportResult { - let res = if let Some(writer) = &mut self.writer { - (self.encoder)(writer, 
crate::logs::LogData::from(batch)) - .and_then(|_| writer.write_all(b"\n").map_err(|e| Error(e).into())) + if let Some(writer) = &mut self.writer { + let result = (self.encoder)(writer, crate::logs::LogData::from(batch)) as LogResult<()>; + result.and_then(|_| writer.write_all(b"\n").map_err(|e| Error(e).into())) } else { Err("exporter is shut down".into()) - }; - - res + } } fn shutdown(&mut self) { @@ -112,7 +110,7 @@ impl LogExporterBuilder { /// /// let exporter = LogExporterBuilder::default() /// .with_encoder(|writer, data| - /// Ok(serde_json::to_writer_pretty(writer, &data).unwrap())) + /// Ok(serde_json::to_writer_pretty(writer, &data).unwrap())) /// .build(); /// ``` pub fn with_encoder(mut self, encoder: E) -> Self From 5efc75dd90c38a20aa322fbff4eea50553fb35f2 Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Sat, 13 May 2023 21:47:26 +0530 Subject: [PATCH 50/51] logs: Rename Any to AnyValue. --- opentelemetry-api/src/logs/mod.rs | 2 +- opentelemetry-api/src/logs/record.rs | 66 +++++++++++------------ opentelemetry-proto/src/transform/logs.rs | 55 ++++++++++--------- opentelemetry-stdout/src/common.rs | 22 ++++---- 4 files changed, 74 insertions(+), 71 deletions(-) diff --git a/opentelemetry-api/src/logs/mod.rs b/opentelemetry-api/src/logs/mod.rs index d862b69042..dae7a3c8d4 100644 --- a/opentelemetry-api/src/logs/mod.rs +++ b/opentelemetry-api/src/logs/mod.rs @@ -11,7 +11,7 @@ mod record; pub use logger::{Logger, LoggerProvider}; pub use noop::NoopLoggerProvider; -pub use record::{Any, LogRecord, LogRecordBuilder, Severity, TraceContext}; +pub use record::{AnyValue, LogRecord, LogRecordBuilder, Severity, TraceContext}; /// Describe the result of operations in log SDK. 
pub type LogResult = Result; diff --git a/opentelemetry-api/src/logs/record.rs b/opentelemetry-api/src/logs/record.rs index 13eb923fc7..9f73aca756 100644 --- a/opentelemetry-api/src/logs/record.rs +++ b/opentelemetry-api/src/logs/record.rs @@ -24,10 +24,10 @@ pub struct LogRecord { pub severity_number: Option, /// Record body - pub body: Option, + pub body: Option, /// Additional attributes associated with this record - pub attributes: Option>, + pub attributes: Option>, } impl LogRecord { @@ -62,7 +62,7 @@ impl From<&SpanContext> for TraceContext { /// Value types for representing arbitrary values in a log record. #[derive(Debug, Clone)] -pub enum Any { +pub enum AnyValue { /// An integer value Int(i64), /// A double value @@ -74,58 +74,58 @@ pub enum Any { /// A byte array Bytes(Vec), /// An array of `Any` values - ListAny(Vec), + ListAny(Vec), /// A map of string keys to `Any` values, arbitrarily nested. - Map(OrderMap), + Map(OrderMap), } macro_rules! impl_trivial_from { ($t:ty, $variant:path) => { - impl From<$t> for Any { - fn from(val: $t) -> Any { + impl From<$t> for AnyValue { + fn from(val: $t) -> AnyValue { $variant(val.into()) } } }; } -impl_trivial_from!(i8, Any::Int); -impl_trivial_from!(i16, Any::Int); -impl_trivial_from!(i32, Any::Int); -impl_trivial_from!(i64, Any::Int); +impl_trivial_from!(i8, AnyValue::Int); +impl_trivial_from!(i16, AnyValue::Int); +impl_trivial_from!(i32, AnyValue::Int); +impl_trivial_from!(i64, AnyValue::Int); -impl_trivial_from!(u8, Any::Int); -impl_trivial_from!(u16, Any::Int); -impl_trivial_from!(u32, Any::Int); +impl_trivial_from!(u8, AnyValue::Int); +impl_trivial_from!(u16, AnyValue::Int); +impl_trivial_from!(u32, AnyValue::Int); -impl_trivial_from!(f64, Any::Double); -impl_trivial_from!(f32, Any::Double); +impl_trivial_from!(f64, AnyValue::Double); +impl_trivial_from!(f32, AnyValue::Double); -impl_trivial_from!(String, Any::String); -impl_trivial_from!(Cow<'static, str>, Any::String); -impl_trivial_from!(&'static str, 
Any::String); -impl_trivial_from!(StringValue, Any::String); +impl_trivial_from!(String, AnyValue::String); +impl_trivial_from!(Cow<'static, str>, AnyValue::String); +impl_trivial_from!(&'static str, AnyValue::String); +impl_trivial_from!(StringValue, AnyValue::String); -impl_trivial_from!(bool, Any::Boolean); +impl_trivial_from!(bool, AnyValue::Boolean); -impl> FromIterator for Any { +impl> FromIterator for AnyValue { /// Creates an [`Any::ListAny`] value from a sequence of `Into` values. fn from_iter>(iter: I) -> Self { - Any::ListAny(iter.into_iter().map(Into::into).collect()) + AnyValue::ListAny(iter.into_iter().map(Into::into).collect()) } } -impl, V: Into> FromIterator<(K, V)> for Any { +impl, V: Into> FromIterator<(K, V)> for AnyValue { /// Creates an [`Any::Map`] value from a sequence of key-value pairs /// that can be converted into a `Key` and `Any` respectively. fn from_iter>(iter: I) -> Self { - Any::Map(OrderMap::from_iter( + AnyValue::Map(OrderMap::from_iter( iter.into_iter().map(|(k, v)| (k.into(), v.into())), )) } } -impl From for Any { +impl From for AnyValue { fn from(value: Value) -> Self { match value { Value::Bool(b) => b.into(), @@ -133,10 +133,10 @@ impl From for Any { Value::F64(f) => f.into(), Value::String(s) => s.into(), Value::Array(a) => match a { - Array::Bool(b) => Any::from_iter(b), - Array::F64(f) => Any::from_iter(f), - Array::I64(i) => Any::from_iter(i), - Array::String(s) => Any::from_iter(s), + Array::Bool(b) => AnyValue::from_iter(b), + Array::F64(f) => AnyValue::from_iter(f), + Array::I64(i) => AnyValue::from_iter(i), + Array::String(s) => AnyValue::from_iter(s), }, } } @@ -317,7 +317,7 @@ impl LogRecordBuilder { } /// Assign body - pub fn with_body(self, body: Any) -> Self { + pub fn with_body(self, body: AnyValue) -> Self { Self { record: LogRecord { body: Some(body), @@ -327,7 +327,7 @@ impl LogRecordBuilder { } /// Assign attributes, overriding previously set attributes - pub fn with_attributes(self, attributes: OrderMap) 
-> Self { + pub fn with_attributes(self, attributes: OrderMap) -> Self { Self { record: LogRecord { attributes: Some(attributes), @@ -340,7 +340,7 @@ impl LogRecordBuilder { pub fn with_attribute(mut self, key: K, value: V) -> Self where K: Into, - V: Into, + V: Into, { if let Some(ref mut map) = self.record.attributes { map.insert(key.into(), value.into()); diff --git a/opentelemetry-proto/src/transform/logs.rs b/opentelemetry-proto/src/transform/logs.rs index c8319e1894..b02fc99293 100644 --- a/opentelemetry-proto/src/transform/logs.rs +++ b/opentelemetry-proto/src/transform/logs.rs @@ -11,26 +11,26 @@ pub mod tonic { }, transform::common::tonic::resource_attributes, }; - use opentelemetry_api::logs::{Any, Severity}; + use opentelemetry_api::logs::{AnyValue as LogsAnyValue, Severity}; use super::*; - impl From for AnyValue { - fn from(value: Any) -> Self { + impl From for AnyValue { + fn from(value: LogsAnyValue) -> Self { AnyValue { value: Some(value.into()), } } } - impl From for Value { - fn from(value: Any) -> Self { + impl From for Value { + fn from(value: LogsAnyValue) -> Self { match value { - Any::Double(f) => Value::DoubleValue(f), - Any::Int(i) => Value::IntValue(i), - Any::String(s) => Value::StringValue(s.into()), - Any::Boolean(b) => Value::BoolValue(b), - Any::ListAny(v) => Value::ArrayValue(ArrayValue { + LogsAnyValue::Double(f) => Value::DoubleValue(f), + LogsAnyValue::Int(i) => Value::IntValue(i), + LogsAnyValue::String(s) => Value::StringValue(s.into()), + LogsAnyValue::Boolean(b) => Value::BoolValue(b), + LogsAnyValue::ListAny(v) => Value::ArrayValue(ArrayValue { values: v .into_iter() .map(|v| AnyValue { @@ -38,7 +38,7 @@ pub mod tonic { }) .collect(), }), - Any::Map(m) => Value::KvlistValue(KeyValueList { + LogsAnyValue::Map(m) => Value::KvlistValue(KeyValueList { values: m .into_iter() .map(|(key, value)| KeyValue { @@ -49,7 +49,7 @@ pub mod tonic { }) .collect(), }), - Any::Bytes(v) => Value::BytesValue(v), + LogsAnyValue::Bytes(v) => 
Value::BytesValue(v), } } } @@ -85,7 +85,7 @@ pub mod tonic { None => SeverityNumber::Unspecified, }; - LogRecord { + let record = LogRecord { time_unix_nano: log_record.timestamp.map(to_nanos).unwrap_or_default(), observed_time_unix_nano: log_record .observed_timestamp @@ -113,7 +113,9 @@ pub mod tonic { trace_id: trace_context .map(|ctx| ctx.trace_id.to_bytes().to_vec()) .unwrap_or_default(), - } + ..Default::default() + }; + record } } @@ -134,6 +136,7 @@ pub mod tonic { .unwrap_or_default(), scope: Some(log_data.instrumentation.into()), log_records: vec![log_data.record.into()], + ..Default::default() }], } } @@ -151,13 +154,13 @@ pub mod grpcio { }, transform::common::grpcio::resource_attributes, }; - use opentelemetry_api::logs::{Any, Severity}; + use opentelemetry_api::logs::{AnyValue as LogsAnyValue, Severity}; use protobuf::{RepeatedField, SingularPtrField}; use super::*; - impl From for AnyValue { - fn from(value: Any) -> Self { + impl From for AnyValue { + fn from(value: LogsAnyValue) -> Self { AnyValue { value: Some(value.into()), ..Default::default() @@ -165,14 +168,14 @@ pub mod grpcio { } } - impl From for AnyValue_oneof_value { - fn from(value: Any) -> Self { + impl From for AnyValue_oneof_value { + fn from(value: LogsAnyValue) -> Self { match value { - Any::Double(f) => AnyValue_oneof_value::double_value(f), - Any::Int(i) => AnyValue_oneof_value::int_value(i), - Any::String(s) => AnyValue_oneof_value::string_value(s.into()), - Any::Boolean(b) => AnyValue_oneof_value::bool_value(b), - Any::ListAny(v) => AnyValue_oneof_value::array_value(ArrayValue { + LogsAnyValue::Double(f) => AnyValue_oneof_value::double_value(f), + LogsAnyValue::Int(i) => AnyValue_oneof_value::int_value(i), + LogsAnyValue::String(s) => AnyValue_oneof_value::string_value(s.into()), + LogsAnyValue::Boolean(b) => AnyValue_oneof_value::bool_value(b), + LogsAnyValue::ListAny(v) => AnyValue_oneof_value::array_value(ArrayValue { values: RepeatedField::from_vec( v.into_iter() .map(|v| 
AnyValue { @@ -183,7 +186,7 @@ pub mod grpcio { ), ..Default::default() }), - Any::Map(m) => AnyValue_oneof_value::kvlist_value(KeyValueList { + LogsAnyValue::Map(m) => AnyValue_oneof_value::kvlist_value(KeyValueList { values: RepeatedField::from_vec( m.into_iter() .map(|(key, value)| KeyValue { @@ -198,7 +201,7 @@ pub mod grpcio { ), ..Default::default() }), - Any::Bytes(v) => AnyValue_oneof_value::bytes_value(v), + LogsAnyValue::Bytes(v) => AnyValue_oneof_value::bytes_value(v), } } } diff --git a/opentelemetry-stdout/src/common.rs b/opentelemetry-stdout/src/common.rs index 4573fc618c..1758986941 100644 --- a/opentelemetry-stdout/src/common.rs +++ b/opentelemetry-stdout/src/common.rs @@ -148,17 +148,17 @@ impl From for Value { } #[cfg(feature = "logs")] -impl From for Value { - fn from(value: opentelemetry_api::logs::Any) -> Self { +impl From for Value { + fn from(value: opentelemetry_api::logs::AnyValue) -> Self { match value { - opentelemetry_api::logs::Any::Boolean(b) => Value::Bool(b), - opentelemetry_api::logs::Any::Int(i) => Value::Int(i), - opentelemetry_api::logs::Any::Double(d) => Value::Double(d), - opentelemetry_api::logs::Any::String(s) => Value::String(s.into()), - opentelemetry_api::logs::Any::ListAny(a) => { + opentelemetry_api::logs::AnyValue::Boolean(b) => Value::Bool(b), + opentelemetry_api::logs::AnyValue::Int(i) => Value::Int(i), + opentelemetry_api::logs::AnyValue::Double(d) => Value::Double(d), + opentelemetry_api::logs::AnyValue::String(s) => Value::String(s.into()), + opentelemetry_api::logs::AnyValue::ListAny(a) => { Value::Array(a.into_iter().map(Into::into).collect()) } - opentelemetry_api::logs::Any::Map(m) => Value::KeyValues( + opentelemetry_api::logs::AnyValue::Map(m) => Value::KeyValues( m.into_iter() .map(|(key, value)| KeyValue { key: key.into(), @@ -166,7 +166,7 @@ impl From for Value { }) .collect(), ), - opentelemetry_api::logs::Any::Bytes(b) => Value::BytesValue(b), + opentelemetry_api::logs::AnyValue::Bytes(b) => 
Value::BytesValue(b), } } } @@ -179,8 +179,8 @@ pub(crate) struct KeyValue { } #[cfg(feature = "logs")] -impl From<(opentelemetry_api::Key, opentelemetry_api::logs::Any)> for KeyValue { - fn from((key, value): (opentelemetry_api::Key, opentelemetry_api::logs::Any)) -> Self { +impl From<(opentelemetry_api::Key, opentelemetry_api::logs::AnyValue)> for KeyValue { + fn from((key, value): (opentelemetry_api::Key, opentelemetry_api::logs::AnyValue)) -> Self { KeyValue { key: key.into(), value: value.into(), From 59469cb9dd73a716f3ab9cb62948754b356e11fc Mon Sep 17 00:00:00 2001 From: Vibhav Pant Date: Sun, 14 May 2023 12:40:33 +0530 Subject: [PATCH 51/51] Address docs and lint issues. --- opentelemetry-api/src/logs/record.rs | 6 +++--- opentelemetry-proto/src/transform/logs.rs | 7 ++----- 2 files changed, 5 insertions(+), 8 deletions(-) diff --git a/opentelemetry-api/src/logs/record.rs b/opentelemetry-api/src/logs/record.rs index 9f73aca756..f0dbf6b319 100644 --- a/opentelemetry-api/src/logs/record.rs +++ b/opentelemetry-api/src/logs/record.rs @@ -109,15 +109,15 @@ impl_trivial_from!(StringValue, AnyValue::String); impl_trivial_from!(bool, AnyValue::Boolean); impl> FromIterator for AnyValue { - /// Creates an [`Any::ListAny`] value from a sequence of `Into` values. + /// Creates an [`AnyValue::ListAny`] value from a sequence of `Into` values. fn from_iter>(iter: I) -> Self { AnyValue::ListAny(iter.into_iter().map(Into::into).collect()) } } impl, V: Into> FromIterator<(K, V)> for AnyValue { - /// Creates an [`Any::Map`] value from a sequence of key-value pairs - /// that can be converted into a `Key` and `Any` respectively. + /// Creates an [`AnyValue::Map`] value from a sequence of key-value pairs + /// that can be converted into a `Key` and `AnyValue` respectively. 
fn from_iter>(iter: I) -> Self { AnyValue::Map(OrderMap::from_iter( iter.into_iter().map(|(k, v)| (k.into(), v.into())), diff --git a/opentelemetry-proto/src/transform/logs.rs b/opentelemetry-proto/src/transform/logs.rs index b02fc99293..59d984bec5 100644 --- a/opentelemetry-proto/src/transform/logs.rs +++ b/opentelemetry-proto/src/transform/logs.rs @@ -85,7 +85,7 @@ pub mod tonic { None => SeverityNumber::Unspecified, }; - let record = LogRecord { + LogRecord { time_unix_nano: log_record.timestamp.map(to_nanos).unwrap_or_default(), observed_time_unix_nano: log_record .observed_timestamp @@ -113,9 +113,7 @@ pub mod tonic { trace_id: trace_context .map(|ctx| ctx.trace_id.to_bytes().to_vec()) .unwrap_or_default(), - ..Default::default() - }; - record + } } } @@ -136,7 +134,6 @@ pub mod tonic { .unwrap_or_default(), scope: Some(log_data.instrumentation.into()), log_records: vec![log_data.record.into()], - ..Default::default() }], } }