Skip to content

Commit

Permalink
cleanup, remove unused functions, clippy shenanigans
Browse files Browse the repository at this point in the history
  • Loading branch information
pront committed Oct 4, 2023
1 parent de5b38b commit b24c9b9
Show file tree
Hide file tree
Showing 3 changed files with 27 additions and 51 deletions.
4 changes: 2 additions & 2 deletions lib/codecs/tests/native.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,12 @@ use std::{
};

use bytes::{Bytes, BytesMut};
use similar_asserts::assert_eq;

use codecs::{
decoding::format::Deserializer, encoding::format::Serializer, NativeDeserializerConfig,
NativeJsonDeserializerConfig, NativeJsonSerializerConfig, NativeSerializerConfig,
};
use similar_asserts::assert_eq;
use vector_core::{config::LogNamespace, event::Event};

#[test]
Expand Down Expand Up @@ -46,7 +47,6 @@ fn roundtrip_current_native_proto_fixtures() {

/// The event proto file was changed in v0.24. This test ensures we can still load the old version
/// binary and that when serialized and deserialized in the new format we still get the same event.
#[ignore]
#[test]
fn reserialize_pre_v24_native_json_fixtures() {
roundtrip_fixtures(
Expand Down
7 changes: 5 additions & 2 deletions lib/codecs/tests/native_json.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
use bytes::BytesMut;

use codecs::decoding::format::Deserializer;
use codecs::encoding::format::Serializer;
use codecs::{NativeJsonDeserializerConfig, NativeJsonSerializerConfig};
Expand Down Expand Up @@ -35,7 +34,11 @@ fn histogram_metric_roundtrip() {
MetricValue::AggregatedHistogram {
count: 1,
sum: 1.0,
buckets: buckets!(f64::NEG_INFINITY => 10 , f64::MIN => 10, 1.5 => 10, f64::MAX => 10, f64::INFINITY => 10),
buckets: buckets!(
f64::NEG_INFINITY => 10 ,
f64::MIN => 10, 1.5 => 10,
f64::MAX => 10,
f64::INFINITY => 10),
},
));

Expand Down
67 changes: 20 additions & 47 deletions lib/vector-core/src/event/metric/value.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,5 @@
use core::fmt;
use std::collections::BTreeSet;
use std::fmt::Debug;
use std::num::{ParseFloatError, ParseIntError};
use std::str::FromStr;

use serde::{Deserialize, Deserializer, Serialize, Serializer};

Expand Down Expand Up @@ -454,7 +451,7 @@ impl fmt::Display for MetricValue {
} => {
write!(fmt, "count={count} sum={sum} ")?;
write_list(fmt, " ", buckets, |fmt, bucket| {
fmt::Display::fmt(&bucket, fmt)
write!(fmt, "{}@{}", bucket.count, bucket.upper_limit)
})
}
MetricValue::AggregatedSummary {
Expand Down Expand Up @@ -603,39 +600,25 @@ impl ByteSizeOf for Sample {
}
}

/// A histogram bucket.
///
/// Histogram buckets represent the `count` of observations where the value of the observations does
/// not exceed the specified `upper_limit`.
#[configurable_component(no_deser, no_ser)]
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub struct Bucket {
/// The upper limit of values in the bucket.
#[serde(serialize_with = "serialize_f64", deserialize_with = "deserialize_f64")]
pub upper_limit: f64,

/// The number of values tracked in this bucket.
pub count: u64,
}

/// Custom serialization function which converts special f64 values to strings.
#[allow(clippy::trivially_copy_pass_by_ref)]
fn serialize_f64<S>(value: &f64, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
//println!("serializing {}", value);
if value.is_infinite() || value.is_nan() {
serializer.serialize_str(&format!("{}", value))
serializer.serialize_str(&format!("{value}"))
} else {
serializer.serialize_f64(*value)
}
}

/// Custom deserialization function for handling special f64 values.
fn deserialize_f64<'de, D>(deserializer: D) -> Result<f64, D::Error>
where
D: Deserializer<'de>,
{
let value: serde_json::Value = Deserialize::deserialize(deserializer)?;
//println!("deserializing {}", value);
match value {
serde_json::Value::Number(num) => num
.as_f64()
Expand All @@ -650,6 +633,21 @@ where
}
}

/// A histogram bucket.
///
/// Histogram buckets represent the `count` of observations where the value of the observations does
/// not exceed the specified `upper_limit`.
#[configurable_component(no_deser, no_ser)]
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub struct Bucket {
    /// The upper limit of values in the bucket.
    // Custom serde hooks so non-finite limits (e.g. `f64::INFINITY` bucket
    // bounds) round-trip as strings; see `serialize_f64`/`deserialize_f64`.
    #[serde(serialize_with = "serialize_f64", deserialize_with = "deserialize_f64")]
    pub upper_limit: f64,

    /// The number of values tracked in this bucket.
    pub count: u64,
}

impl PartialEq for Bucket {
fn eq(&self, other: &Self) -> bool {
self.count == other.count && float_eq(self.upper_limit, other.upper_limit)
Expand All @@ -662,31 +660,6 @@ impl ByteSizeOf for Bucket {
}
}

/// Renders a bucket in the `count@upper_limit` form understood by its
/// `FromStr` implementation.
impl fmt::Display for Bucket {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let Self { upper_limit, count } = self;
        write!(f, "{count}@{upper_limit}")
    }
}

/// Parses a bucket from the `count@upper_limit` form produced by the
/// `Display` implementation.
impl FromStr for Bucket {
    type Err = vector_common::Error;

    /// Parses `count@upper_limit` into a [`Bucket`].
    ///
    /// # Errors
    ///
    /// Returns an error when the `@` separator is absent, when either side
    /// fails to parse as `u64`/`f64`, or when extra `@`-separated segments are
    /// present (previously such trailing garbage was silently ignored).
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // `split_once` yields `None` when there is no separator, replacing the
        // old unreachable "Missing count" branch (`split().next()` can never
        // be `None` on the first call). It also forces the remainder into the
        // `f64` parse, so `"1@2@3"` is now rejected instead of dropping `"3"`.
        let (count, upper_limit) = s
            .split_once('@')
            .ok_or_else(|| "Missing upper limit".to_string())?;
        let count: u64 = count
            .parse()
            .map_err(|err: ParseIntError| err.to_string())?;
        let upper_limit: f64 = upper_limit
            .parse()
            .map_err(|err: ParseFloatError| err.to_string())?;
        Ok(Self { upper_limit, count })
    }
}

/// A single quantile observation.
///
/// Quantiles themselves are "cut points dividing the range of a probability distribution into
Expand Down

0 comments on commit b24c9b9

Please sign in to comment.