handle IP addresses in term aggregation (#2319)
* handle IP addresses in term aggregation

Stores IP addresses during the segment term aggregation via their u64 representation
and converts them back to u128 (IPv6 addresses) via a downcast when building the intermediate results.

Enable downcasting on `ColumnValues`.
Expose a u64 variant for u128-encoded data via the `open_u64_lenient` method.
Remove the lifetime in `VecColumn` so it can meet the `'static` requirement
coming from the downcast trait.

* rename method
PSeitz authored Mar 14, 2024
1 parent ec37295 commit b0e6556
Showing 19 changed files with 256 additions and 63 deletions.
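For orientation, here is a rough sketch (not part of the commit) of the flow described above: the segment-level term aggregation reads the IP column through its u64 view and only converts terms back to u128 when building intermediate results. The helper name `segment_terms_to_ips` and the import paths are illustrative; `ColumnValues`, `CompactSpaceU64Accessor`, and `compact_to_u128` are the items added in this diff.

```rust
use std::net::Ipv6Addr;
use std::sync::Arc;

// Illustrative paths; within tantivy these items live in the columnar crate.
use tantivy_columnar::column_values::{ColumnValues, CompactSpaceU64Accessor};

// Sketch only: turn the compact u64 terms collected for one segment back into
// IPv6 addresses for the intermediate aggregation result.
fn segment_terms_to_ips(
    values: &Arc<dyn ColumnValues<u64>>, // opened via `open_u128_as_compact_u64`
    collected_terms: &[u64],             // compact-space codes gathered during collection
) -> Option<Vec<Ipv6Addr>> {
    // `downcast_ref` is generated by the `impl_downcast!` call added in this diff;
    // it succeeds only for the compact-space codec, currently the only u128 codec.
    let accessor = values.downcast_ref::<CompactSpaceU64Accessor>()?;
    Some(
        collected_terms
            .iter()
            .map(|&term| Ipv6Addr::from(accessor.compact_to_u128(term as u32)))
            .collect(),
    )
}
```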
1 change: 1 addition & 0 deletions columnar/Cargo.toml
@@ -17,6 +17,7 @@ sstable = { version= "0.2", path = "../sstable", package = "tantivy-sstable" }
common = { version= "0.6", path = "../common", package = "tantivy-common" }
tantivy-bitpacker = { version= "0.5", path = "../bitpacker/" }
serde = "1.0.152"
downcast-rs = "1.2.0"

[dev-dependencies]
proptest = "1"
5 changes: 3 additions & 2 deletions columnar/src/column/mod.rs
@@ -9,8 +9,8 @@ use std::sync::Arc;
use common::BinarySerializable;
pub use dictionary_encoded::{BytesColumn, StrColumn};
pub use serialize::{
open_column_bytes, open_column_str, open_column_u128, open_column_u64,
serialize_column_mappable_to_u128, serialize_column_mappable_to_u64,
open_column_bytes, open_column_str, open_column_u128, open_column_u128_as_compact_u64,
open_column_u64, serialize_column_mappable_to_u128, serialize_column_mappable_to_u64,
};

use crate::column_index::ColumnIndex;
@@ -169,6 +169,7 @@ struct FirstValueWithDefault<T: Copy> {
impl<T: PartialOrd + Debug + Send + Sync + Copy + 'static> ColumnValues<T>
for FirstValueWithDefault<T>
{
#[inline(always)]
fn get_val(&self, idx: u32) -> T {
self.column.first(idx).unwrap_or(self.default_value)
}
20 changes: 20 additions & 0 deletions columnar/src/column/serialize.rs
@@ -76,6 +76,26 @@ pub fn open_column_u128<T: MonotonicallyMappableToU128>(
})
}

/// Open the column as u64.
///
/// See [`open_u128_as_compact_u64`] for more details.
pub fn open_column_u128_as_compact_u64(bytes: OwnedBytes) -> io::Result<Column<u64>> {
let (body, column_index_num_bytes_payload) = bytes.rsplit(4);
let column_index_num_bytes = u32::from_le_bytes(
column_index_num_bytes_payload
.as_slice()
.try_into()
.unwrap(),
);
let (column_index_data, column_values_data) = body.split(column_index_num_bytes as usize);
let column_index = crate::column_index::open_column_index(column_index_data)?;
let column_values = crate::column_values::open_u128_as_compact_u64(column_values_data)?;
Ok(Column {
index: column_index,
values: column_values,
})
}

pub fn open_column_bytes(data: OwnedBytes) -> io::Result<BytesColumn> {
let (body, dictionary_len_bytes) = data.rsplit(4);
let dictionary_len = u32::from_le_bytes(dictionary_len_bytes.as_slice().try_into().unwrap());
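As a usage note (not part of the diff), an in-crate sketch of the new open path: the returned `Column<u64>` yields compact-space codes rather than the original u128 values. `bytes` is assumed to hold a column previously written with `serialize_column_mappable_to_u128`.

```rust
// In-crate sketch; `OwnedBytes` and `Column` are the types used above.
fn first_compact_code(bytes: OwnedBytes) -> std::io::Result<Option<u64>> {
    let column: Column<u64> = open_column_u128_as_compact_u64(bytes)?;
    // `first` returns the value stored for row 0, if any. The u64 is the
    // compact-space code, not the original u128/IP value.
    Ok(column.first(0))
}
```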
2 changes: 1 addition & 1 deletion columnar/src/column_values/merge.rs
@@ -10,7 +10,7 @@ pub(crate) struct MergedColumnValues<'a, T> {
pub(crate) merge_row_order: &'a MergeRowOrder,
}

impl<'a, T: Copy + PartialOrd + Debug> Iterable<T> for MergedColumnValues<'a, T> {
impl<'a, T: Copy + PartialOrd + Debug + 'static> Iterable<T> for MergedColumnValues<'a, T> {
fn boxed_iter(&self) -> Box<dyn Iterator<Item = T> + '_> {
match self.merge_row_order {
MergeRowOrder::Stack(_) => Box::new(
11 changes: 8 additions & 3 deletions columnar/src/column_values/mod.rs
@@ -10,6 +10,7 @@ use std::fmt::Debug;
use std::ops::{Range, RangeInclusive};
use std::sync::Arc;

use downcast_rs::DowncastSync;
pub use monotonic_mapping::{MonotonicallyMappableToU64, StrictlyMonotonicFn};
pub use monotonic_mapping_u128::MonotonicallyMappableToU128;

@@ -25,7 +26,10 @@ mod monotonic_column;

pub(crate) use merge::MergedColumnValues;
pub use stats::ColumnStats;
pub use u128_based::{open_u128_mapped, serialize_column_values_u128};
pub use u128_based::{
open_u128_as_compact_u64, open_u128_mapped, serialize_column_values_u128,
CompactSpaceU64Accessor,
};
pub use u64_based::{
load_u64_based_column_values, serialize_and_load_u64_based_column_values,
serialize_u64_based_column_values, CodecType, ALL_U64_CODEC_TYPES,
@@ -41,7 +45,7 @@ use crate::RowId;
///
/// Any methods with a default and specialized implementation need to be called in the
/// wrappers that implement the trait: Arc and MonotonicMappingColumn
pub trait ColumnValues<T: PartialOrd = u64>: Send + Sync {
pub trait ColumnValues<T: PartialOrd = u64>: Send + Sync + DowncastSync {
/// Return the value associated with the given idx.
///
/// This accessor should return as fast as possible.
@@ -139,6 +143,7 @@ pub trait ColumnValues<T: PartialOrd = u64>: Send + Sync {
Box::new((0..self.num_vals()).map(|idx| self.get_val(idx)))
}
}
downcast_rs::impl_downcast!(sync ColumnValues<T> where T: PartialOrd);

/// Empty column of values.
pub struct EmptyColumnValues;
@@ -161,7 +166,7 @@ impl<T: PartialOrd + Default> ColumnValues<T> for EmptyColumnValues {
}
}

impl<T: Copy + PartialOrd + Debug> ColumnValues<T> for Arc<dyn ColumnValues<T>> {
impl<T: Copy + PartialOrd + Debug + 'static> ColumnValues<T> for Arc<dyn ColumnValues<T>> {
#[inline(always)]
fn get_val(&self, idx: u32) -> T {
self.as_ref().get_val(idx)
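The `'static` bounds added in this file follow from the new `DowncastSync` supertrait: `DowncastSync` implies `Any`, and `Any` requires `'static`. A hedged sketch (not from the diff) of what the generated downcast methods enable:

```rust
use std::sync::Arc;

// Illustrative helper: try to recover the concrete compact-space accessor from
// a type-erased column. `downcast_arc` is one of the methods generated by
// `impl_downcast!(sync ColumnValues<T> where T: PartialOrd)`.
fn as_compact_space(
    values: Arc<dyn ColumnValues<u64>>,
) -> Option<Arc<CompactSpaceU64Accessor>> {
    // On failure the original Arc is returned in the Err variant; it is dropped here.
    values.downcast_arc::<CompactSpaceU64Accessor>().ok()
}
```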
18 changes: 9 additions & 9 deletions columnar/src/column_values/monotonic_column.rs
@@ -31,10 +31,10 @@ pub fn monotonic_map_column<C, T, Input, Output>(
monotonic_mapping: T,
) -> impl ColumnValues<Output>
where
C: ColumnValues<Input>,
T: StrictlyMonotonicFn<Input, Output> + Send + Sync,
Input: PartialOrd + Debug + Send + Sync + Clone,
Output: PartialOrd + Debug + Send + Sync + Clone,
C: ColumnValues<Input> + 'static,
T: StrictlyMonotonicFn<Input, Output> + Send + Sync + 'static,
Input: PartialOrd + Debug + Send + Sync + Clone + 'static,
Output: PartialOrd + Debug + Send + Sync + Clone + 'static,
{
MonotonicMappingColumn {
from_column,
@@ -45,10 +45,10 @@ where

impl<C, T, Input, Output> ColumnValues<Output> for MonotonicMappingColumn<C, T, Input>
where
C: ColumnValues<Input>,
T: StrictlyMonotonicFn<Input, Output> + Send + Sync,
Input: PartialOrd + Send + Debug + Sync + Clone,
Output: PartialOrd + Send + Debug + Sync + Clone,
C: ColumnValues<Input> + 'static,
T: StrictlyMonotonicFn<Input, Output> + Send + Sync + 'static,
Input: PartialOrd + Send + Debug + Sync + Clone + 'static,
Output: PartialOrd + Send + Debug + Sync + Clone + 'static,
{
#[inline(always)]
fn get_val(&self, idx: u32) -> Output {
@@ -107,7 +107,7 @@ mod tests {
#[test]
fn test_monotonic_mapping_iter() {
let vals: Vec<u64> = (0..100u64).map(|el| el * 10).collect();
let col = VecColumn::from(&vals);
let col = VecColumn::from(vals);
let mapped = monotonic_map_column(
col,
StrictlyMonotonicMappingInverter::from(StrictlyMonotonicMappingToInternal::<i64>::new()),
64 changes: 63 additions & 1 deletion columnar/src/column_values/u128_based/compact_space/mod.rs
@@ -292,6 +292,63 @@ impl BinarySerializable for IPCodecParams {
}
}

/// Exposes the compact space compressed values as u64.
///
/// This allows faster access to the values, as u64 is faster to work with than u128.
/// It also allows handling u128 values like u64, via `open_u128_as_compact_u64`, as a uniform
/// access interface.
///
/// To convert from the internal u64 representation back to u128, `compact_to_u128` can be used.
pub struct CompactSpaceU64Accessor(CompactSpaceDecompressor);
impl CompactSpaceU64Accessor {
pub(crate) fn open(data: OwnedBytes) -> io::Result<CompactSpaceU64Accessor> {
let decompressor = CompactSpaceU64Accessor(CompactSpaceDecompressor::open(data)?);
Ok(decompressor)
}
/// Convert a compact space value to u128
pub fn compact_to_u128(&self, compact: u32) -> u128 {
self.0.compact_to_u128(compact)
}
}

impl ColumnValues<u64> for CompactSpaceU64Accessor {
#[inline]
fn get_val(&self, doc: u32) -> u64 {
let compact = self.0.get_compact(doc);
compact as u64
}

fn min_value(&self) -> u64 {
self.0.u128_to_compact(self.0.min_value()).unwrap() as u64
}

fn max_value(&self) -> u64 {
self.0.u128_to_compact(self.0.max_value()).unwrap() as u64
}

fn num_vals(&self) -> u32 {
self.0.params.num_vals
}

#[inline]
fn iter(&self) -> Box<dyn Iterator<Item = u64> + '_> {
Box::new(self.0.iter_compact().map(|el| el as u64))
}

#[inline]
fn get_row_ids_for_value_range(
&self,
value_range: RangeInclusive<u64>,
position_range: Range<u32>,
positions: &mut Vec<u32>,
) {
let value_range = self.0.compact_to_u128(*value_range.start() as u32)
..=self.0.compact_to_u128(*value_range.end() as u32);
self.0
.get_row_ids_for_value_range(value_range, position_range, positions)
}
}

impl ColumnValues<u128> for CompactSpaceDecompressor {
#[inline]
fn get_val(&self, doc: u32) -> u128 {
@@ -402,9 +459,14 @@ impl CompactSpaceDecompressor {
.map(|compact| self.compact_to_u128(compact))
}

#[inline]
pub fn get_compact(&self, idx: u32) -> u32 {
self.params.bit_unpacker.get(idx, &self.data) as u32
}

#[inline]
pub fn get(&self, idx: u32) -> u128 {
let compact = self.params.bit_unpacker.get(idx, &self.data) as u32;
let compact = self.get_compact(idx);
self.compact_to_u128(compact)
}

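To make the contract of the u64 view explicit, a small in-crate sketch (not part of the diff): every value the accessor returns is a compact code, and `compact_to_u128` maps it back to the value the u128 decompressor would produce.

```rust
// Illustrative invariant check; `accessor` and `decompressor` are assumed to
// have been opened from the same column bytes.
fn check_views(accessor: &CompactSpaceU64Accessor, decompressor: &CompactSpaceDecompressor) {
    for idx in 0..accessor.num_vals() {
        let code = accessor.get_val(idx);     // compact code, as seen by u64 consumers
        let original = decompressor.get(idx); // fully decoded u128 value
        assert_eq!(accessor.compact_to_u128(code as u32), original);
    }
}
```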
21 changes: 20 additions & 1 deletion columnar/src/column_values/u128_based/mod.rs
@@ -6,7 +6,9 @@ use std::sync::Arc;
mod compact_space;

use common::{BinarySerializable, OwnedBytes, VInt};
use compact_space::{CompactSpaceCompressor, CompactSpaceDecompressor};
pub use compact_space::{
CompactSpaceCompressor, CompactSpaceDecompressor, CompactSpaceU64Accessor,
};

use crate::column_values::monotonic_map_column;
use crate::column_values::monotonic_mapping::{
@@ -108,6 +110,23 @@ pub fn open_u128_mapped<T: MonotonicallyMappableToU128 + Debug>(
StrictlyMonotonicMappingToInternal::<T>::new().into();
Ok(Arc::new(monotonic_map_column(reader, inverted)))
}

/// Returns the u64 representation of the u128 data.
/// The internal representation of the data as u64 is useful for faster processing.
///
/// To convert back to u128, downcast to `CompactSpaceU64Accessor` and call
/// `compact_to_u128`.
///
/// # Notice
/// If new u128 codecs are added, check for usages of `CompactSpaceU64Accessor` and
/// also handle the new codecs there.
pub fn open_u128_as_compact_u64(mut bytes: OwnedBytes) -> io::Result<Arc<dyn ColumnValues<u64>>> {
let header = U128Header::deserialize(&mut bytes)?;
assert_eq!(header.codec_type, U128FastFieldCodecType::CompactSpace);
let reader = CompactSpaceU64Accessor::open(bytes)?;
Ok(Arc::new(reader))
}

#[cfg(test)]
pub mod tests {
use super::*;
1 change: 0 additions & 1 deletion columnar/src/column_values/u64_based/bitpacked.rs
@@ -63,7 +63,6 @@ impl ColumnValues for BitpackedReader {
fn get_val(&self, doc: u32) -> u64 {
self.stats.min_value + self.stats.gcd.get() * self.bit_unpacker.get(doc, &self.data)
}

#[inline]
fn min_value(&self) -> u64 {
self.stats.min_value
7 changes: 5 additions & 2 deletions columnar/src/column_values/u64_based/blockwise_linear.rs
@@ -63,7 +63,10 @@ impl BlockwiseLinearEstimator {
if self.block.is_empty() {
return;
}
let line = Line::train(&VecColumn::from(&self.block));
let column = VecColumn::from(std::mem::take(&mut self.block));
let line = Line::train(&column);
self.block = column.into();

let mut max_value = 0u64;
for (i, buffer_val) in self.block.iter().enumerate() {
let interpolated_val = line.eval(i as u32);
@@ -125,7 +128,7 @@ impl ColumnCodecEstimator for BlockwiseLinearEstimator {
*buffer_val = gcd_divider.divide(*buffer_val - stats.min_value);
}

let line = Line::train(&VecColumn::from(&buffer));
let line = Line::train(&VecColumn::from(buffer.to_vec()));

assert!(!buffer.is_empty());

2 changes: 1 addition & 1 deletion columnar/src/column_values/u64_based/line.rs
@@ -184,7 +184,7 @@ mod tests {
}

fn test_eval_max_err(ys: &[u64]) -> Option<u64> {
let line = Line::train(&VecColumn::from(&ys));
let line = Line::train(&VecColumn::from(ys.to_vec()));
ys.iter()
.enumerate()
.map(|(x, y)| y.wrapping_sub(line.eval(x as u32)))
4 changes: 3 additions & 1 deletion columnar/src/column_values/u64_based/linear.rs
@@ -173,7 +173,9 @@ impl LinearCodecEstimator {
fn collect_before_line_estimation(&mut self, value: u64) {
self.block.push(value);
if self.block.len() == LINE_ESTIMATION_BLOCK_LEN {
let line = Line::train(&VecColumn::from(&self.block));
let column = VecColumn::from(std::mem::take(&mut self.block));
let line = Line::train(&column);
self.block = column.into();
let block = std::mem::take(&mut self.block);
for val in block {
self.collect_after_line_estimation(&line, val);
20 changes: 11 additions & 9 deletions columnar/src/column_values/vec_column.rs
@@ -4,14 +4,14 @@ use tantivy_bitpacker::minmax;

use crate::ColumnValues;

/// VecColumn provides `Column` over a slice.
pub struct VecColumn<'a, T = u64> {
pub(crate) values: &'a [T],
/// VecColumn provides `Column` over a `Vec<T>`.
pub struct VecColumn<T = u64> {
pub(crate) values: Vec<T>,
pub(crate) min_value: T,
pub(crate) max_value: T,
}

impl<'a, T: Copy + PartialOrd + Send + Sync + Debug> ColumnValues<T> for VecColumn<'a, T> {
impl<T: Copy + PartialOrd + Send + Sync + Debug + 'static> ColumnValues<T> for VecColumn<T> {
fn get_val(&self, position: u32) -> T {
self.values[position as usize]
}
@@ -37,11 +37,8 @@ impl<'a, T: Copy + PartialOrd + Send + Sync + Debug> ColumnValues<T> for VecColu
}
}

impl<'a, T: Copy + PartialOrd + Default, V> From<&'a V> for VecColumn<'a, T>
where V: AsRef<[T]> + ?Sized
{
fn from(values: &'a V) -> Self {
let values = values.as_ref();
impl<T: Copy + PartialOrd + Default> From<Vec<T>> for VecColumn<T> {
fn from(values: Vec<T>) -> Self {
let (min_value, max_value) = minmax(values.iter().copied()).unwrap_or_default();
Self {
values,
Expand All @@ -50,3 +47,8 @@ where V: AsRef<[T]> + ?Sized
}
}
}
impl From<VecColumn> for Vec<u64> {
fn from(column: VecColumn) -> Self {
column.values
}
}
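The lifetime removal is what makes the estimator changes above possible: a `ColumnValues` implementor must now be `'static` for downcasting, so `VecColumn` owns its data. A short in-crate sketch (not from the diff) of the resulting move-in/move-out pattern:

```rust
// Mirrors the pattern in blockwise_linear.rs and linear.rs: the buffer is moved
// into the column (no copy), used through the ColumnValues interface, then moved back.
fn with_column(block: &mut Vec<u64>) -> (u64, u64) {
    let column = VecColumn::from(std::mem::take(block)); // Vec<u64> -> VecColumn
    let bounds = (column.min_value(), column.max_value());
    *block = column.into();                              // VecColumn -> Vec<u64>
    bounds
}
```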
14 changes: 3 additions & 11 deletions columnar/src/columnar/writer/mod.rs
@@ -13,9 +13,7 @@ pub(crate) use serializer::ColumnarSerializer;
use stacker::{Addr, ArenaHashMap, MemoryArena};

use crate::column_index::SerializableColumnIndex;
use crate::column_values::{
ColumnValues, MonotonicallyMappableToU128, MonotonicallyMappableToU64, VecColumn,
};
use crate::column_values::{MonotonicallyMappableToU128, MonotonicallyMappableToU64};
use crate::columnar::column_type::ColumnType;
use crate::columnar::writer::column_writers::{
ColumnWriter, NumericalColumnWriter, StrOrBytesColumnWriter,
@@ -645,10 +643,7 @@ fn send_to_serialize_column_mappable_to_u128<
value_index_builders: &mut PreallocatedIndexBuilders,
values: &mut Vec<T>,
mut wrt: impl io::Write,
) -> io::Result<()>
where
for<'a> VecColumn<'a, T>: ColumnValues<T>,
{
) -> io::Result<()> {
values.clear();
// TODO: split index and values
let serializable_column_index = match cardinality {
@@ -701,10 +696,7 @@ fn send_to_serialize_column_mappable_to_u64(
value_index_builders: &mut PreallocatedIndexBuilders,
values: &mut Vec<u64>,
mut wrt: impl io::Write,
) -> io::Result<()>
where
for<'a> VecColumn<'a, u64>: ColumnValues<u64>,
{
) -> io::Result<()> {
values.clear();
let serializable_column_index = match cardinality {
Cardinality::Full => {