Skip to content

Commit

Permalink
Cosmetic changes:
Browse files Browse the repository at this point in the history
- Renamed derive macro `Compressed` -> `CompressibleBy`
- Renamed `da_compress` -> `compressible_by`
- Removed default constraints from `Compressible::Compressed`
- Fixed compilation after removing constraints
- Removed `da_compress(bound)`
- Added new `TxId` type
  • Loading branch information
xgreenx committed Sep 4, 2024
1 parent 9a80b08 commit af39dcf
Show file tree
Hide file tree
Showing 33 changed files with 224 additions and 254 deletions.
2 changes: 1 addition & 1 deletion fuel-compression/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,5 +12,5 @@ description = "Compression and decompression of Fuel blocks for DA storage."

[dependencies]
fuel-derive = { workspace = true }
fuel-types = { workspace = true }
serde = { version = "1.0", features = ["derive"] }
serde-big-array = "0.5"
122 changes: 59 additions & 63 deletions fuel-compression/src/impls.rs
Original file line number Diff line number Diff line change
@@ -1,13 +1,17 @@
//! Trait impls for Rust types
use super::traits::*;
use core::{
marker::PhantomData,
mem::MaybeUninit,
};
use serde::{
Deserialize,
Serialize,
use crate::RegistryKey;
use core::mem::MaybeUninit;
use fuel_types::{
Address,
AssetId,
BlobId,
BlockHeight,
Bytes32,
ContractId,
Nonce,
Salt,
};

macro_rules! identity_compaction {
Expand Down Expand Up @@ -42,54 +46,54 @@ identity_compaction!(u32);
identity_compaction!(u64);
identity_compaction!(u128);

impl<T> Compressible for Option<T>
where
T: Compressible + Clone,
{
type Compressed = Option<T::Compressed>;
}
identity_compaction!(BlockHeight);
identity_compaction!(BlobId);
identity_compaction!(Bytes32);
identity_compaction!(Salt);
identity_compaction!(Nonce);

impl<T, Ctx, E> CompressibleBy<Ctx, E> for Option<T>
where
T: CompressibleBy<Ctx, E> + Clone,
{
async fn compress(&self, ctx: &mut Ctx) -> Result<Self::Compressed, E> {
if let Some(item) = self {
Ok(Some(item.compress(ctx).await?))
} else {
Ok(None)
macro_rules! array_types_compaction {
($t:ty, $compressed_t:ty) => {
impl Compressible for $t {
type Compressed = $compressed_t;
}
}
}

impl<T, Ctx, E> DecompressibleBy<Ctx, E> for Option<T>
where
T: DecompressibleBy<Ctx, E> + Clone,
{
async fn decompress(c: &Self::Compressed, ctx: &Ctx) -> Result<Self, E> {
if let Some(item) = c {
Ok(Some(T::decompress(item, ctx).await?))
} else {
Ok(None)
impl<Ctx, E> CompressibleBy<Ctx, E> for $t
where
Ctx: CompressionContext<$t, Error = E>,
Ctx: ?Sized,
{
async fn compress(&self, ctx: &mut Ctx) -> Result<$compressed_t, E> {
ctx.compress(self).await
}
}
}

impl<Ctx, E> DecompressibleBy<Ctx, E> for $t
where
Ctx: DecompressionContext<$t, Error = E>,
Ctx: ?Sized,
{
async fn decompress(value: &Self::Compressed, ctx: &Ctx) -> Result<$t, E> {
ctx.decompress(value).await
}
}
};
}

/// Wrapper around a fixed-size array so it can be (de)serialized with serde
/// for any const length `S`, via the `serde_big_array::BigArray` helper.
/// The inner array is `pub`, so callers can construct and destructure it directly.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct ArrayWrapper<const S: usize, T: Serialize + for<'a> Deserialize<'a>>(
    #[serde(with = "serde_big_array::BigArray")] pub [T; S],
);
array_types_compaction!(Address, RegistryKey);
array_types_compaction!(ContractId, RegistryKey);
array_types_compaction!(AssetId, RegistryKey);

impl<const S: usize, T> Compressible for [T; S]
where
T: Compressible + Clone,
T: Compressible,
{
type Compressed = ArrayWrapper<S, T::Compressed>;
type Compressed = [T::Compressed; S];
}

impl<const S: usize, T, Ctx, E> CompressibleBy<Ctx, E> for [T; S]
where
T: CompressibleBy<Ctx, E> + Clone,
T: CompressibleBy<Ctx, E>,
{
#[allow(unsafe_code)]
async fn compress(&self, ctx: &mut Ctx) -> Result<Self::Compressed, E> {
Expand All @@ -99,13 +103,17 @@ where
unsafe { MaybeUninit::uninit().assume_init() };

// Dropping a `MaybeUninit` does nothing, so we can just overwrite the array.
for (i, v) in self.iter().enumerate() {
tmp[i] = MaybeUninit::new(v.compress(ctx).await?);
// TODO: Handle the case of the error. Currently it will cause a memory leak.
// https://github.com/FuelLabs/fuel-vm/issues/811
for (v, empty) in self.iter().zip(tmp.iter_mut()) {
unsafe {
core::ptr::write(empty.as_mut_ptr(), v.compress(ctx).await?);
}
}

// SAFETY: Every element is initialized.
let result = tmp.map(|v| unsafe { v.assume_init() });
Ok(ArrayWrapper(result))
Ok(result)
}
}

Expand All @@ -120,12 +128,16 @@ where
let mut tmp: [MaybeUninit<T>; S] = unsafe { MaybeUninit::uninit().assume_init() };

// Dropping a `MaybeUninit` does nothing, so we can just overwrite the array.
for (i, v) in c.0.iter().enumerate() {
tmp[i] = MaybeUninit::new(T::decompress(v, ctx).await?);
// TODO: Handle the case of the error. Currently it will cause a memory leak.
// https://github.com/FuelLabs/fuel-vm/issues/811
for (v, empty) in c.iter().zip(tmp.iter_mut()) {
unsafe {
core::ptr::write(empty.as_mut_ptr(), T::decompress(v, ctx).await?);
}
}

// SAFETY: Every element is initialized.
let result: [T; S] = tmp.map(|v| unsafe { v.assume_init() });
let result = tmp.map(|v| unsafe { v.assume_init() });
Ok(result)
}
}
Expand Down Expand Up @@ -162,19 +174,3 @@ where
Ok(result)
}
}

/// `PhantomData` carries no runtime data, so its compressed form is the unit type.
impl<T> Compressible for PhantomData<T> {
    type Compressed = ();
}

impl<T, Ctx, E> CompressibleBy<Ctx, E> for PhantomData<T> {
    /// Compressing the zero-sized marker produces nothing; the context is untouched.
    async fn compress(&self, _ctx: &mut Ctx) -> Result<Self::Compressed, E> {
        Ok(())
    }
}

impl<T, Ctx, E> DecompressibleBy<Ctx, E> for PhantomData<T> {
    /// Reconstructing the marker requires no stored state.
    async fn decompress(_c: &Self::Compressed, _ctx: &Ctx) -> Result<Self, E> {
        Ok(PhantomData)
    }
}
3 changes: 2 additions & 1 deletion fuel-compression/src/key.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ use serde::{
/// to.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct RegistryKey([u8; Self::SIZE]);

impl RegistryKey {
/// Key mapping to default value for the table type.
pub const DEFAULT_VALUE: Self = Self([u8::MAX; Self::SIZE]);
Expand All @@ -34,7 +35,7 @@ impl RegistryKey {
Self::ZERO
} else {
Self::try_from(next_raw)
.expect("The producedure above always produces a valid key")
.expect("The procedure above always produces a valid key")
}
}
}
Expand Down
2 changes: 1 addition & 1 deletion fuel-compression/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,4 @@ mod traits;
pub use key::RegistryKey;
pub use traits::*;

pub use fuel_derive::Compressed;
pub use fuel_derive::CompressibleBy;
13 changes: 1 addition & 12 deletions fuel-compression/src/traits.rs
Original file line number Diff line number Diff line change
@@ -1,15 +1,10 @@
#![allow(async_fn_in_trait)] // We control the implementation so this is fine

use serde::{
Deserialize,
Serialize,
};

/// This type can be compressed to a more compact form and back using
/// `CompressibleBy` and `DecompressibleBy` traits.
pub trait Compressible {
/// The compressed type.
type Compressed: Clone + Serialize + for<'a> Deserialize<'a>;
type Compressed: Sized;
}

/// A context that can be used to compress a type.
Expand Down Expand Up @@ -42,12 +37,6 @@ where
async fn decompress(&self, value: &Type::Compressed) -> Result<Type, Self::Error>;
}

/// Associates an error type with a (de)compression context so that trait
/// bounds can refer to `Self::Error` instead of threading an extra generic
/// error parameter everywhere.
pub trait CtxError {
/// The error type produced by operations performed through this context.
type Error;
}

/// This type can be compressed to a more compact form and back using
/// `CompressionContext`.
pub trait CompressibleBy<Ctx, E>: Compressible
Expand Down
81 changes: 22 additions & 59 deletions fuel-derive/src/compressed.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
use proc_macro2::{
Span,
TokenStream as TokenStream2,
TokenTree as TokenTree2,
};
Expand All @@ -13,59 +12,36 @@ use syn::parse::{
ParseStream,
};

const ATTR: &str = "da_compress";
const ATTR: &str = "compressible_by";

/// Structure (struct or enum) attributes
#[derive(Debug)]
pub enum StructureAttrs {
/// Insert bounds for a generic type
/// `#[da_compress(bound(Type))]`
Bound(Vec<String>),
/// Discard generic parameter
/// `#[da_compress(discard(Type))]`
/// `#[compressible_by(discard(Type))]`
Discard(Vec<String>),
}
impl Parse for StructureAttrs {
fn parse(input: ParseStream) -> syn::Result<Self> {
if let Ok(ml) = input.parse::<syn::MetaList>() {
if ml.path.segments.len() == 1 {
match ml.path.segments[0].ident.to_string().as_str() {
"bound" => {
let mut bound = Vec::new();
for item in ml.tokens {
match item {
TokenTree2::Ident(ident) => {
bound.push(ident.to_string());
}
other => {
return Err(syn::Error::new_spanned(
other,
"Expected generic (type) name",
))
}
}
if ml.path.segments.len() == 1
&& ml.path.segments[0].ident.to_string().as_str() == "discard"
{
let mut discard = Vec::new();
for item in ml.tokens {
match item {
TokenTree2::Ident(ident) => {
discard.push(ident.to_string());
}
return Ok(Self::Bound(bound));
}
"discard" => {
let mut discard = Vec::new();
for item in ml.tokens {
match item {
TokenTree2::Ident(ident) => {
discard.push(ident.to_string());
}
other => {
return Err(syn::Error::new_spanned(
other,
"Expected generic (type) name",
))
}
}
other => {
return Err(syn::Error::new_spanned(
other,
"Expected generic (type) name",
))
}
return Ok(Self::Discard(discard));
}
_ => {}
}
return Ok(Self::Discard(discard));
}
}
Err(syn::Error::new_spanned(
Expand Down Expand Up @@ -96,9 +72,9 @@ impl StructureAttrs {
/// Field attributes
pub enum FieldAttrs {
/// Skipped when compressing, and must be reconstructed when decompressing.
/// `#[da_compress(skip)]`
/// `#[compressible_by(skip)]`
Skip,
/// Compresseded recursively.
/// Compressed recursively.
Normal,
}
impl FieldAttrs {
Expand Down Expand Up @@ -308,8 +284,8 @@ fn where_clause_push(w: &mut Option<syn::WhereClause>, p: TokenStream2) {
.push(syn::parse_quote! { #p });
}

/// Derives `Compressed` trait for the given `struct` or `enum`.
pub fn compressed_derive(mut s: synstructure::Structure) -> TokenStream2 {
/// Derives `Compressible` trait for the given `struct` or `enum`.
pub fn compressible_by(mut s: synstructure::Structure) -> TokenStream2 {
s.add_bounds(synstructure::AddBounds::None)
.underscore_const(true);

Expand All @@ -322,23 +298,10 @@ pub fn compressed_derive(mut s: synstructure::Structure) -> TokenStream2 {
let compressed_name = format_ident!("Compressed{}", name);

let mut g = s.ast().generics.clone();
let mut w_structure = g.where_clause.take();
let mut w_impl = w_structure.clone();
let w_structure = g.where_clause.take();
let w_impl = w_structure.clone();
for item in &s_attrs {
match item {
StructureAttrs::Bound(bound) => {
for p in bound {
let id = syn::Ident::new(p, Span::call_site());
where_clause_push(
&mut w_structure,
syn::parse_quote! { #id: ::fuel_compression::Compressible },
);
where_clause_push(
&mut w_impl,
syn::parse_quote! { for<'de> #id: ::fuel_compression::Compressible + serde::Serialize + serde::Deserialize<'de> + Clone },
);
}
}
StructureAttrs::Discard(discard) => {
g.params = g
.params
Expand Down
8 changes: 4 additions & 4 deletions fuel-derive/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ mod deserialize;
mod serialize;

use self::{
compressed::compressed_derive,
compressed::compressible_by,
deserialize::deserialize_derive,
serialize::serialize_derive,
};
Expand All @@ -33,7 +33,7 @@ synstructure::decl_derive!(
serialize_derive
);
synstructure::decl_derive!(
[Compressed, attributes(da_compress)] =>
/// Derives `Compressed` trait for the given `struct` or `enum`.
compressed_derive
[CompressibleBy, attributes(compressible_by)] =>
/// Derives `Compressible` and `CompressibleBy` trait for the given `struct` or `enum`.
compressible_by
);
Loading

0 comments on commit af39dcf

Please sign in to comment.