Add serialization of arrays
Signed-off-by: delphi <[email protected]>
asmfreak committed Mar 3, 2022
1 parent e4563eb commit 2e46cbb
Showing 6 changed files with 382 additions and 122 deletions.
117 changes: 112 additions & 5 deletions src/nunavut/lang/cpp/support/serialization.j2
@@ -85,10 +85,41 @@ using std::bit_cast;

#endif // def __cpp_lib_bit_cast


#ifdef NUNAVUT_USE_SPAN_LITE
using nonstd::span;
#else

template<typename T>
class span
{
    T* ptr_;
    {{ typename_unsigned_length }} size_;
public:
    template<{{ typename_unsigned_length }} N>
    span(std::array<T, N>& data) : ptr_(data.data()), size_(data.size()) {}
    // Note: this overload is only viable when T is const-qualified
    // (e.g. span<const byte> over a const std::array<const byte, N>).
    template<{{ typename_unsigned_length }} N>
    span(const std::array<T, N>& data) : ptr_(data.data()), size_(data.size()) {}
    template<{{ typename_unsigned_length }} N>
    span(T (&data)[N]) : ptr_(data), size_(N) {}
    span(T* ptr, {{ typename_unsigned_length }} size) : ptr_(ptr), size_(size) {}

    T* data() { return ptr_; }
    T* data() const { return ptr_; }
    {{ typename_unsigned_length }} size() const { return size_; }
    T& operator[]({{ typename_unsigned_length }} index)
    {
        {{ assert('index < size_') }}
        return ptr_[index];
    }
    T& operator[]({{ typename_unsigned_length }} index) const
    {
        {{ assert('index < size_') }}
        return ptr_[index];
    }
};

#endif  // NUNAVUT_USE_SPAN_LITE
using bytespan = span<{{ typename_byte }}>;
using const_bytespan = span<const {{ typename_byte }}>;
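
For orientation, a minimal usage sketch of the fallback span and the byte-span aliases above. It assumes the template options render {{ typename_byte }} as std::uint8_t and {{ typename_unsigned_length }} as std::size_t; real builds may substitute other types, so the code below is a hypothetical stand-in for the rendered result.

#include <array>
#include <cstddef>
#include <cstdint>

std::size_t checksum(span<const std::uint8_t> in)
{
    std::size_t sum = 0U;
    for (std::size_t i = 0U; i < in.size(); ++i)
    {
        sum += in[i];  // operator[] is bounds-checked by the rendered assert
    }
    return sum;
}

void demo()
{
    std::array<std::uint8_t, 4> storage = {1U, 2U, 3U, 4U};
    span<std::uint8_t> view(storage);  // extent taken from the array, no allocation
    // A const view over mutable storage needs the pointer+size constructor,
    // because std::array<T, N> does not convert to std::array<const T, N>.
    span<const std::uint8_t> cview(storage.data(), storage.size());
    (void) checksum(cview);
    (void) view;
}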


#if span_FEATURE( MAKE_SPAN )
using nonstd::make_span;
#endif // span_FEATURE( MAKE_SPAN )
@@ -106,15 +137,91 @@ enum class Error{
REPRESENTATION_BAD_DELIMITER_HEADER=12
};

#ifdef NUNAVUT_USE_TL_EXPECTED
template<typename Err>
using unexpected = tl::unexpected<Err>;
template<typename Ret>
using Result = tl::expected<Ret, Error>;
#else
template<typename Err>
struct unexpected
{
    Err value;

    explicit unexpected(Err e) : value(e) {}
};

/// This is a dumbed-down version of C++23 std::expected, better suited to
/// embedded applications. It never throws; exceptional conditions are
/// signalled through NUNAVUT_ASSERT instead.
/// All operations of Ret are expected to be non-throwing.
template<typename Ret>
class expected
{
    // Storage large and aligned enough for the larger of Ret and Error.
    using storage_t = typename std::aligned_storage<
        (std::max(sizeof(Ret), sizeof(Error))),
        (std::max(alignof(Ret), alignof(Error)))>::type;
    storage_t storage;
    bool is_expected_;

    Ret* ret_ptr() { return reinterpret_cast<Ret*>(&storage); }
    const Ret* ret_ptr() const { return reinterpret_cast<const Ret*>(&storage); }
    Error* error_ptr() { return reinterpret_cast<Error*>(&storage); }
    const Error* error_ptr() const { return reinterpret_cast<const Error*>(&storage); }

public:
    expected() : is_expected_(true) { new (ret_ptr()) Ret(); }
    expected(Ret r) : is_expected_(true) { new (ret_ptr()) Ret(std::move(r)); }
    expected& operator=(Ret other)
    {
        // Destroy-and-rebuild avoids a separate assignment path for the two alternatives.
        this->~expected();
        return *new (this) expected(std::move(other));
    }
    expected(unexpected<Error> err) : is_expected_(false) { new (error_ptr()) Error(std::move(err.value)); }
    expected(const expected& other) : is_expected_(other.is_expected_)
    {
        if (is_expected_) { new (ret_ptr()) Ret(*other.ret_ptr()); }
        else              { new (error_ptr()) Error(*other.error_ptr()); }
    }
    expected& operator=(const expected& other)
    {
        this->~expected();
        return *new (this) expected(other);
    }
    ~expected()
    {
        if (is_expected_) { ret_ptr()->~Ret(); }
        else              { error_ptr()->~Error(); }
    }

    Ret& value() { {{ assert('is_expected_') }} return *ret_ptr(); }
    const Ret& value() const { {{ assert('is_expected_') }} return *ret_ptr(); }
    Ret& operator*() { return value(); }
    const Ret& operator*() const { return value(); }
    Ret* operator->() { {{ assert('is_expected_') }} return ret_ptr(); }
    const Ret* operator->() const { {{ assert('is_expected_') }} return ret_ptr(); }
    Error& error() { {{ assert('not is_expected_') }} return *error_ptr(); }
    const Error& error() const { {{ assert('not is_expected_') }} return *error_ptr(); }

    bool has_value() const { return is_expected_; }
    operator bool() const { return has_value(); }
};

/// Void specialization: only the underlying error value is stored, with 0 meaning success.
template<>
class expected<void>
{
    using underlying_type = typename std::underlying_type<Error>::type;
    underlying_type e;
public:
    expected() : e(0) {}
    expected(unexpected<Error> err) : e(static_cast<underlying_type>(err.value)) {}
    Error error() const { {{ assert('not has_value()') }} return static_cast<Error>(e); }

    bool has_value() const { return e == 0; }
    operator bool() const { return has_value(); }
};

template<typename Ret>
using Result = expected<Ret>;
#endif  // NUNAVUT_USE_TL_EXPECTED


using VoidResult = Result<void>;
using SerializeResult = Result<{{ typename_unsigned_length }}>;

// Allows `return -Error::SOME_ERROR;` from functions returning a Result:
// negating an Error wraps it in unexpected<Error>, which converts into the
// error state of the Result.
inline unexpected<Error> operator-(const Error& e)
{
    return unexpected<Error>{e};
}
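
A hedged sketch of the intended call pattern for Result and the unary operator- above. The function name and logic are illustrative; Error::REPRESENTATION_BAD_ARRAY_LENGTH is one of the codes from the enum above, and std::size_t stands in for the rendered {{ typename_unsigned_length }}.

#include <cstddef>

Result<std::size_t> emit_length(std::size_t requested, std::size_t capacity)
{
    if (requested > capacity)
    {
        // operator-(Error) wraps the code in unexpected<Error>, which in turn
        // converts into the error state of the Result.
        return -Error::REPRESENTATION_BAD_ARRAY_LENGTH;
    }
    return requested;  // success: implicit construction from the value type
}

void demo_result()
{
    auto r = emit_length(3U, 4U);
    if (r)  // operator bool() is has_value()
    {
        const std::size_t n = *r;  // operator* asserts has_value()
        (void) n;
    }
    else
    {
        const Error e = r.error();  // asserts the error state
        (void) e;
    }
}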

namespace options
{
{% for key, value in options.items() -%}
68 changes: 66 additions & 2 deletions src/nunavut/lang/cpp/templates/deserialization.j2
@@ -137,13 +137,77 @@

{# ----------------------------------------------------------------------------------------------------------------- #}
{% macro _deserialize_fixed_length_array(t, reference, offset) %}
{# SPECIAL CASE: PACKED BIT ARRAY #}
{#{% if t.element_type is BooleanType %}
nunavutGetBits(&{{ reference }}_bitpacked_[0], &buffer[0], capacity_bytes, offset_bits, {{ t.capacity }}UL);
offset_bits += {{ t.capacity }}UL;
#}
{# SPECIAL CASE: BYTES-LIKE ARRAY #}
{#{% elif t.element_type is PrimitiveType and t.element_type.bit_length == 8 and t.element_type is zero_cost_primitive %}
nunavutGetBits(&{{ reference }}[0], &buffer[0], capacity_bytes, offset_bits, {{ t.capacity }}UL * 8U);
offset_bits += {{ t.capacity }}UL * 8U;
#}
{# SPECIAL CASE: ZERO-COST PRIMITIVES #}
{#{% elif t.element_type is PrimitiveType and t.element_type is zero_cost_primitive %}
{% if t.element_type is FloatType %}
static_assert(NUNAVUT_PLATFORM_IEEE754_FLOAT, "Native IEEE754 binary32 required. TODO: relax constraint");
{% if t.element_type.bit_length > 32 %}
static_assert(NUNAVUT_PLATFORM_IEEE754_DOUBLE, "Native IEEE754 binary64 required. TODO: relax constraint");
{% endif %}
{% endif %}
nunavutGetBits(&{{ reference }}[0], &buffer[0], capacity_bytes, offset_bits, {# -#}
{# {{ t.capacity }}UL * {{ t.element_type.bit_length }}U);
offset_bits += {{ t.capacity }}UL * {{ t.element_type.bit_length }}U;
#}
{# GENERAL CASE #}
{#{% else %}#}
{# Element offset is the superposition of each individual element offset plus the array's own offset.
 # For example, an array like uint8[3] offset by 16 bits would have its element_offset = {16, 24, 32}.
 # We could also unroll element deserialization for small arrays (e.g., below ~10 elements) to take advantage of
 # incidental alignment of elements, but the benefit of such an optimization is believed to be negligible. #}
{% set element_offset = offset + t.element_type.bit_length_set.repeat_range(t.capacity - 1) %}
{% set ref_index = 'index'|to_template_unique_name %}
for (size_t {{ ref_index }} = 0U; {{ ref_index }} < {{ t.capacity }}UL; ++{{ ref_index }})
{
    {{ _deserialize_any(t.element_type, reference + ('[%s]'|format(ref_index)), element_offset)|trim|indent }}
}
{# The size cannot be checked here because if the implicit zero extension rule is applied it will not match. #}
{#{% endif %}#}
{% endmacro %}
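
As a concrete, hand-written illustration of the general case rendered by this macro: for a hypothetical uint8[3] field that happens to start on a byte boundary, the per-element loop behaves like the function below. The real output goes through _deserialize_any and tracks offsets in bits, not bytes.

#include <array>
#include <cstddef>
#include <cstdint>

// Fixed-length arrays carry no length prefix: exactly t.capacity elements
// are read back to back.
void deserialize_u8_3(const std::uint8_t* buffer, std::size_t offset_bytes,
                      std::array<std::uint8_t, 3>& out)
{
    for (std::size_t index = 0U; index < 3UL; ++index)
    {
        out[index] = buffer[offset_bytes + index];  // one element per iteration
    }
}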


{# ----------------------------------------------------------------------------------------------------------------- #}
{% macro _deserialize_variable_length_array(t, reference, offset) %}
{
{# DESERIALIZE THE IMPLICIT ARRAY LENGTH FIELD #}
{% set ref_size = 'size'|to_template_unique_name %}
// Array length prefix: {{ t.length_field_type }}
{{ _deserialize_integer(t.length_field_type, ('const %s %s'|format((t.length_field_type | declaration), ref_size)) , offset) }}
if ({{ ref_size }} > {{ t.capacity }}U)
{
return -nunavut::support::Error::REPRESENTATION_BAD_ARRAY_LENGTH;
}
{{ reference }}.resize({{ ref_size }});

{# COMPUTE THE ARRAY ELEMENT OFFSETS #}
{# NOTICE: The offset is no longer valid at this point because we just emitted the array length prefix. #}
{% set element_offset = offset + t.bit_length_set %}
{% set first_element_offset = offset + t.length_field_type.bit_length %}
{% assert (element_offset.min) == (first_element_offset.min) %}
{% if first_element_offset.is_aligned_at_byte() %}
{{ assert('in_buffer.offset_alings_to_byte()') }}
{% endif %}
{# GENERAL CASE #}
{% set ref_index = 'index'|to_template_unique_name %}
for (size_t {{ ref_index }} = 0U; {{ ref_index }} < {{ reference }}.size(); ++{{ ref_index }})
{
    {{
        _deserialize_any(t.element_type, reference + ('[%s]'|format(ref_index)), element_offset)
        |trim|indent
    }}
}

}
{% endmacro %}
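
A hand-written equivalent of the rendered code, under simplifying assumptions (a uint8[<=4] field with a byte-aligned uint8 length prefix): read the implicit length prefix, reject lengths above capacity, resize, then deserialize element by element.

#include <cstddef>
#include <cstdint>
#include <vector>

// Returns false where the rendered code would return
// -nunavut::support::Error::REPRESENTATION_BAD_ARRAY_LENGTH.
bool deserialize_var_u8(const std::uint8_t* buffer, std::size_t& offset_bytes,
                        std::vector<std::uint8_t>& out)
{
    const std::uint8_t size = buffer[offset_bytes++];  // implicit length prefix
    if (size > 4U)  // t.capacity
    {
        return false;
    }
    out.resize(size);
    for (std::size_t index = 0U; index < out.size(); ++index)
    {
        out[index] = buffer[offset_bytes++];
    }
    return true;
}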


86 changes: 83 additions & 3 deletions src/nunavut/lang/cpp/templates/serialization.j2
@@ -226,13 +226,93 @@

{# ----------------------------------------------------------------------------------------------------------------- #}
{% macro _serialize_fixed_length_array(t, reference, offset) %}
{# SPECIAL CASE: PACKED BIT ARRAY #}
{#{% if t.element_type is BooleanType %}
{% if offset.is_aligned_at_byte() %}
// Optimization prospect: this item is aligned at the byte boundary, so it is possible to use memmove().
{% endif %}
nunavutCopyBits(&buffer[0], offset_bits, {{ t.capacity }}UL, &{{ reference }}_bitpacked_[0], 0U);
out_buffer.add_offset({{ t.capacity }}UL);
#}
{# SPECIAL CASE: BYTES-LIKE ARRAY #}
{#
{% elif t.element_type is PrimitiveType and t.element_type.bit_length == 8 and t.element_type is zero_cost_primitive %}
{% if offset.is_aligned_at_byte() %}
// Optimization prospect: this item is aligned at the byte boundary, so it is possible to use memmove().
{% endif %}
nunavutCopyBits(&buffer[0], offset_bits, {{ t.capacity }}UL * 8U, &{{ reference }}[0], 0U);
out_buffer.add_offset({{ t.capacity }}UL * 8U);
#}
{# SPECIAL CASE: ZERO-COST PRIMITIVES #}
{#
{% elif t.element_type is PrimitiveType and t.element_type is zero_cost_primitive %}
// Saturation code not emitted -- assume the native representation is conformant.
{% if t.element_type is FloatType %}
static_assert(NUNAVUT_PLATFORM_IEEE754_FLOAT, "Native IEEE754 binary32 required. TODO: relax constraint");
{% if t.element_type.bit_length > 32 %}
static_assert(NUNAVUT_PLATFORM_IEEE754_DOUBLE, "Native IEEE754 binary64 required. TODO: relax constraint");
{% endif %}
{% endif %}
{% if offset.is_aligned_at_byte() %}
// Optimization prospect: this item is aligned at the byte boundary, so it is possible to use memmove().
{% endif %}
nunavutCopyBits(&buffer[0], offset_bits, {{ t.capacity }}UL * {{ t.element_type.bit_length }}UL, {# -#}
{# &{{ reference }}[0], 0U);
out_buffer.add_offset({{ t.capacity }}UL * {{ t.element_type.bit_length }}UL);
#}
{# GENERAL CASE #}
{# {% else %} #}
{% set ref_origin_offset = 'origin'|to_template_unique_name %}
const {{ typename_unsigned_bit_length }} {{ ref_origin_offset }} = out_buffer.offset();
{# Element offset is the superposition of each individual element offset plus the array's own offset.
 # For example, an array like uint8[3] offset by 16 bits would have its element_offset = {16, 24, 32}.
 # We could also unroll element serialization for small arrays (e.g., below ~10 elements) to take advantage of
 # incidental alignment of elements, but the benefit of such an optimization is believed to be negligible. #}
{% set element_offset = offset + t.element_type.bit_length_set.repeat_range(t.capacity - 1) %}
{% set ref_index = 'index'|to_template_unique_name %}
for (size_t {{ ref_index }} = 0U; {{ ref_index }} < {{ t.capacity }}UL; ++{{ ref_index }})
{
    {{ _serialize_any(t.element_type, reference + ('[%s]'|format(ref_index)), element_offset)|trim|indent }}
}
// It is assumed that we know the exact type of the serialized entity, hence we expect the size to match.
{% if not t.bit_length_set.fixed_length %}
{{ assert('(out_buffer.offset() - %s) >= %sULL'|format(ref_origin_offset, t.bit_length_set.min)) }}
{{ assert('(out_buffer.offset() - %s) <= %sULL'|format(ref_origin_offset, t.bit_length_set.max)) }}
{% else %}
{{ assert('(out_buffer.offset() - %s) == %sULL'|format(ref_origin_offset, t.bit_length_set.max)) }}
{% endif %}
(void) {{ ref_origin_offset }};
{# {% endif %} #}
{% endmacro %}
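
The shape of the rendered general case, hand-written: record the origin offset, serialize each element, then assert that the emitted size lies within the type's bit-length set (exact equality when the set is fixed). The 3-element, 8-bit layout below is illustrative only.

#include <cassert>
#include <cstddef>

void serialize_fixed_array_shape(std::size_t& out_offset_bits)
{
    const std::size_t origin = out_offset_bits;
    for (std::size_t index = 0U; index < 3UL; ++index)
    {
        out_offset_bits += 8U;  // stand-in for _serialize_any() of one element
    }
    // Fixed-length case: the emitted size must match exactly.
    assert((out_offset_bits - origin) == 3UL * 8U);
    (void) origin;  // mirrors the template's (void) cast when asserts compile out
}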


{# ----------------------------------------------------------------------------------------------------------------- #}
{% macro _serialize_variable_length_array(t, reference, offset) %}
if ({{ reference }}.size() > {{ t.capacity }})
{
return -nunavut::support::Error::REPRESENTATION_BAD_ARRAY_LENGTH;
}
// Array length prefix: {{ t.length_field_type }}
{{ _serialize_integer(t.length_field_type, reference + '.size()', offset) }}

{# COMPUTE THE ARRAY ELEMENT OFFSETS #}
{# NOTICE: The offset is no longer valid at this point because we just emitted the array length prefix. #}
{% set element_offset = offset + t.bit_length_set %}
{% set first_element_offset = offset + t.length_field_type.bit_length %}
{% assert (element_offset.min) == (first_element_offset.min) %}
{% if first_element_offset.is_aligned_at_byte() %}
{{ assert('out_buffer.offset_alings_to_byte()') }}
{% endif %}

{# GENERAL CASE #}
{% set ref_index = 'index'|to_template_unique_name %}
for (size_t {{ ref_index }} = 0U; {{ ref_index }} < {{ reference }}.size(); ++{{ ref_index }})
{
    {{
        _serialize_any(t.element_type, reference + ('[%s]'|format(ref_index)), element_offset)
        |trim|indent
    }}
}
{% endmacro %}
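
And the serialization counterpart of the variable-length case, hand-written under the same simplifying assumptions (uint8[<=4], byte-aligned prefix and elements): reject oversized arrays before emitting anything, write the length prefix, then serialize element by element.

#include <cstddef>
#include <cstdint>
#include <vector>

// Returns false where the rendered code would return
// -nunavut::support::Error::REPRESENTATION_BAD_ARRAY_LENGTH.
bool serialize_var_u8(const std::vector<std::uint8_t>& in,
                      std::uint8_t* buffer, std::size_t& offset_bytes)
{
    if (in.size() > 4U)  // the t.capacity check precedes any output
    {
        return false;
    }
    buffer[offset_bytes++] = static_cast<std::uint8_t>(in.size());  // length prefix
    for (std::size_t index = 0U; index < in.size(); ++index)
    {
        buffer[offset_bytes++] = in[index];
    }
    return true;
}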


@@ -281,5 +361,5 @@
{% endif %}

out_buffer.add_offset({{ ref_size_bytes }} * 8U);
// {{ assert('out_buffer.size() >= 0') }}
{% endmacro %}