diff --git a/pw_tokenizer/BUILD.bazel b/pw_tokenizer/BUILD.bazel
index b776067acc..26ce5dcbbb 100644
--- a/pw_tokenizer/BUILD.bazel
+++ b/pw_tokenizer/BUILD.bazel
@@ -212,6 +212,15 @@ pw_cc_fuzz_test(
     ],
 )
 
+pw_cc_test(
+    name = "encode_args_test",
+    srcs = ["encode_args_test.cc"],
+    deps = [
+        ":pw_tokenizer",
+        "//pw_unit_test",
+    ],
+)
+
 pw_cc_test(
     name = "hash_test",
     srcs = [
diff --git a/pw_tokenizer/BUILD.gn b/pw_tokenizer/BUILD.gn
index abc73bb0af..99ddc5b8a6 100644
--- a/pw_tokenizer/BUILD.gn
+++ b/pw_tokenizer/BUILD.gn
@@ -171,6 +171,7 @@ pw_test_group("tests") {
     ":decode_test",
     ":detokenize_fuzzer",
     ":detokenize_test",
+    ":encode_args_test",
     ":hash_test",
     ":simple_tokenize_test",
     ":token_database_fuzzer",
@@ -226,6 +227,11 @@ pw_test("detokenize_test") {
   enable_if = pw_build_EXECUTABLE_TARGET_TYPE != "arduino_executable"
 }
 
+pw_test("encode_args_test") {
+  sources = [ "encode_args_test.cc" ]
+  deps = [ ":pw_tokenizer" ]
+}
+
 pw_test("hash_test") {
   sources = [
     "hash_test.cc",
diff --git a/pw_tokenizer/CMakeLists.txt b/pw_tokenizer/CMakeLists.txt
index b3d52bd883..bbb21e8c6b 100644
--- a/pw_tokenizer/CMakeLists.txt
+++ b/pw_tokenizer/CMakeLists.txt
@@ -169,6 +169,16 @@ pw_add_test(pw_tokenizer.detokenize_test
     pw_tokenizer
 )
 
+pw_add_test(pw_tokenizer.encode_args_test
+  SOURCES
+    encode_args_test.cc
+  PRIVATE_DEPS
+    pw_tokenizer
+  GROUPS
+    modules
+    pw_tokenizer
+)
+
 pw_add_test(pw_tokenizer.hash_test
   SOURCES
     hash_test.cc
diff --git a/pw_tokenizer/docs.rst b/pw_tokenizer/docs.rst
index 8f45c1290f..62ea898be9 100644
--- a/pw_tokenizer/docs.rst
+++ b/pw_tokenizer/docs.rst
@@ -424,6 +424,10 @@ Arguments are encoded as follows:
 arguments short or avoid encoding them as strings (e.g. encode an enum as an
 integer instead of a string). See also `Tokenized strings as %s arguments`_.
 
+Buffer sizing helper
+--------------------
+.. doxygenfunction:: pw::tokenizer::MinEncodingBufferSizeBytes
+
 Encoding command line utility
 -----------------------------
 The ``pw_tokenizer.encode`` command line tool can be used to encode tokenized
diff --git a/pw_tokenizer/encode_args_test.cc b/pw_tokenizer/encode_args_test.cc
new file mode 100644
index 0000000000..131f27b9ad
--- /dev/null
+++ b/pw_tokenizer/encode_args_test.cc
@@ -0,0 +1,42 @@
+// Copyright 2022 The Pigweed Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may not
+// use this file except in compliance with the License. You may obtain a copy of
+// the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+// License for the specific language governing permissions and limitations under
+// the License.
+ +#include "pw_tokenizer/encode_args.h" + +#include "gtest/gtest.h" + +namespace pw { +namespace tokenizer { + +static_assert(MinEncodingBufferSizeBytes<>() == 4); +static_assert(MinEncodingBufferSizeBytes() == 4 + 2); +static_assert(MinEncodingBufferSizeBytes() == 4 + 2); +static_assert(MinEncodingBufferSizeBytes() == 4 + 3); +static_assert(MinEncodingBufferSizeBytes() == 4 + 5); +static_assert(MinEncodingBufferSizeBytes() == 4 + 10); +static_assert(MinEncodingBufferSizeBytes() == 4 + 4); +static_assert(MinEncodingBufferSizeBytes() == 4 + 4); +static_assert(MinEncodingBufferSizeBytes() == 4 + 1); +static_assert(MinEncodingBufferSizeBytes() == 4 + 5 || + MinEncodingBufferSizeBytes() == 4 + 10); + +static_assert(MinEncodingBufferSizeBytes() == 4 + 5 + 4); +static_assert(MinEncodingBufferSizeBytes() == + 4 + 5 + 5 + 1); +static_assert( + MinEncodingBufferSizeBytes() == + 4 + 1 + 10 + 5 + 3); + +} // namespace tokenizer +} // namespace pw diff --git a/pw_tokenizer/public/pw_tokenizer/encode_args.h b/pw_tokenizer/public/pw_tokenizer/encode_args.h index 43124e51fe..07e6e31921 100644 --- a/pw_tokenizer/public/pw_tokenizer/encode_args.h +++ b/pw_tokenizer/public/pw_tokenizer/encode_args.h @@ -25,12 +25,52 @@ #include +#include "pw_polyfill/standard.h" #include "pw_span/span.h" #include "pw_tokenizer/config.h" #include "pw_tokenizer/tokenize.h" -namespace pw { -namespace tokenizer { +namespace pw::tokenizer { +namespace internal { + +// Returns the maximum encoded size of an argument of the specified type. +template +constexpr size_t ArgEncodedSizeBytes() { + constexpr pw_tokenizer_ArgTypes kType = VarargsType(); + if constexpr (kType == PW_TOKENIZER_ARG_TYPE_DOUBLE) { + return sizeof(float); + } else if constexpr (kType == PW_TOKENIZER_ARG_TYPE_STRING) { + return 1; // Size of the length byte only + } else if constexpr (kType == PW_TOKENIZER_ARG_TYPE_INT64) { + return 10; // Max size of a varint-encoded 64-bit integer + } else if constexpr (kType == PW_TOKENIZER_ARG_TYPE_INT) { + return sizeof(T) + 1; // Max size of zig-zag varint integer <= 32-bits + } else { + static_assert(sizeof(T) != sizeof(T), "Unsupported argument type"); + } +} + +} // namespace internal + +/// Calculates the minimum buffer size to allocate that is guaranteed to support +/// encoding the specified arguments. +/// +/// The contents of strings are NOT included in this total. The string's +/// length/status byte is guaranteed to fit, but the string contents may be +/// truncated. Encoding is considered to succeed as long as the string's +/// length/status byte is written, even if the actual string is truncated. +/// +/// Examples: +/// +/// - Message with no arguments: +/// `MinEncodingBufferSizeBytes() == 4` +/// - Message with an int argument +/// `MinEncodingBufferSizeBytes() == 9 (4 + 5)` +template +constexpr size_t MinEncodingBufferSizeBytes() { + return (sizeof(pw_tokenizer_Token) + ... + + internal::ArgEncodedSizeBytes()); +} /// Encodes a tokenized string's arguments to a buffer. The /// @cpp_type{pw_tokenizer_ArgTypes} parameter specifies the argument types, in @@ -97,8 +137,7 @@ class EncodedMessage { size_t size_; }; -} // namespace tokenizer -} // namespace pw +} // namespace pw::tokenizer #endif // PW_CXX_STANDARD_IS_SUPPORTED(17)