Chore: Modernize copy_out_be/le #3885
Conversation
Force-pushed 00f3d42 to abb2b14
Force-pushed 21260ca to c6495cd
Rebased after #3888.
Force-pushed c6495cd to 7ae6f31
The copy changes look ok. Please revert/split out the aes changes - I see what you’re doing there and it makes sense, but IMO it should be handled with some dedicated helpers that pair the BufferStuffer and BufferSlicer.
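(Aside, not part of the PR: a minimal sketch of what such a dedicated pairing helper might look like. It reuses only the BufferSlicer/BufferStuffer constructors and member functions that appear in the diff further down; the class name, its interface, and the assumption that take()/next() hand back byte spans are purely illustrative.)

```cpp
#include <botan/internal/stl_util.h>   // BufferSlicer / BufferStuffer (internal header, per the diff below)

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <span>
#include <utility>

// Hypothetical helper that pairs an input BufferSlicer with an output
// BufferStuffer, so a caller always consumes matched chunks of both.
class PairedBuffers {
   public:
      PairedBuffers(std::span<const uint8_t> in, std::span<uint8_t> out) : m_in(in), m_out(out) {}

      bool done() const { return m_in.empty() && m_out.full(); }

      // Hands back up to max_bytes of input together with an equally sized output chunk.
      std::pair<std::span<const uint8_t>, std::span<uint8_t>> next_chunk(size_t max_bytes) {
         const size_t n = std::min(m_in.remaining(), max_bytes);
         return {m_in.take(n), m_out.next(n)};
      }

   private:
      Botan::BufferSlicer m_in;
      Botan::BufferStuffer m_out;
};
```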
Co-Authored-By: Fabian Albert <[email protected]>
Force-pushed 7ae6f31 to a88cc06
Co-Authored-By: Fabian Albert <[email protected]>
Force-pushed a88cc06 to bd94e8e
Sure, makes sense not to mix concerns here. I reverted it to the point that it now just blindly wraps spans around the raw pointers and calls the new overload (a generic sketch of that wrapping pattern follows after the diff). Below is the removed diff for later reference:
diff --git a/src/lib/block/aes/aes.cpp b/src/lib/block/aes/aes.cpp
index 9f110892b..909d320c5 100644
--- a/src/lib/block/aes/aes.cpp
+++ b/src/lib/block/aes/aes.cpp
@@ -11,6 +11,9 @@
#include <botan/internal/ct_utils.h>
#include <botan/internal/loadstor.h>
#include <botan/internal/rotate.h>
+#include <botan/internal/stl_util.h>
+
+#include <span>
namespace Botan {
@@ -501,8 +504,12 @@ void inv_mix_columns(uint32_t B[8]) {
/*
* AES Encryption
*/
-void aes_encrypt_n(const uint8_t in[], uint8_t out[], size_t blocks, const secure_vector<uint32_t>& EK) {
+void aes_encrypt_n(std::span<const uint8_t> in, std::span<uint8_t> out, const secure_vector<uint32_t>& EK) {
+ constexpr size_t BLOCK_SIZE = 16;
+ constexpr size_t BITSLICED_BLOCK_BYTES = 2 * BLOCK_SIZE;
+
BOTAN_ASSERT(EK.size() == 44 || EK.size() == 52 || EK.size() == 60, "Key was set");
+ BOTAN_ASSERT_NOMSG(in.size() == out.size() && in.size() % BLOCK_SIZE == 0);
const size_t rounds = (EK.size() - 4) / 4;
@@ -511,15 +518,16 @@ void aes_encrypt_n(const uint8_t in[], uint8_t out[], size_t blocks, const secur
ks_expand(&KS[8 * i], EK.data(), 4 * i + 4);
}
- const size_t BLOCK_SIZE = 16;
- const size_t BITSLICED_BLOCKS = 8 * sizeof(uint32_t) / BLOCK_SIZE;
+ BufferSlicer in_bs(in);
+ BufferStuffer out_bs(out);
- while(blocks > 0) {
- const size_t this_loop = std::min(blocks, BITSLICED_BLOCKS);
+ while(!in_bs.empty()) {
+ const size_t bytes_this_loop = std::min(in_bs.remaining(), BITSLICED_BLOCK_BYTES);
- uint32_t B[8] = {0};
+ uint32_t B[8] = {};
+ std::span<uint32_t> B_this_loop(B, bytes_this_loop / sizeof(uint32_t));
- load_be(B, in, this_loop * 4);
+ load_be(B_this_loop, in_bs.take(bytes_this_loop));
CT::poison(B, 8);
@@ -550,19 +558,22 @@ void aes_encrypt_n(const uint8_t in[], uint8_t out[], size_t blocks, const secur
CT::unpoison(B, 8);
- copy_out_be(std::span(out, this_loop * 4 * sizeof(uint32_t)), B);
-
- in += this_loop * BLOCK_SIZE;
- out += this_loop * BLOCK_SIZE;
- blocks -= this_loop;
+ store_be(out_bs.next(bytes_this_loop), B_this_loop);
}
+
+ BOTAN_ASSERT_NOMSG(in_bs.empty());
+ BOTAN_ASSERT_NOMSG(out_bs.full());
}
/*
* AES Decryption
*/
-void aes_decrypt_n(const uint8_t in[], uint8_t out[], size_t blocks, const secure_vector<uint32_t>& DK) {
+void aes_decrypt_n(std::span<const uint8_t> in, std::span<uint8_t> out, const secure_vector<uint32_t>& DK) {
+ constexpr size_t BLOCK_SIZE = 16;
+ constexpr size_t BITSLICED_BLOCK_BYTES = 2 * BLOCK_SIZE;
+
BOTAN_ASSERT(DK.size() == 44 || DK.size() == 52 || DK.size() == 60, "Key was set");
+ BOTAN_ASSERT_NOMSG(in.size() == out.size() && in.size() % BLOCK_SIZE == 0);
const size_t rounds = (DK.size() - 4) / 4;
@@ -571,17 +582,18 @@ void aes_decrypt_n(const uint8_t in[], uint8_t out[], size_t blocks, const secur
ks_expand(&KS[8 * i], DK.data(), 4 * i + 4);
}
- const size_t BLOCK_SIZE = 16;
- const size_t BITSLICED_BLOCKS = 8 * sizeof(uint32_t) / BLOCK_SIZE;
+ BufferSlicer in_bs(in);
+ BufferStuffer out_bs(out);
- while(blocks > 0) {
- const size_t this_loop = std::min(blocks, BITSLICED_BLOCKS);
+ while(!in_bs.empty()) {
+ const size_t bytes_this_loop = std::min(in_bs.remaining(), BITSLICED_BLOCK_BYTES);
- uint32_t B[8] = {0};
+ uint32_t B[8] = {};
+ std::span<uint32_t> B_this_loop(B, bytes_this_loop / sizeof(uint32_t));
CT::poison(B, 8);
- load_be(B, in, this_loop * 4);
+ load_be(B_this_loop, in_bs.take(bytes_this_loop));
for(size_t i = 0; i != 8; ++i) {
B[i] ^= DK[i % 4];
@@ -610,12 +622,11 @@ void aes_decrypt_n(const uint8_t in[], uint8_t out[], size_t blocks, const secur
CT::unpoison(B, 8);
- copy_out_be(std::span(out, this_loop * 4 * sizeof(uint32_t)), B);
-
- in += this_loop * BLOCK_SIZE;
- out += this_loop * BLOCK_SIZE;
- blocks -= this_loop;
+ store_be(out_bs.next(bytes_this_loop), B_this_loop);
}
+
+ BOTAN_ASSERT_NOMSG(in_bs.empty());
+ BOTAN_ASSERT_NOMSG(out_bs.full());
}
inline uint32_t xtime32(uint32_t s) {
@@ -824,7 +835,8 @@ void AES_128::encrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const
}
#endif
- aes_encrypt_n(in, out, blocks, m_EK);
+ // TODO: Adapt once we use std::span<> for BlockCipher::encrypt_n()
+ aes_encrypt_n(std::span<const uint8_t>(in, blocks * BLOCK_SIZE), std::span<uint8_t>(out, blocks * BLOCK_SIZE), m_EK);
}
void AES_128::decrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const {
@@ -842,7 +854,8 @@ void AES_128::decrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const
}
#endif
- aes_decrypt_n(in, out, blocks, m_DK);
+ // TODO: Adapt once we use std::span<> for BlockCipher::decrypt_n()
+ aes_decrypt_n(std::span<const uint8_t>(in, blocks * BLOCK_SIZE), std::span<uint8_t>(out, blocks * BLOCK_SIZE), m_DK);
}
void AES_128::key_schedule(std::span<const uint8_t> key) {
@@ -887,7 +900,8 @@ void AES_192::encrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const
}
#endif
- aes_encrypt_n(in, out, blocks, m_EK);
+ // TODO: Adapt once we use std::span<> for BlockCipher::encrypt_n()
+ aes_encrypt_n(std::span(in, blocks * BLOCK_SIZE), std::span(out, blocks * BLOCK_SIZE), m_EK);
}
void AES_192::decrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const {
@@ -905,7 +919,8 @@ void AES_192::decrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const
}
#endif
- aes_decrypt_n(in, out, blocks, m_DK);
+ // TODO: Adapt once we use std::span<> for BlockCipher::decrypt_n()
+ aes_decrypt_n(std::span<const uint8_t>(in, blocks * BLOCK_SIZE), std::span<uint8_t>(out, blocks * BLOCK_SIZE), m_DK);
}
void AES_192::key_schedule(std::span<const uint8_t> key) {
@@ -950,7 +965,8 @@ void AES_256::encrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const
}
#endif
- aes_encrypt_n(in, out, blocks, m_EK);
+ // TODO: Adapt once we use std::span<> for BlockCipher::encrypt_n()
+ aes_encrypt_n(std::span(in, blocks * BLOCK_SIZE), std::span(out, blocks * BLOCK_SIZE), m_EK);
}
void AES_256::decrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const {
@@ -968,7 +984,8 @@ void AES_256::decrypt_n(const uint8_t in[], uint8_t out[], size_t blocks) const
}
#endif
- aes_decrypt_n(in, out, blocks, m_DK);
+ // TODO: Adapt once we use std::span<> for BlockCipher::decrypt_n()
+ aes_decrypt_n(std::span<const uint8_t>(in, blocks * BLOCK_SIZE), std::span<uint8_t>(out, blocks * BLOCK_SIZE), m_DK);
}
void AES_256::key_schedule(std::span<const uint8_t> key) {
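(Illustration only, not the merged code: the "blindly wraps spans around the raw pointers and calls the new overload" pattern mentioned above boils down to a thin forwarding shim along these lines; the function name and signatures here are assumptions.)

```cpp
#include <cstddef>
#include <cstdint>
#include <span>

// Assumed shape of the span-based overload that does the real work.
void process(std::span<uint8_t> out, std::span<const uint32_t> words);

// The pointer-based entry point stays as-is and merely wraps spans around
// the raw pointers before forwarding to the span-based overload.
void process(uint8_t out[], const uint32_t words[], size_t n_words) {
   process(std::span<uint8_t>(out, n_words * sizeof(uint32_t)),
           std::span<const uint32_t>(words, n_words));
}
```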
Pull Request Dependencies
Description
This removes the legacy copy_out_{be/le} helpers in loadstor.h. We replaced them with more versatile overloads of load_* and store_* that can handle collections of unsigned integers. Here's the general idea of the helpers that can take collections:
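A minimal sketch of the idea (not the actual loadstor.h code, hence the _sketch suffixes): a store_* overload that serializes a contiguous collection of words into an output byte span, and a load_* counterpart that goes the other way.

```cpp
#include <concepts>
#include <cstddef>
#include <cstdint>
#include <span>

// Sketch only: write a collection of words into an output byte span,
// most significant byte first (big-endian), one word after another.
template <std::unsigned_integral T>
void store_be_sketch(std::span<uint8_t> out, std::span<const T> words) {
   // caller guarantees: out.size() == words.size() * sizeof(T)
   for(size_t i = 0; i != words.size(); ++i) {
      for(size_t b = 0; b != sizeof(T); ++b) {
         out[i * sizeof(T) + b] = static_cast<uint8_t>(words[i] >> (8 * (sizeof(T) - 1 - b)));
      }
   }
}

// Sketch only: the inverse, filling a collection of words from a byte span.
template <std::unsigned_integral T>
void load_be_sketch(std::span<T> words, std::span<const uint8_t> in) {
   // caller guarantees: in.size() == words.size() * sizeof(T)
   for(size_t i = 0; i != words.size(); ++i) {
      T v = 0;
      for(size_t b = 0; b != sizeof(T); ++b) {
         v = static_cast<T>((v << 8) | in[i * sizeof(T) + b]);
      }
      words[i] = v;
   }
}
```

The argument order mirrors the calls in the diff above: load_be(words, bytes) and store_be(bytes, words).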
Also added tests for the new implementations of copy_out_x.
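(Not the PR's actual tests: a round-trip check in the spirit of those tests, written against the _sketch helpers above, could look like this.)

```cpp
#include <array>
#include <cassert>
#include <cstdint>

// Round-trip check (illustrative only) for the store_be_sketch /
// load_be_sketch helpers sketched in the description above.
int main() {
   const std::array<uint32_t, 2> words{0x01020304, 0xA1B2C3D4};
   std::array<uint8_t, 8> bytes{};

   store_be_sketch<uint32_t>(bytes, words);
   assert(bytes[0] == 0x01 && bytes[3] == 0x04);   // big-endian byte order
   assert(bytes[4] == 0xA1 && bytes[7] == 0xD4);

   std::array<uint32_t, 2> round_trip{};
   load_be_sketch<uint32_t>(round_trip, bytes);
   assert(round_trip == words);

   return 0;
}
```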