36template <
typename Builder>
40 auto block_data_copy = block_data;
47 for (
size_t i = 0; i < 16; ++i) {
48 sparse_bytes[15 - i] = lookup[ColumnIdx::C2][i];
58 for (
size_t i = 0; i < 16; ++i) {
59 uint64_t sparse_byte =
uint256_t(sparse_bytes[i].get_value()).
data[0];
60 uint256_t byte = numeric::map_from_sparse_form<AES128_BASE>(sparse_byte);
62 accumulator += (byte);
69 for (
size_t i = 0; i < 16; ++i) {
70 sparse_bytes[15 - i].
assert_equal(lookup[ColumnIdx::C2][i]);
106template <
typename Builder>
113 0x20, 0x40, 0x80, 0x1b, 0x36 };
114 const auto sparse_round_constants = [&]() {
116 for (
size_t i = 0; i < 11; ++i) {
117 result[i] =
field_t<Builder>(ctx,
fr(numeric::map_into_sparse_form<AES128_BASE>(round_constants[i])));
134 for (
size_t i = 0; i < 16; ++i) {
135 round_key[i] = sparse_key[i];
139 for (
size_t i = 4; i < 44; ++i) {
140 size_t k = (i - 1) * 4;
142 temp_add_counts[0] = add_counts[k + 0];
143 temp_add_counts[1] = add_counts[k + 1];
144 temp_add_counts[2] = add_counts[k + 2];
145 temp_add_counts[3] = add_counts[k + 3];
147 temp[0] = round_key[k];
148 temp[1] = round_key[k + 1];
149 temp[2] = round_key[k + 2];
150 temp[3] = round_key[k + 3];
153 if ((i & 0x03) == 0) {
155 const auto t = temp[0];
169 temp[0] = temp[0] + sparse_round_constants[i >> 2];
170 ++temp_add_counts[0];
177 round_key[j] = round_key[k] + temp[0];
178 round_key[j + 1] = round_key[k + 1] + temp[1];
179 round_key[j + 2] = round_key[k + 2] + temp[2];
180 round_key[j + 3] = round_key[k + 3] + temp[3];
182 add_counts[j] = add_counts[k] + temp_add_counts[0];
183 add_counts[j + 1] = add_counts[k + 1] + temp_add_counts[1];
184 add_counts[j + 2] = add_counts[k + 2] + temp_add_counts[2];
185 add_counts[j + 3] = add_counts[k + 3] + temp_add_counts[3];
188 constexpr uint64_t target = 3;
189 for (
size_t k = 0; k < 4; ++k) {
192 size_t byte_index = j + k;
193 if (add_counts[byte_index] > target || (add_counts[byte_index] > 1 && (byte_index & 12) == 12)) {
196 add_counts[byte_index] = 1;
216 state[9] = state[13];
220 state[2] = state[10];
223 state[6] = state[14];
227 state[3] = state[15];
228 state[15] = state[11];
229 state[11] = state[7];
262template <
typename Builder>
267 auto t0 = column_pairs[0].first.add_two(column_pairs[3].first, column_pairs[1].second);
269 auto t1 = column_pairs[1].first.add_two(column_pairs[2].first, column_pairs[3].second);
272 auto r0 = t0.add_two(column_pairs[2].first, column_pairs[0].second);
274 auto r1 = t0.add_two(column_pairs[1].first, column_pairs[2].second);
276 auto r2 = t1.add_two(column_pairs[0].first, column_pairs[2].second);
278 auto r3 = t1.add_two(column_pairs[0].second, column_pairs[3].first);
281 column_pairs[0].first = r0 + round_key[(round * 16U)];
282 column_pairs[1].first = r1 + round_key[(round * 16U) + 1];
283 column_pairs[2].first = r2 + round_key[(round * 16U) + 2];
284 column_pairs[3].first = r3 + round_key[(round * 16U) + 3];
287template <
typename Builder>
298 for (
size_t i = 0; i < 16; ++i) {
303template <
typename Builder>
306 for (
size_t i = 0; i < 16; i += 4) {
307 for (
size_t j = 0; j < 4; ++j) {
308 sparse_state[i + j].first += sparse_round_key[(round * 16U) + i + j];
315 for (
size_t i = 0; i < 16; ++i) {
316 state[i].first += iv[i];
320template <
typename Builder>
324 for (
size_t i = 0; i < 16; ++i) {
328 for (
size_t round = 1; round < 10; ++round) {
332 for (
size_t i = 0; i < 16; ++i) {
342template <
typename Builder>
349 for (
const auto& input_block : input) {
350 if (!input_block.is_constant()) {
351 all_constants =
false;
359 std::vector<uint8_t> key_bytes(16);
360 std::vector<uint8_t> iv_bytes(16);
361 std::vector<uint8_t> input_bytes(input.size() * 16);
365 for (
size_t i = 0; i < 16; ++i) {
366 key_bytes[15 - i] =
static_cast<uint8_t
>((key_value >> (i * 8)) & 0xFF);
371 for (
size_t i = 0; i < 16; ++i) {
372 iv_bytes[15 - i] =
static_cast<uint8_t
>((iv_value >> (i * 8)) & 0xFF);
376 for (
size_t block_idx = 0; block_idx < input.size(); ++block_idx) {
377 uint256_t block_value = input[block_idx].get_value();
378 for (
size_t i = 0; i < 16; ++i) {
379 input_bytes[block_idx * 16 + 15 - i] =
static_cast<uint8_t
>((block_value >> (i * 8)) & 0xFF);
387 for (
size_t block_idx = 0; block_idx < input.size(); ++block_idx) {
389 for (
size_t i = 0; i < 16; ++i) {
391 result_value += input_bytes[block_idx * 16 + i];
401 if (!
key.is_constant()) {
402 ctx =
key.get_context();
406 for (
const auto& input_block : input) {
407 if (!input_block.is_constant()) {
408 ctx = input_block.get_context();
418 const size_t num_blocks = input.size();
421 for (
size_t i = 0; i < num_blocks; ++i) {
423 for (
const auto&
byte : bytes) {
424 sparse_state.push_back({ byte,
field_t(ctx,
fr(0)) });
430 for (
size_t i = 0; i < num_blocks; ++i) {
435 for (
size_t j = 0; j < 16; ++j) {
436 sparse_iv[j] = round_state[j].first;
441 for (
auto&
element : sparse_state) {
446 for (
size_t i = 0; i < num_blocks; ++i) {
451#define INSTANTIATE_ENCRYPT_BUFFER_CBC(Builder) \
452 template std::vector<field_t<Builder>> encrypt_buffer_cbc<Builder>( \
453 const std::vector<field_t<Builder>>&, const field_t<Builder>&, const field_t<Builder>&)
#define BB_ASSERT(expression,...)
void assert_equal(const field_t &rhs, std::string const &msg="field_t::assert_equal") const
Copy constraint: constrains that this field element is equal to the rhs element.
Builder * get_context() const
bb::fr get_value() const
Given a := *this, compute its value given by a.v * a.mul + a.add.
void convert_constant_to_fixed_witness(Builder *ctx)
void aes128_encrypt_buffer_cbc(uint8_t *buffer, uint8_t *iv, const uint8_t *key, const size_t length)
byte_pair< Builder > apply_aes_sbox_map(Builder *, field_t< Builder > &input)
void mix_columns_and_add_round_key(byte_pair< Builder > *state_pairs, field_t< Builder > *round_key, uint64_t round)
field_t< Builder > normalize_sparse_form(Builder *, field_t< Builder > &byte)
void xor_with_iv(byte_pair< Builder > *state, field_t< Builder > *iv)
constexpr uint32_t AES128_BASE
std::array< field_t< Builder >, 16 > convert_into_sparse_bytes(Builder *ctx, const field_t< Builder > &block_data)
void sub_bytes(Builder *ctx, byte_pair< Builder > *state_pairs)
std::pair< field_t< Builder >, field_t< Builder > > byte_pair
std::array< field_t< Builder >, EXTENDED_KEY_LENGTH > expand_key(Builder *ctx, const field_t< Builder > &key)
Expands a 128-bit AES key into the full key schedule (EXTENDED_KEY_LENGTH bytes / 11 round keys).
constexpr size_t EXTENDED_KEY_LENGTH
void add_round_key(byte_pair< Builder > *sparse_state, field_t< Builder > *sparse_round_key, uint64_t round)
void aes128_cipher(Builder *ctx, byte_pair< Builder > *state, field_t< Builder > *sparse_round_key)
void shift_rows(byte_pair< Builder > *state)
The SHIFTROW() operation as in FIPS 197, Section 5.1.2.
field_t< Builder > convert_from_sparse_bytes(Builder *ctx, field_t< Builder > *sparse_bytes)
std::vector< field_t< Builder > > encrypt_buffer_cbc(const std::vector< field_t< Builder > > &input, const field_t< Builder > &iv, const field_t< Builder > &key)
void mix_column_and_add_round_key(byte_pair< Builder > *column_pairs, field_t< Builder > *round_key, uint64_t round)
Performs MixColumns on a single column and adds the round key (FIPS 197, Sections 5.1.3 and 5.1.4).
std::conditional_t< IsGoblinBigGroup< C, Fq, Fr, G >, element_goblin::goblin_element< C, goblin_field< C >, Fr, G >, element_default::element< C, Fq, Fr, G > > element
element wraps either element_default::element or element_goblin::goblin_element depending on parametrisation.
field< Bn254FrParams > fr
constexpr decltype(auto) get(::tuplet::tuple< T... > &&t) noexcept
#define INSTANTIATE_ENCRYPT_BUFFER_CBC(Builder)