Barretenberg
The ZK-SNARK library at the core of Aztec
Loading...
Searching...
No Matches
aes128.cpp
Go to the documentation of this file.
1// === AUDIT STATUS ===
2// internal: { status: not started, auditors: [], date: YYYY-MM-DD }
3// external_1: { status: not started, auditors: [], date: YYYY-MM-DD }
4// external_2: { status: not started, auditors: [], date: YYYY-MM-DD }
5// =====================
6
7#include "./aes128.hpp"
8
12
15
16using namespace bb::crypto;
17
// A state byte travels as a pair: .first is the byte in sparse form; .second is an
// auxiliary sparse value produced by the S-box lookup and consumed by MixColumns
// (its algebra matches 3*x in GF(2^8) -- see mix_column_and_add_round_key).
19template <typename Builder> using byte_pair = std::pair<field_t<Builder>, field_t<Builder>>;
20using namespace bb::plookup;
21
// Radix of the "sparse form": each bit of a byte occupies one base-9 digit, so several
// additions (standing in for XOR) can accumulate per digit before a normalization
// lookup must reduce the digits back to bits.
22constexpr uint32_t AES128_BASE = 9;
23
25{
27 return result;
28}
29
34
35template <typename Builder>
37{
38 std::array<field_t<Builder>, 16> sparse_bytes;
39 auto block_data_copy = block_data;
40 if (block_data.is_constant()) {
41 // The algorithm expects that the sparse bytes are witnesses, so the block_data_copy must be a witness
42 block_data_copy.convert_constant_to_fixed_witness(ctx);
43 }
44 // Existing lookup logic
45 auto lookup = plookup_read<Builder>::get_lookup_accumulators(AES_INPUT, block_data_copy);
46 for (size_t i = 0; i < 16; ++i) {
47 sparse_bytes[15 - i] = lookup[ColumnIdx::C2][i];
48 }
49 return sparse_bytes;
50}
51
// Repack 16 sparse-form bytes into one 16-byte (big-endian) field element.
// The value is computed out-of-circuit as a fresh witness, then constrained by
// decomposing that witness through a plookup and asserting each accumulator
// slice equals the corresponding input sparse byte.
52template <typename Builder> field_t<Builder> convert_from_sparse_bytes(Builder* ctx, field_t<Builder>* sparse_bytes)
53{
55
// Out-of-circuit: map each sparse byte back to its binary value and pack MSB-first.
56 uint256_t accumulator = 0;
57 for (size_t i = 0; i < 16; ++i) {
58 uint64_t sparse_byte = uint256_t(sparse_bytes[i].get_value()).data[0];
59 uint256_t byte = numeric::map_from_sparse_form<AES128_BASE>(sparse_byte);
60 accumulator <<= 8;
61 accumulator += (byte);
62 }
63
64 field_t<Builder> result = witness_t(ctx, fr(accumulator));
65
// NOTE(review): the statement defining `lookup` (original line 66) is missing from this
// extraction; it presumably builds lookup accumulators over `result`, mirroring
// convert_into_sparse_bytes -- confirm against the repository source.
67
// In-circuit: each accumulator byte (emitted MSB-first) must equal the input sparse byte.
68 for (size_t i = 0; i < 16; ++i) {
69 sparse_bytes[15 - i].assert_equal(lookup[ColumnIdx::C2][i]);
70 }
71
72 return result;
73}
74
75template <typename Builder> std::array<field_t<Builder>, 176> expand_key(Builder* ctx, const field_t<Builder>& key)
76{
77 constexpr uint8_t round_constants[11] = { 0x8d, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36 };
78 std::array<field_t<Builder>, 11> sparse_round_constants{
79 field_t(ctx, fr(numeric::map_into_sparse_form<AES128_BASE>(round_constants[0]))),
80 field_t(ctx, fr(numeric::map_into_sparse_form<AES128_BASE>(round_constants[1]))),
81 field_t(ctx, fr(numeric::map_into_sparse_form<AES128_BASE>(round_constants[2]))),
82 field_t(ctx, fr(numeric::map_into_sparse_form<AES128_BASE>(round_constants[3]))),
83 field_t(ctx, fr(numeric::map_into_sparse_form<AES128_BASE>(round_constants[4]))),
84 field_t(ctx, fr(numeric::map_into_sparse_form<AES128_BASE>(round_constants[5]))),
85 field_t(ctx, fr(numeric::map_into_sparse_form<AES128_BASE>(round_constants[6]))),
86 field_t(ctx, fr(numeric::map_into_sparse_form<AES128_BASE>(round_constants[7]))),
87 field_t(ctx, fr(numeric::map_into_sparse_form<AES128_BASE>(round_constants[8]))),
88 field_t(ctx, fr(numeric::map_into_sparse_form<AES128_BASE>(round_constants[9]))),
89 field_t(ctx, fr(numeric::map_into_sparse_form<AES128_BASE>(round_constants[10]))),
90 };
91
92 std::array<field_t<Builder>, 176> round_key{};
93 const auto sparse_key = convert_into_sparse_bytes(ctx, key);
94
95 field_t<Builder> temp[4]{};
96 uint64_t temp_add_counts[4]{};
97 uint64_t add_counts[176]{};
98 for (size_t i = 0; i < 176; ++i) {
99 add_counts[i] = 1;
100 }
101 for (size_t i = 0; i < 16; ++i) {
102 round_key[i] = sparse_key[i];
103 }
104
105 for (size_t i = 4; i < 44; ++i) {
106 size_t k = (i - 1) * 4;
107
108 temp_add_counts[0] = add_counts[k + 0];
109 temp_add_counts[1] = add_counts[k + 1];
110 temp_add_counts[2] = add_counts[k + 2];
111 temp_add_counts[3] = add_counts[k + 3];
112
113 temp[0] = round_key[k];
114 temp[1] = round_key[k + 1];
115 temp[2] = round_key[k + 2];
116 temp[3] = round_key[k + 3];
117
118 if ((i & 0x03) == 0) {
119 const auto t = temp[0];
120 temp[0] = temp[1];
121 temp[1] = temp[2];
122 temp[2] = temp[3];
123 temp[3] = t;
124
125 temp[0] = apply_aes_sbox_map(ctx, temp[0]).first;
126 temp[1] = apply_aes_sbox_map(ctx, temp[1]).first;
127 temp[2] = apply_aes_sbox_map(ctx, temp[2]).first;
128 temp[3] = apply_aes_sbox_map(ctx, temp[3]).first;
129
130 temp[0] = temp[0] + sparse_round_constants[i >> 2];
131 temp[0] = temp[0];
132 ++temp_add_counts[0];
133 }
134
135 size_t j = i * 4;
136 k = (i - 4) * 4;
137 round_key[j] = round_key[k] + temp[0];
138 round_key[j + 1] = round_key[k + 1] + temp[1];
139 round_key[j + 2] = round_key[k + 2] + temp[2];
140 round_key[j + 3] = round_key[k + 3] + temp[3];
141
142 add_counts[j] = add_counts[k] + temp_add_counts[0];
143 add_counts[j + 1] = add_counts[k + 1] + temp_add_counts[1];
144 add_counts[j + 2] = add_counts[k + 2] + temp_add_counts[2];
145 add_counts[j + 3] = add_counts[k + 3] + temp_add_counts[3];
146
147 constexpr uint64_t target = 3;
148 if (add_counts[j] > target || (add_counts[j] > 1 && (j & 12) == 12)) {
149 round_key[j] = normalize_sparse_form(ctx, round_key[j]);
150 add_counts[j] = 1;
151 }
152 if (add_counts[j + 1] > target || (add_counts[j + 1] > 1 && ((j + 1) & 12) == 12)) {
153 round_key[j + 1] = normalize_sparse_form(ctx, round_key[j + 1]);
154 add_counts[j + 1] = 1;
155 }
156 if (add_counts[j + 2] > target || (add_counts[j + 2] > 1 && ((j + 2) & 12) == 12)) {
157 round_key[j + 2] = normalize_sparse_form(ctx, round_key[j + 2]);
158 add_counts[j + 2] = 1;
159 }
160 if (add_counts[j + 3] > target || (add_counts[j + 3] > 1 && ((j + 3) & 12) == 12)) {
161 round_key[j + 3] = normalize_sparse_form(ctx, round_key[j + 3]);
162 add_counts[j + 3] = 1;
163 }
164 }
165
166 return round_key;
167}
168
169template <typename Builder> void shift_rows(byte_pair<Builder>* state)
170{
171 byte_pair<Builder> temp = state[1];
172 state[1] = state[5];
173 state[5] = state[9];
174 state[9] = state[13];
175 state[13] = temp;
176
177 temp = state[2];
178 state[2] = state[10];
179 state[10] = temp;
180 temp = state[6];
181 state[6] = state[14];
182 state[14] = temp;
183
184 temp = state[3];
185 state[3] = state[15];
186 state[15] = state[11];
187 state[11] = state[7];
188 state[7] = temp;
189}
190
// Sparse-form MixColumns fused with AddRoundKey for one 4-byte column.
// Each byte_pair carries .first = sparse byte x and .second = a companion sparse
// value; the algebra below is consistent with .second == 3*x in GF(2^8)
// (x ^ xtime(x)) -- TODO confirm against the S-box table populating these pairs.
// With sparse addition standing in for XOR (digit parity is restored by the
// caller's subsequent normalization), the five-term sums realize the MixColumns
// matrix rows (2 3 1 1), (1 2 3 1), (1 1 2 3), (3 1 1 2).
191template <typename Builder>
192void mix_column_and_add_round_key(byte_pair<Builder>* column_pairs, field_t<Builder>* round_key, uint64_t round)
193{
194
// Shared sub-expressions: t0 = a0 + a3 + 3*a1, t1 = a1 + a2 + 3*a3.
195 auto t0 = column_pairs[0].first.add_two(column_pairs[3].first, column_pairs[1].second);
196 auto t1 = column_pairs[1].first.add_two(column_pairs[2].first, column_pairs[3].second);
197
// r0 = 2*a0 ^ 3*a1 ^ a2 ^ a3, r1 = a0 ^ 2*a1 ^ 3*a2 ^ a3,
// r2 = a0 ^ a1 ^ 2*a2 ^ 3*a3, r3 = 3*a0 ^ a1 ^ a2 ^ 2*a3.
198 auto r0 = t0.add_two(column_pairs[2].first, column_pairs[0].second);
199 auto r1 = t0.add_two(column_pairs[1].first, column_pairs[2].second);
200 auto r2 = t1.add_two(column_pairs[0].first, column_pairs[2].second);
201 auto r3 = t1.add_two(column_pairs[0].second, column_pairs[3].first);
202
// Fold in this round's key bytes for the column.
203 column_pairs[0].first = r0 + round_key[(round * 16U)];
204 column_pairs[1].first = r1 + round_key[(round * 16U) + 1];
205 column_pairs[2].first = r2 + round_key[(round * 16U) + 2];
206 column_pairs[3].first = r3 + round_key[(round * 16U) + 3];
207}
208
209template <typename Builder>
210void mix_columns_and_add_round_key(byte_pair<Builder>* state_pairs, field_t<Builder>* round_key, uint64_t round)
211{
212 mix_column_and_add_round_key(state_pairs, round_key, round);
213 mix_column_and_add_round_key(state_pairs + 4, round_key + 4, round);
214 mix_column_and_add_round_key(state_pairs + 8, round_key + 8, round);
215 mix_column_and_add_round_key(state_pairs + 12, round_key + 12, round);
216}
217
218template <typename Builder> void sub_bytes(Builder* ctx, byte_pair<Builder>* state_pairs)
219{
220 for (size_t i = 0; i < 16; ++i) {
221 state_pairs[i] = apply_aes_sbox_map(ctx, state_pairs[i].first);
222 }
223}
224
225template <typename Builder>
226void add_round_key(byte_pair<Builder>* sparse_state, field_t<Builder>* sparse_round_key, uint64_t round)
227{
228 for (size_t i = 0; i < 16; i += 4) {
229 for (size_t j = 0; j < 4; ++j) {
230 sparse_state[i + j].first += sparse_round_key[(round * 16U) + i + j];
231 }
232 }
233}
234
235template <typename Builder> void xor_with_iv(byte_pair<Builder>* state, field_t<Builder>* iv)
236{
237 for (size_t i = 0; i < 16; ++i) {
238 state[i].first += iv[i];
239 }
240}
241
242template <typename Builder>
243void aes128_cipher(Builder* ctx, byte_pair<Builder>* state, field_t<Builder>* sparse_round_key)
244{
245 add_round_key(state, sparse_round_key, 0);
246 for (size_t i = 0; i < 16; ++i) {
247 state[i].first = normalize_sparse_form(ctx, state[i].first);
248 }
249
250 for (size_t round = 1; round < 10; ++round) {
251 sub_bytes(ctx, state);
252 shift_rows(state);
253 mix_columns_and_add_round_key(state, sparse_round_key, round);
254 for (size_t i = 0; i < 16; ++i) {
255 state[i].first = normalize_sparse_form(ctx, state[i].first);
256 }
257 }
258
259 sub_bytes(ctx, state);
260 shift_rows(state);
261 add_round_key(state, sparse_round_key, 10);
262}
263
// AES-128-CBC encryption over a vector of 16-byte blocks (each packed big-endian
// into one field element). If the key, IV and every input block are circuit
// constants, the ciphertext is computed natively (no gates); otherwise the full
// lookup-based circuit is constructed.
// NOTE(review): the first line of the signature (original line 265) is missing from
// this extraction; per the symbol listing it reads
//   std::vector<field_t<Builder>> encrypt_buffer_cbc(const std::vector<field_t<Builder>>& input,
// -- confirm against the repository source.
264template <typename Builder>
266 const field_t<Builder>& iv,
267 const field_t<Builder>& key)
268{
269 // Check if all inputs are constants
270 bool all_constants = key.is_constant() && iv.is_constant();
271 for (const auto& input_block : input) {
272 if (!input_block.is_constant()) {
273 all_constants = false;
274 break;
275 }
276 }
277
278 if (all_constants) {
279 // Compute result directly using native crypto implementation
// NOTE(review): the declaration of `result` (original line 280) is missing from this
// extraction; presumably `std::vector<field_t<Builder>> result;` -- confirm.
281 std::vector<uint8_t> key_bytes(16);
282 std::vector<uint8_t> iv_bytes(16);
283 std::vector<uint8_t> input_bytes(input.size() * 16);
284
285 // Convert key to bytes
286 uint256_t key_value = key.get_value();
287 for (size_t i = 0; i < 16; ++i) {
288 key_bytes[15 - i] = static_cast<uint8_t>((key_value >> (i * 8)) & 0xFF);
289 }
290
291 // Convert IV to bytes
292 uint256_t iv_value = iv.get_value();
293 for (size_t i = 0; i < 16; ++i) {
294 iv_bytes[15 - i] = static_cast<uint8_t>((iv_value >> (i * 8)) & 0xFF);
295 }
296
297 // Convert input blocks to bytes
298 for (size_t block_idx = 0; block_idx < input.size(); ++block_idx) {
299 uint256_t block_value = input[block_idx].get_value();
300 for (size_t i = 0; i < 16; ++i) {
301 input_bytes[block_idx * 16 + 15 - i] = static_cast<uint8_t>((block_value >> (i * 8)) & 0xFF);
302 }
303 }
304
305 // Run native AES encryption (encrypts input_bytes in place)
306 crypto::aes128_encrypt_buffer_cbc(input_bytes.data(), iv_bytes.data(), key_bytes.data(), input_bytes.size());
307
308 // Convert result back to field elements (big-endian repacking, constant field_t)
309 for (size_t block_idx = 0; block_idx < input.size(); ++block_idx) {
310 uint256_t result_value = 0;
311 for (size_t i = 0; i < 16; ++i) {
312 result_value <<= 8;
313 result_value += input_bytes[block_idx * 16 + i];
314 }
315 result.push_back(field_t<Builder>(result_value));
316 }
317
318 return result;
319 }
320
321 // Find a valid context from any of the inputs
322 Builder* ctx = nullptr;
323 if (!key.is_constant()) {
324 ctx = key.get_context();
325 } else if (!iv.is_constant()) {
326 ctx = iv.get_context();
327 } else {
328 for (const auto& input_block : input) {
329 if (!input_block.is_constant()) {
330 ctx = input_block.get_context();
331 break;
332 }
333 }
334 }
335
// At least one input is a witness here (all-constants case returned above), so a
// context must exist.
336 ASSERT(ctx);
337
338 auto round_key = expand_key(ctx, key);
339
340 const size_t num_blocks = input.size();
341
// Build the sparse state: 16 byte_pairs per block; the companion (.second) slots
// start at zero and are populated by the S-box lookups inside the cipher.
342 std::vector<byte_pair<Builder>> sparse_state;
343 for (size_t i = 0; i < num_blocks; ++i) {
344 auto bytes = convert_into_sparse_bytes(ctx, input[i]);
345 for (const auto& byte : bytes) {
346 sparse_state.push_back({ byte, field_t(ctx, fr(0)) });
347 }
348 }
349
350 auto sparse_iv = convert_into_sparse_bytes(ctx, iv);
351
// CBC chain: XOR the running IV into each block, encrypt, then the ciphertext
// becomes the IV for the next block.
352 for (size_t i = 0; i < num_blocks; ++i) {
353 byte_pair<Builder>* round_state = &sparse_state[i * 16];
354 xor_with_iv(round_state, &sparse_iv[0]);
355 aes128_cipher(ctx, round_state, &round_key[0]);
356
357 for (size_t j = 0; j < 16; ++j) {
358 sparse_iv[j] = round_state[j].first;
359 }
360 }
361
// Normalize every ciphertext byte before repacking into field elements.
362 std::vector<field_t<Builder>> sparse_output;
363 for (auto& element : sparse_state) {
364 sparse_output.push_back(normalize_sparse_form(ctx, element.first));
365 }
366
// NOTE(review): the declaration of `output` (original line 367) is missing from this
// extraction; presumably `std::vector<field_t<Builder>> output;` -- confirm.
368 for (size_t i = 0; i < num_blocks; ++i) {
369 output.push_back(convert_from_sparse_bytes(ctx, &sparse_output[i * 16]));
370 }
371 return output;
372}
373#define INSTANTIATE_ENCRYPT_BUFFER_CBC(Builder) \
374 template std::vector<field_t<Builder>> encrypt_buffer_cbc<Builder>( \
375 const std::vector<field_t<Builder>>&, const field_t<Builder>&, const field_t<Builder>&)
376
379} // namespace bb::stdlib::aes128
#define ASSERT(expression,...)
Definition assert.hpp:49
void assert_equal(const field_t &rhs, std::string const &msg="field_t::assert_equal") const
Copy constraint: constrain that *this field is equal to rhs element.
Definition field.cpp:929
Builder * get_context() const
Definition field.hpp:389
bb::fr get_value() const
Given a := *this, compute its value given by a.v * a.mul + a.add.
Definition field.cpp:827
void convert_constant_to_fixed_witness(Builder *ctx)
Definition field.hpp:414
bool is_constant() const
Definition field.hpp:399
void aes128_encrypt_buffer_cbc(uint8_t *buffer, uint8_t *iv, const uint8_t *key, const size_t length)
Definition aes128.cpp:233
@ AES_NORMALIZE
Definition types.hpp:97
byte_pair< Builder > apply_aes_sbox_map(Builder *, field_t< Builder > &input)
Definition aes128.cpp:30
void mix_columns_and_add_round_key(byte_pair< Builder > *state_pairs, field_t< Builder > *round_key, uint64_t round)
Definition aes128.cpp:210
field_t< Builder > normalize_sparse_form(Builder *, field_t< Builder > &byte)
Definition aes128.cpp:24
void xor_with_iv(byte_pair< Builder > *state, field_t< Builder > *iv)
Definition aes128.cpp:235
constexpr uint32_t AES128_BASE
Definition aes128.cpp:22
std::array< field_t< Builder >, 16 > convert_into_sparse_bytes(Builder *ctx, const field_t< Builder > &block_data)
Definition aes128.cpp:36
void sub_bytes(Builder *ctx, byte_pair< Builder > *state_pairs)
Definition aes128.cpp:218
std::pair< field_t< Builder >, field_t< Builder > > byte_pair
Definition aes128.cpp:19
std::array< field_t< Builder >, 176 > expand_key(Builder *ctx, const field_t< Builder > &key)
Definition aes128.cpp:75
void add_round_key(byte_pair< Builder > *sparse_state, field_t< Builder > *sparse_round_key, uint64_t round)
Definition aes128.cpp:226
void aes128_cipher(Builder *ctx, byte_pair< Builder > *state, field_t< Builder > *sparse_round_key)
Definition aes128.cpp:243
void shift_rows(byte_pair< Builder > *state)
Definition aes128.cpp:169
field_t< Builder > convert_from_sparse_bytes(Builder *ctx, field_t< Builder > *sparse_bytes)
Definition aes128.cpp:52
std::vector< field_t< Builder > > encrypt_buffer_cbc(const std::vector< field_t< Builder > > &input, const field_t< Builder > &iv, const field_t< Builder > &key)
Definition aes128.cpp:265
void mix_column_and_add_round_key(byte_pair< Builder > *column_pairs, field_t< Builder > *round_key, uint64_t round)
Definition aes128.cpp:192
std::conditional_t< IsGoblinBigGroup< C, Fq, Fr, G >, element_goblin::goblin_element< C, goblin_field< C >, Fr, G >, element_default::element< C, Fq, Fr, G > > element
element wraps either element_default::element or element_goblin::goblin_element depending on parametr...
field< Bn254FrParams > fr
Definition fr.hpp:174
constexpr decltype(auto) get(::tuplet::tuple< T... > &&t) noexcept
Definition tuple.hpp:13
#define INSTANTIATE_ENCRYPT_BUFFER_CBC(Builder)
Definition aes128.cpp:373