uint32_t round_constant,
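// S1 (Σ1 in FIPS 180-4) = rotr(e, 6) ^ rotr(e, 11) ^ rotr(e, 25); rot_11 and rot_25 come from
// ror_with_witness calls analogous to the rot_6 one below (not shown here). Assumption about the
// witness layout: ror_with_witness appears to split x into lhs = x >> k and rhs = x & (2^k - 1),
// so that x = lhs * 2^k + rhs and rotr(x, k) = rhs * 2^(32 - k) + lhs, which is why the
// two_pow_* columns are set to 2^6 = 64, 2^11 = 2048 and 2^25 = 33554432.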
uint32_t rot_6 =
    ror_with_witness(state[4], 6, C::sha256_e_rotr_6, C::sha256_lhs_e_6, C::sha256_rhs_e_6, trace);
trace.set(C::sha256_two_pow_6, row, 64);
trace.set(C::sha256_two_pow_11, row, 2048);
trace.set(C::sha256_two_pow_25, row, 33554432);
trace.set(C::sha256_e_rotr_6_xor_e_rotr_11, row, rot_6 ^ rot_11);
uint64_t S1 = rot_6 ^ rot_11 ^ rot_25;
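// ch(e, f, g) = (e & f) ^ (~e & g); the intermediate NOT/AND values are committed to the trace so
// the bitwise relation can be checked (presumably via bitwise lookups).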
uint32_t not_e = ~state[4];
trace.set(C::sha256_not_e, row, not_e);
uint32_t e_and_f = state[4] & state[5];
trace.set(C::sha256_e_and_f, row, e_and_f);
uint32_t not_e_and_g = not_e & state[6];
trace.set(C::sha256_not_e_and_g, row, not_e_and_g);
uint64_t ch = e_and_f ^ not_e_and_g;
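// S0 (Σ0 in FIPS 180-4) = rotr(a, 2) ^ rotr(a, 13) ^ rotr(a, 22). As above, only the rotr-2
// witness appears here; the power columns hold 2^2 = 4, 2^13 = 8192 and 2^22 = 4194304.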
uint32_t rot_2 =
    ror_with_witness(state[0], 2, C::sha256_a_rotr_2, C::sha256_lhs_a_2, C::sha256_rhs_a_2, trace);
trace.set(C::sha256_two_pow_2, row, 4);
trace.set(C::sha256_two_pow_13, row, 8192);
trace.set(C::sha256_two_pow_22, row, 4194304);
trace.set(C::sha256_a_rotr_2_xor_a_rotr_13, row, rot_2 ^ rot_13);
uint64_t S0 = rot_2 ^ rot_13 ^ rot_22;
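// maj(a, b, c) = (a & b) ^ (a & c) ^ (b & c), with the pairwise ANDs and the first XOR witnessed.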
uint32_t a_and_b = state[0] & state[1];
trace.set(C::sha256_a_and_b, row, a_and_b);
uint32_t a_and_c = state[0] & state[2];
trace.set(C::sha256_a_and_c, row, a_and_c);
uint32_t b_and_c = state[1] & state[2];
trace.set(C::sha256_b_and_c, row, b_and_c);
trace.set(C::sha256_a_and_b_xor_a_and_c, row, a_and_b ^ a_and_c);
uint64_t maj = a_and_b ^ a_and_c ^ b_and_c;
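// Standard round update: temp1 = h + S1 + ch + K_t + W_t, temp2 = S0 + maj, new a = temp1 + temp2,
// new e = d + temp1. The sums are kept in uint64_t and only reduced modulo 2^32 by the casts,
// presumably so the circuit can witness the carries against the two_pow_32 column.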
uint64_t temp1 = static_cast<uint64_t>(state[7]) + S1 + ch + round_constant + round_w;
uint64_t temp2 = S0 + maj;
uint64_t next_a = temp1 + temp2;
trace.set(C::sha256_round_constant, row, round_constant);
uint32_t a = static_cast<uint32_t>(next_a);
uint64_t next_e = state[3] + temp1;
uint32_t e = static_cast<uint32_t>(next_e);
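// Per-event trace generation. Each SHA-256 compression event expands into: a start row that reads
// the initial state from memory, up to 16 rows that load the message words, 64 round rows, and a
// final row that writes the digest back to memory; error paths set err/latch and stop early.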
for (const auto& event : events) {
    uint64_t state_addr = static_cast<uint64_t>(event.state_addr);
    uint64_t input_addr = static_cast<uint64_t>(event.input_addr);
    uint64_t output_addr = static_cast<uint64_t>(event.output_addr);

    uint64_t max_state_addr = state_addr + 7;
    uint64_t max_input_addr = input_addr + 15;
    uint64_t max_output_addr = output_addr + 7;
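    // The state and output regions each span 8 contiguous 32-bit words and the input spans 16, so
    // the max_* addresses are the last memory slots touched; they feed the range checks below.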
        { C::sha256_sel, 1 },
        { C::sha256_start, 1 },
        { C::sha256_execution_clk, event.execution_clk },
        { C::sha256_space_id, event.space_id },

        { C::sha256_state_addr, state_addr },
        { C::sha256_input_addr, input_addr },
        { C::sha256_output_addr, output_addr },

        { C::sha256_max_state_addr, max_state_addr },
        { C::sha256_max_input_addr, max_input_addr },
        { C::sha256_max_output_addr, max_output_addr },
        { C::sha256_input_rounds_rem, 16 },
        { C::sha256_sel_is_input_round, 1 },
        { C::sha256_rounds_remaining, 64 },
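    // The *_out_of_range flags are presumably derived by comparing the max_* addresses against the
    // highest addressable memory slot (not shown). Any overflow aborts the event on this row.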
    bool out_of_range_err = output_out_of_range || input_out_of_range || state_out_of_range;
    if (out_of_range_err) {
            { C::sha256_sel_state_out_of_range_err, state_out_of_range ? 1 : 0 },
            { C::sha256_sel_input_out_of_range_err, input_out_of_range ? 1 : 0 },
            { C::sha256_sel_output_out_of_range_err, output_out_of_range ? 1 : 0 },
            { C::sha256_mem_out_of_range_err, 1 },
            { C::sha256_err, 1 },
            { C::sha256_latch, 1 },
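    // Happy path: the start row also acts as the memory read of the eight state words. Addresses,
    // values (duplicated into init_a..init_h as the round-0 working variables) and memory tags are
    // all witnessed.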
        { C::sha256_sel_mem_state_or_output, 1 },

        { C::sha256_memory_address_0_, state_addr },
        { C::sha256_memory_address_1_, state_addr + 1 },
        { C::sha256_memory_address_2_, state_addr + 2 },
        { C::sha256_memory_address_3_, state_addr + 3 },
        { C::sha256_memory_address_4_, state_addr + 4 },
        { C::sha256_memory_address_5_, state_addr + 5 },
        { C::sha256_memory_address_6_, state_addr + 6 },
        { C::sha256_memory_address_7_, state_addr + 7 },

        { C::sha256_memory_register_0_, event.state[0].as_ff() },
        { C::sha256_memory_register_1_, event.state[1].as_ff() },
        { C::sha256_memory_register_2_, event.state[2].as_ff() },
        { C::sha256_memory_register_3_, event.state[3].as_ff() },
        { C::sha256_memory_register_4_, event.state[4].as_ff() },
        { C::sha256_memory_register_5_, event.state[5].as_ff() },
        { C::sha256_memory_register_6_, event.state[6].as_ff() },
        { C::sha256_memory_register_7_, event.state[7].as_ff() },

        { C::sha256_init_a, event.state[0].as_ff() },
        { C::sha256_init_b, event.state[1].as_ff() },
        { C::sha256_init_c, event.state[2].as_ff() },
        { C::sha256_init_d, event.state[3].as_ff() },
        { C::sha256_init_e, event.state[4].as_ff() },
        { C::sha256_init_f, event.state[5].as_ff() },
        { C::sha256_init_g, event.state[6].as_ff() },
        { C::sha256_init_h, event.state[7].as_ff() },

        { C::sha256_memory_tag_0_, static_cast<uint8_t>(event.state[0].get_tag()) },
        { C::sha256_memory_tag_1_, static_cast<uint8_t>(event.state[1].get_tag()) },
        { C::sha256_memory_tag_2_, static_cast<uint8_t>(event.state[2].get_tag()) },
        { C::sha256_memory_tag_3_, static_cast<uint8_t>(event.state[3].get_tag()) },
        { C::sha256_memory_tag_4_, static_cast<uint8_t>(event.state[4].get_tag()) },
        { C::sha256_memory_tag_5_, static_cast<uint8_t>(event.state[5].get_tag()) },
        { C::sha256_memory_tag_6_, static_cast<uint8_t>(event.state[6].get_tag()) },
        { C::sha256_memory_tag_7_, static_cast<uint8_t>(event.state[7].get_tag()) },
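    // Every state word must carry the U32 tag. On failure, batched_check accumulates the tag
    // differences over all eight words and sha256_batch_tag_inv holds its field inverse,
    // presumably so the circuit can prove the batched difference is non-zero.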
    bool invalid_state_tag_err = std::ranges::any_of(
        event.state, [](const MemoryValue& state) { return state.get_tag() != MemoryTag::U32; });

    if (invalid_state_tag_err) {
        uint64_t batched_check = 0;
        for (uint32_t i = 0; i < event.state.size(); i++) {
            (static_cast<uint64_t>(event.state[i].get_tag()) - static_cast<uint64_t>(MemoryTag::U32))

            { C::sha256_sel_invalid_state_tag_err, 1 },
            { C::sha256_batch_tag_inv, FF(batched_check).invert() },
            { C::sha256_latch, 1 },
            { C::sha256_err, 1 },
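    // Input loading: one row per message word read from memory, counting input_rounds_rem down
    // from 16. event.input appears to be truncated by the simulator at the first wrongly tagged
    // word, so only its last recorded element can carry a non-U32 tag; that row raises the error
    // and latches.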
    bool invalid_tag_err = event.input.back().get_tag() != MemoryTag::U32;
    for (uint32_t i = 0; i < event.input.size(); i++) {
        uint32_t input_rounds_rem = 16 - i;
        FF input_rounds_rem_inv = input_rounds_rem == 0 ? 0 : FF(input_rounds_rem).invert();

        FF input_tag = FF(static_cast<uint8_t>(round_input.get_tag()));

        FF input_tag_diff = input_tag - expected_tag;
        FF input_tag_diff_inv = input_tag_diff == 0 ? 0 : input_tag_diff.invert();

        bool is_last = (i == event.input.size() - 1);

            { C::sha256_sel, 1 },
            { C::sha256_execution_clk, event.execution_clk },
            { C::sha256_space_id, event.space_id },
            { C::sha256_output_addr, output_addr },
            { C::sha256_sel_is_input_round, 1 },
            { C::sha256_u32_tag, expected_tag },
            { C::sha256_sel_read_input_from_memory, 1 },

            { C::sha256_input_rounds_rem, input_rounds_rem },
            { C::sha256_input_rounds_rem_inv, input_rounds_rem_inv },
            { C::sha256_input_addr, input_addr + i },
            { C::sha256_input, round_input.as_ff() },
            { C::sha256_input_tag, input_tag },
            { C::sha256_input_tag_diff_inv, input_tag_diff_inv },

            { C::sha256_w, round_input.as_ff() },

            { C::sha256_sel_invalid_input_tag_err, invalid_tag_err ? 1 : 0 },
            { C::sha256_sel_invalid_input_row_tag_err, (is_last && invalid_tag_err) ? 1 : 0 },
            { C::sha256_err, invalid_tag_err ? 1 : 0 },
            { C::sha256_latch, (is_last && invalid_tag_err) ? 1 : 0 },
    if (invalid_tag_err) {
        row += event.input.size();
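    // Convert the recorded state and message words into plain uint32_t values. round_state carries
    // the working variables a..h through the 64 rounds, while state keeps the initial values for
    // the final feed-forward addition.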
    std::array<uint32_t, 8> state;
    std::ranges::transform(event.state.begin(), event.state.end(), state.begin(), [](const MemoryValue& val) {
        return val.as<uint32_t>();
    });

    std::ranges::transform(event.input.begin(),
                           event.input.end(),
                           prev_w_helpers.begin(),
                           [](const MemoryValue& val) { return val.as<uint32_t>(); });

    std::array<uint32_t, 8> round_state = state;
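    // Main loop: 64 round rows. The first 16 rounds take w straight from the loaded message words;
    // afterwards sel_compute_w is set so w is derived from the message schedule. rounds_remaining
    // counts down from 64 and its witnessed inverse shows it is still non-zero.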
    for (size_t i = 0; i < 64; i++) {
        bool is_an_input_round = i < 16;

        FF inv = FF(64 - i).invert();

            { C::sha256_sel, 1 },
            { C::sha256_execution_clk, event.execution_clk },
            { C::sha256_space_id, event.space_id },
            { C::sha256_output_addr, output_addr },

            { C::sha256_two_pow_32, 1UL << 32 },

            { C::sha256_xor_sel, 2 },
            { C::sha256_perform_round, 1 },
            { C::sha256_round_count, i },
            { C::sha256_rounds_remaining, 64 - i },
            { C::sha256_rounds_remaining_inv, inv },
            { C::sha256_w, round_w },
            { C::sha256_sel_compute_w, is_an_input_round ? 0 : 1 },
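        // Slide the 16-word window of previous w values: the schedule for a later round needs
        // w[t-16], w[t-15], w[t-7] and w[t-2], all of which stay inside this window.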
        for (size_t j = 0; j < 15; j++) {
            prev_w_helpers[j] = prev_w_helpers[j + 1];
        }
        prev_w_helpers[15] = round_w;
        { C::sha256_latch, 1 },
        { C::sha256_sel, 1 },
        { C::sha256_xor_sel, 2 },
        { C::sha256_round_count, 64 },
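        // After the 64th round the block latches and the digest is written back to memory. Each
        // output word is the final working variable plus the matching initial state word (the
        // Davies-Meyer feed-forward), wrapping modulo 2^32; the two_pow_32 column presumably
        // witnesses the wrap.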
        { C::sha256_execution_clk, event.execution_clk },
        { C::sha256_space_id, event.space_id },
        { C::sha256_sel_mem_state_or_output, 1 },

        { C::sha256_two_pow_32, 1UL << 32 },
        { C::sha256_output_addr, output_addr },

        { C::sha256_memory_address_0_, output_addr },
        { C::sha256_memory_address_1_, output_addr + 1 },
        { C::sha256_memory_address_2_, output_addr + 2 },
        { C::sha256_memory_address_3_, output_addr + 3 },
        { C::sha256_memory_address_4_, output_addr + 4 },
        { C::sha256_memory_address_5_, output_addr + 5 },
        { C::sha256_memory_address_6_, output_addr + 6 },
        { C::sha256_memory_address_7_, output_addr + 7 },

        { C::sha256_memory_register_0_, round_state[0] + state[0] },
        { C::sha256_memory_register_1_, round_state[1] + state[1] },
        { C::sha256_memory_register_2_, round_state[2] + state[2] },
        { C::sha256_memory_register_3_, round_state[3] + state[3] },
        { C::sha256_memory_register_4_, round_state[4] + state[4] },
        { C::sha256_memory_register_5_, round_state[5] + state[5] },
        { C::sha256_memory_register_6_, round_state[6] + state[6] },
        { C::sha256_memory_register_7_, round_state[7] + state[7] },
        { C::sha256_memory_tag_0_, static_cast<uint8_t>(MemoryTag::U32) },
        { C::sha256_memory_tag_1_, static_cast<uint8_t>(MemoryTag::U32) },
        { C::sha256_memory_tag_2_, static_cast<uint8_t>(MemoryTag::U32) },
        { C::sha256_memory_tag_3_, static_cast<uint8_t>(MemoryTag::U32) },
        { C::sha256_memory_tag_4_, static_cast<uint8_t>(MemoryTag::U32) },
        { C::sha256_memory_tag_5_, static_cast<uint8_t>(MemoryTag::U32) },
        { C::sha256_memory_tag_6_, static_cast<uint8_t>(MemoryTag::U32) },
        { C::sha256_memory_tag_7_, static_cast<uint8_t>(MemoryTag::U32) },