Barretenberg
The ZK-SNARK library at the core of Aztec
Loading...
Searching...
No Matches
instr_fetching.test.cpp
Go to the documentation of this file.
1#include <gmock/gmock.h>
2#include <gtest/gtest.h>
3
4#include <cstdint>
5#include <memory>
6#include <vector>
7
21
22namespace bb::avm2::constraining {
23namespace {
24
// Short aliases for the tracegen builders and simulation event types used by the tests below.
// NOTE(review): this using-block appears truncated by the doc extraction
// (original lines 30, 36-37 and 40 are missing) — confirm against the repository source.
25using tracegen::BytecodeTraceBuilder;
26using tracegen::PrecomputedTraceBuilder;
27using tracegen::RangeCheckTraceBuilder;
28using tracegen::TestTraceContainer;
29
31using C = Column;
32
33using instr_fetching = instr_fetching<FF>;
34
35using simulation::BytecodeDecompositionEvent;
38using simulation::Instruction;
39using simulation::InstructionFetchingEvent;
41using simulation::RangeCheckEvent;
42
43TEST(InstrFetchingConstrainingTest, EmptyRow)
44{
45 check_relation<instr_fetching>(testing::empty_trace());
46}
47
48// Basic positive test with a hardcoded bytecode for ADD_8
49TEST(InstrFetchingConstrainingTest, Add8WithTraceGen)
50{
51 TestTraceContainer trace;
52 BytecodeTraceBuilder builder;
53 PrecomputedTraceBuilder precomputed_builder;
54
55 Instruction add_8_instruction = {
56 .opcode = WireOpCode::ADD_8,
57 .indirect = 3,
58 .operands = { Operand::from<uint8_t>(0x34), Operand::from<uint8_t>(0x35), Operand::from<uint8_t>(0x36) },
59 };
60
61 std::vector<uint8_t> bytecode = add_8_instruction.serialize();
62
63 builder.process_instruction_fetching({ { .bytecode_id = 1,
64 .pc = 0,
65 .instruction = add_8_instruction,
66 .bytecode = std::make_shared<std::vector<uint8_t>>(bytecode) } },
67 trace);
68 precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.
69
70 EXPECT_EQ(trace.get_num_rows(), 2);
71 check_relation<instr_fetching>(trace);
72}
73
74// Basic positive test with a hardcoded bytecode for ECADD
75// Cover the longest amount of operands.
76TEST(InstrFetchingConstrainingTest, EcaddWithTraceGen)
77{
78 TestTraceContainer trace;
79 BytecodeTraceBuilder builder;
80 PrecomputedTraceBuilder precomputed_builder;
81
82 Instruction ecadd_instruction = {
83 .opcode = WireOpCode::ECADD,
84 .indirect = 0x1f1f,
85 .operands = { Operand::from<uint16_t>(0x1279),
86 Operand::from<uint16_t>(0x127a),
87 Operand::from<uint16_t>(0x127b),
88 Operand::from<uint16_t>(0x127c),
89 Operand::from<uint16_t>(0x127d),
90 Operand::from<uint16_t>(0x127e),
91 Operand::from<uint16_t>(0x127f) },
92 };
93
94 std::vector<uint8_t> bytecode = ecadd_instruction.serialize();
95 builder.process_instruction_fetching({ { .bytecode_id = 1,
96 .pc = 0,
97 .instruction = ecadd_instruction,
98 .bytecode = std::make_shared<std::vector<uint8_t>>(bytecode) } },
99 trace);
100 precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.
101
102 EXPECT_EQ(trace.get_num_rows(), 2);
103 check_relation<instr_fetching>(trace);
104}
105
106// Helper routine generating a vector of instruction fetching events for each
107// opcode.
// Builds one random instruction per wire opcode, concatenates their serialized
// bytes into a single shared bytecode, and emits one fetching event per instruction.
// NOTE(review): the doc extraction dropped the declarations of `pc_positions`
// (original line 114) and `instr_events` (original line 129) — confirm in the repo.
108std::vector<InstructionFetchingEvent> gen_instr_events_each_opcode()
109{
110 std::vector<uint8_t> bytecode;
111 std::vector<Instruction> instructions;
112 constexpr auto num_opcodes = static_cast<size_t>(WireOpCode::LAST_OPCODE_SENTINEL);
113 instructions.reserve(num_opcodes);
115
116 for (size_t i = 0; i < num_opcodes; i++) {
// Record the byte offset (pc) at which instruction i starts in the bytecode.
117 pc_positions.at(i) = static_cast<uint32_t>(bytecode.size());
118 const auto instr = testing::random_instruction(static_cast<WireOpCode>(i));
119 instructions.emplace_back(instr);
120 const auto instruction_bytes = instr.serialize();
// NOTE(review): make_move_iterator over a const vector degrades to copying
// (moving from const elements is a copy) — harmless, but the moves are illusory.
121 bytecode.insert(bytecode.end(),
122 std::make_move_iterator(instruction_bytes.begin()),
123 std::make_move_iterator(instruction_bytes.end()));
124 }
125
126 const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(std::move(bytecode));
127 // Always use *bytecode_ptr from now on instead of bytecode as this one was moved.
128
130 instr_events.reserve(num_opcodes);
131 for (size_t i = 0; i < num_opcodes; i++) {
132 instr_events.emplace_back(InstructionFetchingEvent{
133 .bytecode_id = 1, .pc = pc_positions.at(i), .instruction = instructions.at(i), .bytecode = bytecode_ptr });
134 }
135 return instr_events;
136}
137
138// Positive test for each opcode. We assume that decode instruction is working correctly.
139// It works as long as the relations are not constraining the correct range for TAG nor indirect.
140TEST(InstrFetchingConstrainingTest, EachOpcodeWithTraceGen)
141{
142 TestTraceContainer trace;
143 BytecodeTraceBuilder builder;
144 PrecomputedTraceBuilder precomputed_builder;
145
146 builder.process_instruction_fetching(gen_instr_events_each_opcode(), trace);
147 precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.
148
149 constexpr auto num_opcodes = static_cast<size_t>(WireOpCode::LAST_OPCODE_SENTINEL);
150 EXPECT_EQ(trace.get_num_rows(), num_opcodes + 1);
151 check_relation<instr_fetching>(trace);
152}
153
154// Negative test about decomposition of operands. We mutate correct operand values in the trace.
155// This also covers wrong operands which are not "involved" by the instruction.
156// We perform this for a random instruction for opcodes: REVERT_16, CAST_8, TORADIXBE
// NOTE(review): the doc extraction dropped the declaration of `opcodes` (original
// lines 161-162) and the entries of `sub_relations` (original lines 164-167),
// presumably the SR_*_BYTES_DECOMPOSITION subrelation indices — confirm in the repo.
157TEST(InstrFetchingConstrainingTest, NegativeWrongOperand)
158{
159 BytecodeTraceBuilder builder;
160 PrecomputedTraceBuilder precomputed_builder;
161
163 std::vector<size_t> sub_relations = {
168 };
169
// Operand columns mutated one at a time; sub_relations.at(i) is the matching subrelation.
170 constexpr std::array<C, 8> operand_cols = {
171 C::instr_fetching_indirect, C::instr_fetching_op1, C::instr_fetching_op2, C::instr_fetching_op3,
172 C::instr_fetching_op4, C::instr_fetching_op5, C::instr_fetching_op6, C::instr_fetching_op7,
173 };
174
175 for (const auto& opcode : opcodes) {
176 TestTraceContainer trace;
177 const auto instr = testing::random_instruction(opcode);
178 builder.process_instruction_fetching(
179 { { .bytecode_id = 1,
180 .pc = 0,
181 .instruction = instr,
182 .bytecode = std::make_shared<std::vector<uint8_t>>(instr.serialize()) } },
183 trace);
184 precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.
185
// Sanity check: the unmutated trace satisfies the relations.
186 check_relation<instr_fetching>(trace);
187
188 EXPECT_EQ(trace.get_num_rows(), 2);
189
190 for (size_t i = 0; i < operand_cols.size(); i++) {
191 auto mutated_trace = trace;
192 const FF mutated_operand = trace.get(operand_cols.at(i), 0) + 1; // Mutate to value + 1
193 mutated_trace.set(operand_cols.at(i), 0, mutated_operand);
194 EXPECT_THROW_WITH_MESSAGE(check_relation<instr_fetching>(mutated_trace, sub_relations.at(i)),
195 instr_fetching::get_subrelation_label(sub_relations.at(i)));
196 }
197 }
198}
199
200// Positive test for interaction with instruction spec table using same events as for the test
201// EachOpcodeWithTraceGen, i.e., one event/row is generated per wire opcode.
202// It works as long as the relations are not constraining the correct range for TAG nor indirect.
203TEST(InstrFetchingConstrainingTest, WireInstructionSpecInteractions)
204{
205 TestTraceContainer trace;
206 BytecodeTraceBuilder bytecode_builder;
207 PrecomputedTraceBuilder precomputed_builder;
208
209 precomputed_builder.process_wire_instruction_spec(trace);
210 precomputed_builder.process_sel_range_8(trace);
211 bytecode_builder.process_instruction_fetching(gen_instr_events_each_opcode(), trace);
212 precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.
213
214 EXPECT_EQ(trace.get_num_rows(), 1 << 8); // 2^8 for selector against wire_instruction_spec
215
216 check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_wire_instruction_info_settings>(trace);
217 check_relation<instr_fetching>(trace);
218}
219
220std::vector<RangeCheckEvent> gen_range_check_events(const std::vector<InstructionFetchingEvent>& instr_events)
221{
222 std::vector<RangeCheckEvent> range_check_events;
223 range_check_events.reserve(instr_events.size());
224
225 for (const auto& instr_event : instr_events) {
226 range_check_events.emplace_back(RangeCheckEvent{
227 .value = instr_event.error == InstrDeserializationError::PC_OUT_OF_RANGE
228 ? instr_event.pc - instr_event.bytecode->size()
229 : instr_event.bytecode->size() - instr_event.pc - 1,
230 .num_bits = AVM_PC_SIZE_IN_BITS,
231 });
232 }
233 return range_check_events;
234}
235
236// Positive test for the interaction with bytecode decomposition table.
237// One event/row is generated per wire opcode (same as for test WireInstructionSpecInteractions).
238TEST(InstrFetchingConstrainingTest, BcDecompositionInteractions)
239{
240 TestTraceContainer trace;
241 BytecodeTraceBuilder bytecode_builder;
242 PrecomputedTraceBuilder precomputed_builder;
243
244 const auto instr_fetch_events = gen_instr_events_each_opcode();
245 bytecode_builder.process_instruction_fetching(instr_fetch_events, trace);
// All events share one bytecode, so a single decomposition event suffices.
246 bytecode_builder.process_decomposition({ {
247 .bytecode_id = instr_fetch_events.at(0).bytecode_id,
248 .bytecode = instr_fetch_events.at(0).bytecode,
249 } },
250 trace);
251 precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.
252
// NOTE(review): the lookup settings argument (original lines 254-255) was dropped
// by the doc extraction — presumably lookup_instr_fetching_bytes_from_bc_dec_settings.
253 check_interaction<BytecodeTraceBuilder,
256
257 // BC Decomposition trace is the longest here.
258 EXPECT_EQ(trace.get_num_rows(), instr_fetch_events.at(0).bytecode->size() + 1);
259
260 check_relation<instr_fetching>(trace);
261}
262
// Builds a full trace (instruction fetching + bytecode decomposition + range
// checks + all needed precomputed tables), then checks all interactions and relations.
// NOTE(review): the third parameter (original line 265) was dropped by the doc extraction —
// presumably `const std::vector<BytecodeDecompositionEvent>& decomposition_events)`.
263void check_all(const std::vector<InstructionFetchingEvent>& instr_events,
264 const std::vector<RangeCheckEvent>& range_check_events,
266{
267 TestTraceContainer trace;
268 BytecodeTraceBuilder bytecode_builder;
269 PrecomputedTraceBuilder precomputed_builder;
270 RangeCheckTraceBuilder range_check_builder;
271
272 precomputed_builder.process_wire_instruction_spec(trace);
273 precomputed_builder.process_sel_range_8(trace);
274 precomputed_builder.process_sel_range_16(trace);
275 precomputed_builder.process_memory_tag_range(trace);
276 bytecode_builder.process_instruction_fetching(instr_events, trace);
277 bytecode_builder.process_decomposition(decomposition_events, trace);
278 range_check_builder.process(range_check_events, trace);
279 precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.
280
// NOTE(review): the lookup settings list (original lines 282-287) is elided by the extraction.
281 check_interaction<BytecodeTraceBuilder,
288
289 EXPECT_EQ(trace.get_num_rows(), 1 << 16); // 2^16 for range checks
290
291 check_relation<instr_fetching>(trace);
292}
293
// Same as check_all but without the range-check trace (smaller 2^8 trace), for
// error cases whose events do not need a range-check counterpart.
// NOTE(review): the second parameter (original line 295) was dropped by the doc extraction —
// presumably `const std::vector<BytecodeDecompositionEvent>& decomposition_events)`.
294void check_without_range_check(const std::vector<InstructionFetchingEvent>& instr_events,
296{
297 TestTraceContainer trace;
298 BytecodeTraceBuilder bytecode_builder;
299 PrecomputedTraceBuilder precomputed_builder;
300
301 precomputed_builder.process_wire_instruction_spec(trace);
302 precomputed_builder.process_sel_range_8(trace);
303 precomputed_builder.process_memory_tag_range(trace);
304 bytecode_builder.process_instruction_fetching(instr_events, trace);
305 bytecode_builder.process_decomposition(decomposition_events, trace);
306 precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.
307
// NOTE(review): the lookup settings list (original lines 309-313) is elided by the extraction.
308 check_interaction<BytecodeTraceBuilder,
314
315 EXPECT_EQ(trace.get_num_rows(), 1 << 8); // 2^8 for range checks
316
317 check_relation<instr_fetching>(trace);
318}
319
320// Positive test with 5 five bytecodes and bytecode_id = 0,1,2,3,4
321// Bytecode i is generated by truncating instr_fetch_events to i * 6 instructions.
322// Check relations and all interactions.
// NOTE(review): the declarations of `instr_events` and `decomposition_events`
// (original lines 327-328) were dropped by the doc extraction — confirm in the repo.
323TEST(InstrFetchingConstrainingTest, MultipleBytecodes)
324{
325 const auto instr_fetch_events = gen_instr_events_each_opcode();
326 constexpr size_t num_of_bytecodes = 5;
329
330 for (size_t i = 0; i < num_of_bytecodes; i++) {
331 std::vector<uint8_t> bytecode;
332 const auto num_of_instr = i * 6;
333
// Serialize the first i * 6 instructions into this bytecode.
334 for (size_t j = 0; j < num_of_instr; j++) {
335 const auto& instr = instr_fetch_events.at(j).instruction;
336 const auto instruction_bytes = instr.serialize();
337 bytecode.insert(bytecode.end(),
338 std::make_move_iterator(instruction_bytes.begin()),
339 std::make_move_iterator(instruction_bytes.end()));
340 }
341
342 const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(std::move(bytecode));
343
// Re-target the truncated events at this bytecode id and shared bytecode.
344 for (size_t j = 0; j < num_of_instr; j++) {
345 auto instr_event = instr_fetch_events.at(j);
346 instr_event.bytecode_id = static_cast<BytecodeId>(i);
347 instr_event.bytecode = bytecode_ptr;
348 instr_events.emplace_back(instr_event);
349 }
350
351 decomposition_events.emplace_back(BytecodeDecompositionEvent{
352 .bytecode_id = static_cast<BytecodeId>(i),
353 .bytecode = bytecode_ptr,
354 });
355 }
356
357 check_all(instr_events, gen_range_check_events(instr_events), decomposition_events);
358}
359
360// Positive test with one single instruction with error INSTRUCTION_OUT_OF_RANGE.
361// The bytecode consists into a serialized single instruction with pc = 0 and
362// the bytecode had the last byte removed. This byte corresponds to a full operand.
363TEST(InstrFetchingConstrainingTest, SingleInstructionOutOfRange)
364{
365 Instruction add_8_instruction = {
366 .opcode = WireOpCode::ADD_8,
367 .indirect = 3,
368 .operands = { Operand::from<uint8_t>(0x34), Operand::from<uint8_t>(0x35), Operand::from<uint8_t>(0x36) },
369 };
370
371 std::vector<uint8_t> bytecode = add_8_instruction.serialize();
372 bytecode.pop_back(); // Remove last byte
373 const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(std::move(bytecode));
374
// The event carries no .instruction: deserialization fails with INSTRUCTION_OUT_OF_RANGE.
375 const std::vector<InstructionFetchingEvent> instr_events = {
376 {
377 .bytecode_id = 1,
378 .pc = 0,
379 .bytecode = bytecode_ptr,
380 .error = InstrDeserializationError::INSTRUCTION_OUT_OF_RANGE,
381 },
382 };
383
// NOTE(review): the opening of the decomposition_events vector (original line 384)
// was dropped by the doc extraction — presumably
// `const std::vector<BytecodeDecompositionEvent> decomposition_events = {`.
385 {
386 .bytecode_id = 1,
387 .bytecode = bytecode_ptr,
388 },
389 };
390
391 check_without_range_check(instr_events, decomposition_events);
392}
393
394// Positive test with one single instruction (SET_FF) with error INSTRUCTION_OUT_OF_RANGE.
395// The bytecode consists into a serialized single instruction with pc = 0 and
396// the bytecode had the two last bytes removed. The truncated instruction is cut
397// in the middle of an operand.
398TEST(InstrFetchingConstrainingTest, SingleInstructionOutOfRangeSplitOperand)
399{
400 Instruction set_ff_instruction = {
401 .opcode = WireOpCode::SET_FF,
402 .indirect = 0x01,
403 .operands = { Operand::from<uint16_t>(0x1279),
404 Operand::from<uint8_t>(static_cast<uint8_t>(MemoryTag::FF)),
405 Operand::from<FF>(FF::modulus_minus_two) },
406 };
407
408 std::vector<uint8_t> bytecode = set_ff_instruction.serialize();
409 bytecode.resize(bytecode.size() - 2); // Remove last two bytes)
410 const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(std::move(bytecode));
411
// The truncated FF operand makes deserialization fail with INSTRUCTION_OUT_OF_RANGE.
412 const std::vector<InstructionFetchingEvent> instr_events = {
413 {
414 .bytecode_id = 1,
415 .pc = 0,
416 .bytecode = bytecode_ptr,
417 .error = InstrDeserializationError::INSTRUCTION_OUT_OF_RANGE,
418 },
419 };
420
// NOTE(review): the opening of the decomposition_events vector (original line 421)
// was dropped by the doc extraction — presumably
// `const std::vector<BytecodeDecompositionEvent> decomposition_events = {`.
422 {
423 .bytecode_id = 1,
424 .bytecode = bytecode_ptr,
425 },
426 };
427
428 check_without_range_check(instr_events, decomposition_events);
429}
430
431// Positive test with error case PC_OUT_OF_RANGE. We pass a pc which is out of range.
// NOTE(review): the local is named add_8_instruction but uses WireOpCode::SUB_8 —
// likely copy-paste from the ADD_8 test; behavior is unaffected, only naming.
432TEST(InstrFetchingConstrainingTest, SingleInstructionPcOutOfRange)
433{
434 Instruction add_8_instruction = {
435 .opcode = WireOpCode::SUB_8,
436 .indirect = 3,
437 .operands = { Operand::from<uint8_t>(0x34), Operand::from<uint8_t>(0x35), Operand::from<uint8_t>(0x36) },
438 };
439
440 std::vector<uint8_t> bytecode = add_8_instruction.serialize();
441 const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(std::move(bytecode));
442
443 const std::vector<InstructionFetchingEvent> instr_events = {
444 // We first need a first instruction at pc == 0 as the trace assumes this.
445 {
446 .bytecode_id = 1,
447 .pc = 0,
448 .instruction = add_8_instruction,
449 .bytecode = bytecode_ptr,
450 },
// pc beyond the bytecode end triggers PC_OUT_OF_RANGE.
451 {
452 .bytecode_id = 1,
453 .pc = static_cast<uint32_t>(bytecode_ptr->size() + 1),
454 .bytecode = bytecode_ptr,
455 .error = InstrDeserializationError::PC_OUT_OF_RANGE,
456 },
457 };
458
// NOTE(review): the opening of the decomposition_events vector (original line 459)
// was dropped by the doc extraction — presumably
// `const std::vector<BytecodeDecompositionEvent> decomposition_events = {`.
460 {
461 .bytecode_id = 1,
462 .bytecode = bytecode_ptr,
463 },
464 };
465
466 check_all(instr_events, gen_range_check_events(instr_events), decomposition_events);
467}
468
469// Positive test with error case OPCODE_OUT_OF_RANGE. We generate bytecode of a SET_128 instruction and
470// move the PC to a position corresponding to the beginning of the 128-bit immediate value of SET_128.
471// The immediate value in SET_128 starts with byte 0xFF (which we know is not a valid opcode).
472TEST(InstrFetchingConstrainingTest, SingleInstructionOpcodeOutOfRange)
473{
474 Instruction set_128_instruction = {
475 .opcode = WireOpCode::SET_128,
476 .indirect = 0,
477 .operands = { Operand::from<uint16_t>(0x1234),
478 Operand::from<uint8_t>(static_cast<uint8_t>(MemoryTag::U128)),
// Place 0xFF as the most significant byte so it is the first byte read at pc = 5.
479 Operand::from<uint128_t>(static_cast<uint128_t>(0xFF) << 120) },
480 };
481
482 std::vector<uint8_t> bytecode = set_128_instruction.serialize();
483 const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(std::move(bytecode));
484
485 const std::vector<InstructionFetchingEvent> instr_events = {
486 {
487 .bytecode_id = 1,
488 .pc = 0,
489 .instruction = set_128_instruction,
490 .bytecode = bytecode_ptr,
491 },
492 {
493 .bytecode_id = 1,
494 .pc = 5, // We move pc to the beginning of the 128-bit immediate value.
495 .bytecode = bytecode_ptr,
496 .error = InstrDeserializationError::OPCODE_OUT_OF_RANGE,
497 },
498 };
499
// NOTE(review): the opening of the decomposition_events vector (original line 500)
// was dropped by the doc extraction — presumably
// `const std::vector<BytecodeDecompositionEvent> decomposition_events = {`.
501 {
502 .bytecode_id = 1,
503 .bytecode = bytecode_ptr,
504 },
505 };
506
507 check_without_range_check(instr_events, decomposition_events);
508}
509
510// Positive test with one single instruction (SET_16) with error TAG_OUT_OF_RANGE.
511// The bytecode consists into a serialized single instruction with pc = 0.
512// The operand at index 1 is wrongly set to value 12
513TEST(InstrFetchingConstrainingTest, SingleInstructionTagOutOfRange)
514{
515 Instruction set_16_instruction = {
516 .opcode = WireOpCode::SET_16,
517 .indirect = 0,
// Operand index 1 holds the memory tag; 12 is outside the valid tag range.
518 .operands = { Operand::from<uint16_t>(0x1234), Operand::from<uint8_t>(12), Operand::from<uint16_t>(0x5678) },
519 };
520
521 std::vector<uint8_t> bytecode = set_16_instruction.serialize();
522 const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(std::move(bytecode));
523
524 const std::vector<InstructionFetchingEvent> instr_events = {
525 {
526 .bytecode_id = 1,
527 .pc = 0,
528 .instruction = set_16_instruction,
529 .bytecode = bytecode_ptr,
530 .error = InstrDeserializationError::TAG_OUT_OF_RANGE,
531 },
532 };
533
// NOTE(review): the opening of the decomposition_events vector (original line 534)
// was dropped by the doc extraction — presumably
// `const std::vector<BytecodeDecompositionEvent> decomposition_events = {`.
535 {
536 .bytecode_id = 1,
537 .bytecode = bytecode_ptr,
538 },
539 };
540
541 check_without_range_check(instr_events, decomposition_events);
542}
543
544// Negative interaction test with some values not matching the instruction spec table.
// NOTE(review): the declaration of `opcodes` (original line 552) was dropped by the
// doc extraction, as was the EXPECT_THROW_WITH_MESSAGE opening (original line 587).
545TEST(InstrFetchingConstrainingTest, NegativeWrongWireInstructionSpecInteractions)
546{
547 BytecodeTraceBuilder bytecode_builder;
548 PrecomputedTraceBuilder precomputed_builder;
549
550 // Some arbitrary chosen opcodes. We limit to one as this unit test is costly.
551 // Test works if the following vector is extended to other opcodes though.
553
554 for (const auto& opcode : opcodes) {
555 TestTraceContainer trace;
556 const auto instr = testing::random_instruction(opcode);
557 bytecode_builder.process_instruction_fetching(
558 { { .bytecode_id = 1,
559 .pc = 0,
560 .instruction = instr,
561 .bytecode = std::make_shared<std::vector<uint8_t>>(instr.serialize()) } },
562 trace);
563 precomputed_builder.process_wire_instruction_spec(trace);
564 precomputed_builder.process_sel_range_8(trace);
565 precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.
566
// Sanity check: the unmutated trace satisfies the lookup.
567 check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_wire_instruction_info_settings>(trace);
568
569 ASSERT_EQ(trace.get(C::lookup_instr_fetching_wire_instruction_info_counts, static_cast<uint32_t>(opcode)), 1);
570
// Every column participating in the wire-instruction-spec lookup tuple.
571 constexpr std::array<C, 21> mutated_cols = {
572 C::instr_fetching_exec_opcode, C::instr_fetching_instr_size, C::instr_fetching_sel_has_tag,
573 C::instr_fetching_sel_tag_is_op2, C::instr_fetching_sel_op_dc_0, C::instr_fetching_sel_op_dc_1,
574 C::instr_fetching_sel_op_dc_2, C::instr_fetching_sel_op_dc_3, C::instr_fetching_sel_op_dc_4,
575 C::instr_fetching_sel_op_dc_5, C::instr_fetching_sel_op_dc_6, C::instr_fetching_sel_op_dc_7,
576 C::instr_fetching_sel_op_dc_8, C::instr_fetching_sel_op_dc_9, C::instr_fetching_sel_op_dc_10,
577 C::instr_fetching_sel_op_dc_11, C::instr_fetching_sel_op_dc_12, C::instr_fetching_sel_op_dc_13,
578 C::instr_fetching_sel_op_dc_14, C::instr_fetching_sel_op_dc_15, C::instr_fetching_sel_op_dc_16,
579 };
580
581 // Mutate execution opcode
582 for (const auto& col : mutated_cols) {
583 auto mutated_trace = trace;
584 const FF mutated_value = trace.get(col, 1) + 1; // Mutate to value + 1
585 mutated_trace.set(col, 1, mutated_value);
586
588 (check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_wire_instruction_info_settings>(
589 mutated_trace)),
590 "Failed.*LOOKUP_INSTR_FETCHING_WIRE_INSTRUCTION_INFO.*Could not find tuple in destination.");
591 }
592 }
593}
594
595// Negative interaction test with some values not matching the bytecode decomposition table.
// NOTE(review): the declaration of `opcodes` (original line 603) was dropped by the
// doc extraction, as was the EXPECT_THROW_WITH_MESSAGE opening (original line 646).
// Also note the outer `trace` (line 598) is shadowed by the loop-local one (line 606).
596TEST(InstrFetchingConstrainingTest, NegativeWrongBcDecompositionInteractions)
597{
598 TestTraceContainer trace;
599 BytecodeTraceBuilder bytecode_builder;
600
601 // Some arbitrary chosen opcodes. We limit to one as this unit test is costly.
602 // Test works if the following vector is extended to other opcodes though.
604
605 for (const auto& opcode : opcodes) {
606 TestTraceContainer trace;
607 const auto instr = testing::random_instruction(opcode);
608 auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(instr.serialize());
609 bytecode_builder.process_instruction_fetching({ {
610 .bytecode_id = 1,
611 .pc = 0,
612 .instruction = instr,
613 .bytecode = bytecode_ptr,
614 } },
615 trace);
616 bytecode_builder.process_decomposition({ {
617 .bytecode_id = 1,
618 .bytecode = bytecode_ptr,
619 } },
620 trace);
621
622 auto valid_trace = trace; // Keep original trace before lookup processing
623 check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_bytes_from_bc_dec_settings>(valid_trace);
624
// Every column participating in the bytes-from-bc-decomposition lookup tuple.
625 constexpr std::array<C, 39> mutated_cols = {
626 C::instr_fetching_pc, C::instr_fetching_bytecode_id, C::instr_fetching_bd0, C::instr_fetching_bd1,
627 C::instr_fetching_bd2, C::instr_fetching_bd3, C::instr_fetching_bd4, C::instr_fetching_bd5,
628 C::instr_fetching_bd6, C::instr_fetching_bd7, C::instr_fetching_bd8, C::instr_fetching_bd9,
629 C::instr_fetching_bd10, C::instr_fetching_bd11, C::instr_fetching_bd12, C::instr_fetching_bd13,
630 C::instr_fetching_bd14, C::instr_fetching_bd15, C::instr_fetching_bd16, C::instr_fetching_bd17,
631 C::instr_fetching_bd18, C::instr_fetching_bd19, C::instr_fetching_bd20, C::instr_fetching_bd21,
632 C::instr_fetching_bd22, C::instr_fetching_bd23, C::instr_fetching_bd24, C::instr_fetching_bd25,
633 C::instr_fetching_bd26, C::instr_fetching_bd27, C::instr_fetching_bd28, C::instr_fetching_bd29,
634 C::instr_fetching_bd30, C::instr_fetching_bd31, C::instr_fetching_bd32, C::instr_fetching_bd33,
635 C::instr_fetching_bd34, C::instr_fetching_bd35, C::instr_fetching_bd36,
636 };
637
638 // Mutate execution opcode
639 for (const auto& col : mutated_cols) {
640 auto mutated_trace = trace;
641 const FF mutated_value = trace.get(col, 1) + 1; // Mutate to value + 1
642 mutated_trace.set(col, 1, mutated_value);
643
644 // This sets the length of the inverse polynomial via SetDummyInverses, so we
645 // still need to call this even though we know it will fail.
647 (check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_bytes_from_bc_dec_settings>(
648 mutated_trace)),
649 "Failed.*BYTES_FROM_BC_DEC. Could not find tuple in destination.");
650 }
651 }
652}
653
654// Negative interaction test for #[BYTECODE_SIZE_FROM_BC_DEC] where bytecode_size has the wrong value.
655// We set pc different from zero.
// NOTE(review): the declaration of `opcodes` (original line 667) was dropped by the
// doc extraction, as was the EXPECT_THROW_WITH_MESSAGE opening (original line 702).
// Also note the outer `trace` (line 658) is shadowed by the loop-local one (line 670).
656TEST(InstrFetchingConstrainingTest, NegativeWrongBytecodeSizeBcDecompositionInteractions)
657{
658 TestTraceContainer trace;
659 BytecodeTraceBuilder bytecode_builder;
660 PrecomputedTraceBuilder precomputed_builder;
661
// Pad the bytecode with `pc` filler bytes so the instruction starts at a non-zero pc.
662 const uint32_t pc = 15;
663 std::vector<uint8_t> bytecode(pc, 0x23);
664
665 // Some arbitrary chosen opcodes. We limit to one as this unit test is costly.
666 // Test works if the following vector is extended to other opcodes though.
668
669 for (const auto& opcode : opcodes) {
670 TestTraceContainer trace;
671
672 const auto instr = testing::random_instruction(opcode);
673 const auto instr_bytecode = instr.serialize();
674 bytecode.insert(bytecode.end(),
675 std::make_move_iterator(instr_bytecode.begin()),
676 std::make_move_iterator(instr_bytecode.end()));
677 auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(bytecode);
678
679 bytecode_builder.process_instruction_fetching({ {
680 .bytecode_id = 1,
681 .pc = pc,
682 .instruction = instr,
683 .bytecode = bytecode_ptr,
684 } },
685 trace);
686 bytecode_builder.process_decomposition({ {
687 .bytecode_id = 1,
688 .bytecode = bytecode_ptr,
689 } },
690 trace);
691 precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.
692
693 auto valid_trace = trace; // Keep original trace before lookup processing
694 check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_bytecode_size_from_bc_dec_settings>(valid_trace);
695
696 auto mutated_trace = trace;
697 const FF mutated_value = trace.get(C::instr_fetching_bytecode_size, 1) + 1; // Mutate to value + 1
698 mutated_trace.set(C::instr_fetching_bytecode_size, 1, mutated_value);
699
700 // This sets the length of the inverse polynomial via SetDummyInverses, so we still need to call this
701 // even though we know it will fail.
703 (check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_bytecode_size_from_bc_dec_settings>(
704 mutated_trace)),
705 "Failed.*BYTECODE_SIZE_FROM_BC_DEC. Could not find tuple in destination.");
706 }
707}
708
// Negative interaction test: toggling tag_out_of_range on a row with a valid tag
// must make the tag-value-validation lookup fail.
// NOTE(review): the declaration of `opcodes` (original line 717) was dropped by the
// doc extraction, as was the EXPECT_THROW_WITH_MESSAGE opening (original line 741).
709TEST(InstrFetchingConstrainingTest, NegativeWrongTagValidationInteractions)
710{
711 TestTraceContainer trace;
712 BytecodeTraceBuilder bytecode_builder;
713 PrecomputedTraceBuilder precomputed_builder;
714
715 // Some chosen opcode with a tag. We limit to one as this unit test is costly.
716 // Test works if the following vector is extended to other opcodes though.
718
719 for (const auto& opcode : opcodes) {
720 TestTraceContainer trace;
721 const auto instr = testing::random_instruction(opcode);
722 bytecode_builder.process_instruction_fetching(
723 { { .bytecode_id = 1,
724 .pc = 0,
725 .instruction = instr,
726 .bytecode = std::make_shared<std::vector<uint8_t>>(instr.serialize()) } },
727 trace);
728 precomputed_builder.process_memory_tag_range(trace);
729 precomputed_builder.process_sel_range_8(trace);
730 precomputed_builder.process_misc(trace, trace.get_num_rows()); // Limit to the number of rows we need.
731
// Sanity check: the unmutated trace satisfies the lookup.
732 check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_tag_value_validation_settings>(trace);
733
734 auto valid_trace = trace; // Keep original trace before lookup processing
735
736 // Mutate tag out-of-range error
737 auto mutated_trace = trace;
738 ASSERT_EQ(trace.get(C::instr_fetching_tag_out_of_range, 1), 0);
739 mutated_trace.set(C::instr_fetching_tag_out_of_range, 1, 1); // Mutate by toggling the error.
740
742 (check_interaction<BytecodeTraceBuilder, lookup_instr_fetching_tag_value_validation_settings>(
743 mutated_trace)),
744 "Failed.*LOOKUP_INSTR_FETCHING_TAG_VALUE_VALIDATION.*Could not find tuple in destination.");
745 }
746}
747
748// Negative test on not toggling instr_out_of_range when instr_size > bytes_to_read
// NOTE(review): the EXPECT_THROW_WITH_MESSAGE opening (original line 766) —
// presumably wrapping check_relation<instr_fetching>(trace, SR_INSTR_OUT_OF_RANGE_TOGGLE) —
// was dropped by the doc extraction.
749TEST(InstrFetchingConstrainingTest, NegativeNotTogglingInstrOutOfRange)
750{
751 TestTraceContainer trace = TestTraceContainer::from_rows({
752 { .precomputed_first_row = 1 },
753 {
754 .instr_fetching_bytes_to_read = 11,
755 .instr_fetching_instr_abs_diff = 0,
756 .instr_fetching_instr_out_of_range = 1, // Will be mutated to zero
757 .instr_fetching_instr_size = 12,
758 .instr_fetching_sel = 1,
759 },
760 });
761
// Sanity check: correctly toggled row satisfies the subrelation.
762 check_relation<instr_fetching>(trace, instr_fetching::SR_INSTR_OUT_OF_RANGE_TOGGLE);
763
764 trace.set(C::instr_fetching_instr_out_of_range, 1, 0); // Mutate to wrong value
765
767 "INSTR_OUT_OF_RANGE_TOGGLE");
768}
769
770// Negative test on wrongly toggling instr_out_of_range when instr_size <= bytes_to_read
// NOTE(review): the EXPECT_THROW_WITH_MESSAGE opening (original line 788) —
// presumably wrapping check_relation<instr_fetching>(trace, SR_INSTR_OUT_OF_RANGE_TOGGLE) —
// was dropped by the doc extraction.
771TEST(InstrFetchingConstrainingTest, NegativeTogglingInstrInRange)
772{
773 TestTraceContainer trace = TestTraceContainer::from_rows({
774 { .precomputed_first_row = 1 },
775 {
776 .instr_fetching_bytes_to_read = 12,
777 .instr_fetching_instr_abs_diff = 0,
778 .instr_fetching_instr_out_of_range = 0, // Will be mutated to 1
779 .instr_fetching_instr_size = 12,
780 .instr_fetching_sel = 1,
781 },
782 });
783
// Sanity check: untoggled row satisfies the subrelation (instr_size == bytes_to_read).
784 check_relation<instr_fetching>(trace, instr_fetching::SR_INSTR_OUT_OF_RANGE_TOGGLE);
785
786 trace.set(C::instr_fetching_instr_out_of_range, 1, 1); // Mutate to wrong value
787
789 "INSTR_OUT_OF_RANGE_TOGGLE");
790}
791
792// Negative test on not toggling pc_out_of_range when pc >= bytecode_size
// NOTE(review): the EXPECT_THROW_WITH_MESSAGE opening (original line 810) —
// presumably wrapping check_relation<instr_fetching>(trace, SR_PC_OUT_OF_RANGE_TOGGLE) —
// was dropped by the doc extraction.
793TEST(InstrFetchingConstrainingTest, NegativeNotTogglingPcOutOfRange)
794{
795 TestTraceContainer trace = TestTraceContainer::from_rows({
796 { .precomputed_first_row = 1 },
797 {
798 .instr_fetching_bytecode_size = 12,
799 .instr_fetching_pc = 12,
800 .instr_fetching_pc_abs_diff = 0,
801 .instr_fetching_pc_out_of_range = 1, // Will be mutated to 0
802 .instr_fetching_sel = 1,
803 },
804 });
805
// Sanity check: correctly toggled row (pc == bytecode_size) satisfies the subrelation.
806 check_relation<instr_fetching>(trace, instr_fetching::SR_PC_OUT_OF_RANGE_TOGGLE);
807
808 trace.set(C::instr_fetching_pc_out_of_range, 1, 0); // Mutate to wrong value
809
811 "PC_OUT_OF_RANGE_TOGGLE");
812}
813
814// Negative test on wrongly toggling pc_out_of_range when pc < bytecode_size
// NOTE(review): the EXPECT_THROW_WITH_MESSAGE opening (original line 832) —
// presumably wrapping check_relation<instr_fetching>(trace, SR_PC_OUT_OF_RANGE_TOGGLE) —
// was dropped by the doc extraction.
815TEST(InstrFetchingConstrainingTest, NegativeTogglingPcInRange)
816{
817 TestTraceContainer trace = TestTraceContainer::from_rows({
818 { .precomputed_first_row = 1 },
819 {
820 .instr_fetching_bytecode_size = 12,
821 .instr_fetching_pc = 11,
822 .instr_fetching_pc_abs_diff = 0,
823 .instr_fetching_pc_out_of_range = 0, // Will be mutated to 1
824 .instr_fetching_sel = 1,
825 },
826 });
827
// Sanity check: untoggled row (pc < bytecode_size) satisfies the subrelation.
828 check_relation<instr_fetching>(trace, instr_fetching::SR_PC_OUT_OF_RANGE_TOGGLE);
829
830 trace.set(C::instr_fetching_pc_out_of_range, 1, 1); // Mutate to wrong value
831
833 "PC_OUT_OF_RANGE_TOGGLE");
834}
835
836} // namespace
837} // namespace bb::avm2::constraining
#define AVM_PC_SIZE_IN_BITS
EventEmitter< BytecodeDecompositionEvent > decomposition_events
static constexpr size_t SR_OP1_BYTES_DECOMPOSITION
static constexpr size_t SR_OP3_BYTES_DECOMPOSITION
static constexpr size_t SR_INDIRECT_BYTES_DECOMPOSITION
static constexpr size_t SR_OP6_BYTES_DECOMPOSITION
static constexpr size_t SR_OP4_BYTES_DECOMPOSITION
static constexpr size_t SR_INSTR_OUT_OF_RANGE_TOGGLE
static std::string get_subrelation_label(size_t index)
static constexpr size_t SR_OP7_BYTES_DECOMPOSITION
static constexpr size_t SR_OP5_BYTES_DECOMPOSITION
static constexpr size_t SR_PC_OUT_OF_RANGE_TOGGLE
static constexpr size_t SR_OP2_BYTES_DECOMPOSITION
static TestTraceContainer from_rows(const std::vector< AvmFullRow > &rows)
const FF & get(Column col, uint32_t row) const
void set(Column col, uint32_t row, const FF &value)
RangeCheckTraceBuilder range_check_builder
Definition alu.test.cpp:120
PrecomputedTraceBuilder precomputed_builder
Definition alu.test.cpp:119
AluTraceBuilder builder
Definition alu.test.cpp:123
TestTraceContainer trace
#define EXPECT_THROW_WITH_MESSAGE(code, expectedMessage)
Definition macros.hpp:7
void check_interaction(tracegen::TestTraceContainer &trace)
TEST(TxExecutionConstrainingTest, WriteTreeValue)
Definition tx.test.cpp:508
Instruction random_instruction(WireOpCode w_opcode)
Definition fixtures.cpp:125
TestTraceContainer empty_trace()
Definition fixtures.cpp:153
lookup_settings< lookup_instr_fetching_wire_instruction_info_settings_ > lookup_instr_fetching_wire_instruction_info_settings
lookup_settings< lookup_instr_fetching_bytecode_size_from_bc_dec_settings_ > lookup_instr_fetching_bytecode_size_from_bc_dec_settings
lookup_settings< lookup_instr_fetching_bytes_from_bc_dec_settings_ > lookup_instr_fetching_bytes_from_bc_dec_settings
lookup_settings< lookup_instr_fetching_instr_abs_diff_positive_settings_ > lookup_instr_fetching_instr_abs_diff_positive_settings
lookup_settings< lookup_instr_fetching_pc_abs_diff_positive_settings_ > lookup_instr_fetching_pc_abs_diff_positive_settings
lookup_settings< lookup_instr_fetching_tag_value_validation_settings_ > lookup_instr_fetching_tag_value_validation_settings
typename Flavor::FF FF
Instruction
Enumeration of VM instructions that can be executed.
constexpr decltype(auto) get(::tuplet::tuple< T... > &&t) noexcept
Definition tuple.hpp:13
unsigned __int128 uint128_t
Definition serialize.hpp:44