// bytecode_trace.test.cpp
#include <algorithm>
#include <cstddef>
#include <gmock/gmock.h>
#include <gtest/gtest.h>

#include <cstdint>
#include <memory>
#include <sys/types.h>
#include <vector>

// (project-specific includes elided in the original listing)

namespace bb::avm2::tracegen {
namespace {

using C = Column;

using ::testing::AllOf; // Restored using-declaration (elided in the original listing); AllOf is used throughout.
using simulation::Instruction;
using simulation::InstructionFetchingEvent;

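// Invariants of the bc_decomposition columns exercised by the tests below, as inferred from the
// expectations themselves (the authoritative definitions live in the circuit relations):
//   bytes_to_read             = min(DECOMPOSE_WINDOW_SIZE, bytes_remaining)
//   sel_windows_gt_remaining  = 1 iff DECOMPOSE_WINDOW_SIZE > bytes_remaining
//   is_windows_eq_remaining   = 1 iff DECOMPOSE_WINDOW_SIZE == bytes_remaining
//   windows_min_remaining_inv = 0 when the two values are equal, otherwise
//                               (FF(DECOMPOSE_WINDOW_SIZE) - FF(bytes_remaining)).invert()
//   last_of_contract          = 1 on the final byte of a bytecode, 0 elsewhere
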
TEST(BytecodeTraceGenTest, BasicShortLength)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    builder.process_decomposition(
        {
            simulation::BytecodeDecompositionEvent{
                .bytecode_id = 43,
                .bytecode = std::make_shared<std::vector<uint8_t>>(std::vector<uint8_t>{ 12, 31, 5, 2 }),
            },
        },
        trace);
    auto rows = trace.as_rows();

    // One extra empty row is prepended. Note that precomputed_first_row is not set through process_decomposition()
    // because it pertains to another subtrace.
    ASSERT_EQ(rows.size(), 4 + 1);

    // We do not inspect row at index 0 as it is completely empty.
    EXPECT_THAT(rows.at(1),
                AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
                      ROW_FIELD_EQ(bc_decomposition_id, 43),
                      ROW_FIELD_EQ(bc_decomposition_bytes, 12),
                      ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_1, 31),
                      ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_2, 5),
                      ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_3, 2),
                      ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_4, 0),
                      ROW_FIELD_EQ(bc_decomposition_pc, 0),
                      ROW_FIELD_EQ(bc_decomposition_bytes_remaining, 4),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, FF(DECOMPOSE_WINDOW_SIZE - 4).invert()),
                      ROW_FIELD_EQ(bc_decomposition_is_windows_eq_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_bytes_to_read, 4),
                      ROW_FIELD_EQ(bc_decomposition_last_of_contract, 0)));

    EXPECT_THAT(rows.at(2),
                AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
                      ROW_FIELD_EQ(bc_decomposition_id, 43),
                      ROW_FIELD_EQ(bc_decomposition_bytes, 31),
                      ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_1, 5),
                      ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_2, 2),
                      ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_3, 0),
                      ROW_FIELD_EQ(bc_decomposition_pc, 1),
                      ROW_FIELD_EQ(bc_decomposition_bytes_remaining, 3),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, FF(DECOMPOSE_WINDOW_SIZE - 3).invert()),
                      ROW_FIELD_EQ(bc_decomposition_is_windows_eq_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_bytes_to_read, 3),
                      ROW_FIELD_EQ(bc_decomposition_last_of_contract, 0)));

    EXPECT_THAT(rows.at(3),
                AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
                      ROW_FIELD_EQ(bc_decomposition_id, 43),
                      ROW_FIELD_EQ(bc_decomposition_bytes, 5),
                      ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_1, 2),
                      ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_2, 0),
                      ROW_FIELD_EQ(bc_decomposition_pc, 2),
                      ROW_FIELD_EQ(bc_decomposition_bytes_remaining, 2),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, FF(DECOMPOSE_WINDOW_SIZE - 2).invert()),
                      ROW_FIELD_EQ(bc_decomposition_is_windows_eq_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_bytes_to_read, 2),
                      ROW_FIELD_EQ(bc_decomposition_last_of_contract, 0)));

    EXPECT_THAT(rows.at(4),
                AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
                      ROW_FIELD_EQ(bc_decomposition_id, 43),
                      ROW_FIELD_EQ(bc_decomposition_bytes, 2),
                      ROW_FIELD_EQ(bc_decomposition_bytes_pc_plus_1, 0),
                      ROW_FIELD_EQ(bc_decomposition_pc, 3),
                      ROW_FIELD_EQ(bc_decomposition_bytes_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, FF(DECOMPOSE_WINDOW_SIZE - 1).invert()),
                      ROW_FIELD_EQ(bc_decomposition_is_windows_eq_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_bytes_to_read, 1),
                      ROW_FIELD_EQ(bc_decomposition_last_of_contract, 1)));
}

TEST(BytecodeTraceGenTest, BasicLongerThanWindowSize)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    constexpr auto bytecode_size = DECOMPOSE_WINDOW_SIZE + 8;
    std::vector<uint8_t> bytecode(bytecode_size);
    const uint8_t first_byte = 17; // Arbitrary start value; we increment by one per byte. We will hit
                                   // invalid opcodes, but it should not matter.

    for (uint8_t i = 0; i < bytecode_size; i++) {
        bytecode[i] = i + first_byte;
    }

    builder.process_decomposition(
        {
            simulation::BytecodeDecompositionEvent{
                .bytecode_id = 7,
                .bytecode = std::make_shared<std::vector<uint8_t>>(bytecode),
            },
        },
        trace);
    auto rows = trace.as_rows();

    // One extra empty row is prepended. Note that precomputed_first_row is not set through process_decomposition()
    // because it pertains to another subtrace.
    ASSERT_EQ(rows.size(), bytecode_size + 1);

    // We do not inspect row at index 0 as it is completely empty.
    EXPECT_THAT(rows.at(1),
                AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
                      ROW_FIELD_EQ(bc_decomposition_id, 7),
                      ROW_FIELD_EQ(bc_decomposition_bytes, first_byte),
                      ROW_FIELD_EQ(bc_decomposition_pc, 0),
                      ROW_FIELD_EQ(bc_decomposition_bytes_remaining, bytecode_size),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, FF(-8).invert()),
                      ROW_FIELD_EQ(bc_decomposition_is_windows_eq_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_bytes_to_read, DECOMPOSE_WINDOW_SIZE),
                      ROW_FIELD_EQ(bc_decomposition_last_of_contract, 0)));

    // We are interested in inspecting the boundary around bytes_remaining == window size.

    EXPECT_THAT(rows.at(9),
                AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
                      ROW_FIELD_EQ(bc_decomposition_id, 7),
                      ROW_FIELD_EQ(bc_decomposition_bytes, first_byte + 8),
                      ROW_FIELD_EQ(bc_decomposition_pc, 8),
                      ROW_FIELD_EQ(bc_decomposition_bytes_remaining, DECOMPOSE_WINDOW_SIZE),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, 0),
                      ROW_FIELD_EQ(bc_decomposition_is_windows_eq_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_bytes_to_read, DECOMPOSE_WINDOW_SIZE),
                      ROW_FIELD_EQ(bc_decomposition_last_of_contract, 0)));

    EXPECT_THAT(rows.at(10),
                AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
                      ROW_FIELD_EQ(bc_decomposition_id, 7),
                      ROW_FIELD_EQ(bc_decomposition_bytes, first_byte + 9),
                      ROW_FIELD_EQ(bc_decomposition_pc, 9),
                      ROW_FIELD_EQ(bc_decomposition_bytes_remaining, DECOMPOSE_WINDOW_SIZE - 1),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, 1),
                      ROW_FIELD_EQ(bc_decomposition_is_windows_eq_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_bytes_to_read, DECOMPOSE_WINDOW_SIZE - 1),
                      ROW_FIELD_EQ(bc_decomposition_last_of_contract, 0)));

    // Last row
    EXPECT_THAT(rows.at(bytecode_size),
                AllOf(ROW_FIELD_EQ(bc_decomposition_sel, 1),
                      ROW_FIELD_EQ(bc_decomposition_id, 7),
                      ROW_FIELD_EQ(bc_decomposition_bytes, first_byte + bytecode_size - 1),
                      ROW_FIELD_EQ(bc_decomposition_pc, bytecode_size - 1),
                      ROW_FIELD_EQ(bc_decomposition_bytes_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, 1),
                      ROW_FIELD_EQ(bc_decomposition_windows_min_remaining_inv, FF(DECOMPOSE_WINDOW_SIZE - 1).invert()),
                      ROW_FIELD_EQ(bc_decomposition_is_windows_eq_remaining, 0),
                      ROW_FIELD_EQ(bc_decomposition_bytes_to_read, 1),
                      ROW_FIELD_EQ(bc_decomposition_last_of_contract, 1)));
}

TEST(BytecodeTraceGenTest, MultipleEvents)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    std::vector<uint32_t> bc_sizes = { DECOMPOSE_WINDOW_SIZE + 2, 17, DECOMPOSE_WINDOW_SIZE, 1 };
    std::vector<std::vector<uint8_t>> bytecodes(bc_sizes.size()); // Restored declaration (elided in the original listing).

    std::transform(bc_sizes.begin(), bc_sizes.end(), bytecodes.begin(), [](uint32_t bc_size) -> std::vector<uint8_t> {
        std::vector<uint8_t> bytecode(bc_size);
        for (uint8_t i = 0; i < bc_size; i++) {
            bytecode[i] = i * i; // Arbitrary bytecode that we will not inspect below.
        }

        return bytecode;
    });

    builder.process_decomposition(
        {
            simulation::BytecodeDecompositionEvent{
                .bytecode_id = 0,
                .bytecode = std::make_shared<std::vector<uint8_t>>(bytecodes[0]),
            },
            simulation::BytecodeDecompositionEvent{
                .bytecode_id = 1,
                .bytecode = std::make_shared<std::vector<uint8_t>>(bytecodes[1]),
            },
            simulation::BytecodeDecompositionEvent{
                .bytecode_id = 2,
                .bytecode = std::make_shared<std::vector<uint8_t>>(bytecodes[2]),
            },
            simulation::BytecodeDecompositionEvent{
                .bytecode_id = 3,
                .bytecode = std::make_shared<std::vector<uint8_t>>(bytecodes[3]),
            },
        },
        trace);
    auto rows = trace.as_rows();

    // One extra empty row is prepended.
    ASSERT_EQ(rows.size(), 2 * DECOMPOSE_WINDOW_SIZE + 20 + 1);

    size_t row_pos = 1;
    for (uint32_t i = 0; i < 4; i++) {
        for (uint32_t j = 0; j < bc_sizes[i]; j++) {
            const auto bytes_rem = bc_sizes[i] - j;
            EXPECT_THAT(
                rows.at(row_pos),
                AllOf(
                    ROW_FIELD_EQ(bc_decomposition_sel, 1),
                    ROW_FIELD_EQ(bc_decomposition_id, i),
                    ROW_FIELD_EQ(bc_decomposition_pc, j),
                    ROW_FIELD_EQ(bc_decomposition_bytes_remaining, bytes_rem),
                    ROW_FIELD_EQ(bc_decomposition_sel_windows_gt_remaining, DECOMPOSE_WINDOW_SIZE > bytes_rem ? 1 : 0),
                    ROW_FIELD_EQ(
                        bc_decomposition_windows_min_remaining_inv,
                        bytes_rem == DECOMPOSE_WINDOW_SIZE ? 0 : (FF(DECOMPOSE_WINDOW_SIZE) - FF(bytes_rem)).invert()),
                    ROW_FIELD_EQ(bc_decomposition_is_windows_eq_remaining, bytes_rem == DECOMPOSE_WINDOW_SIZE ? 1 : 0),
                    ROW_FIELD_EQ(bc_decomposition_bytes_to_read, std::min(DECOMPOSE_WINDOW_SIZE, bytes_rem)),
                    ROW_FIELD_EQ(bc_decomposition_last_of_contract, j == bc_sizes[i] - 1 ? 1 : 0)));
            row_pos++;
        }
    }
}

TEST(BytecodeTraceGenTest, BasicHashing)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    builder.process_hashing(
        {
            simulation::BytecodeHashingEvent{
                .bytecode_id = 0,
                .bytecode_length = 6,
                .bytecode_fields = { 10, 20 },
            },
        },
        trace);
    const auto rows = trace.as_rows();

    // One extra empty row is prepended.
    EXPECT_THAT(rows.at(1),
                AllOf(ROW_FIELD_EQ(bc_hashing_sel, 1),
                      ROW_FIELD_EQ(bc_hashing_start, 1),
                      ROW_FIELD_EQ(bc_hashing_latch, 0),
                      ROW_FIELD_EQ(bc_hashing_bytecode_id, 0),
                      ROW_FIELD_EQ(bc_hashing_pc_index, 0),
                      ROW_FIELD_EQ(bc_hashing_packed_field, 10),
                      ROW_FIELD_EQ(bc_hashing_incremental_hash, 6)));
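
    // Note (inferred from the expected values above and below): the start row seeds the incremental
    // hash with bytecode_length (6 here), and pc_index then advances by 31 per row, consistent with
    // packing 31 bytecode bytes into each field element.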

    // Latched row (note we leave out the resulting hash in this test)
    EXPECT_THAT(rows.at(2),
                AllOf(ROW_FIELD_EQ(bc_hashing_sel, 1),
                      ROW_FIELD_EQ(bc_hashing_start, 0),
                      ROW_FIELD_EQ(bc_hashing_latch, 1),
                      ROW_FIELD_EQ(bc_hashing_bytecode_id, 0),
                      ROW_FIELD_EQ(bc_hashing_pc_index, 31),
                      ROW_FIELD_EQ(bc_hashing_packed_field, 20)));
}

std::vector<Instruction> gen_random_instructions(std::span<const WireOpCode> opcodes)
{
    std::vector<Instruction> instructions;
    instructions.reserve(opcodes.size());
    for (const auto& opcode : opcodes) {
        instructions.emplace_back(testing::random_instruction(opcode));
    }
    return instructions;
}

std::vector<uint8_t> create_bytecode(std::span<const Instruction> instructions)
{
    std::vector<uint8_t> bytecode;
    for (const auto& instruction : instructions) {
        auto serialized_instruction = instruction.serialize();
        bytecode.insert(bytecode.end(),
                        std::make_move_iterator(serialized_instruction.begin()),
                        std::make_move_iterator(serialized_instruction.end()));
    }
    return bytecode;
}

std::vector<size_t> gen_pcs(std::span<const WireOpCode> opcodes)
{
    std::vector<size_t> pcs;
    pcs.reserve(opcodes.size());
    size_t pc = 0;
    for (const auto& opcode : opcodes) {
        pcs.emplace_back(pc);
        pc += WIRE_INSTRUCTION_SPEC.at(opcode).size_in_bytes;
    }
    return pcs;
}
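
// For example (hypothetical sizes): for two opcodes whose serialized sizes are 5 and 8 bytes,
// gen_pcs returns { 0, 5 }; each instruction starts where the previous one's bytes end.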

std::vector<InstructionFetchingEvent> create_instruction_fetching_events(
    const std::vector<Instruction>& instructions,
    const std::vector<size_t>& pcs,
    const std::shared_ptr<std::vector<uint8_t>>& bytecode_ptr,
    const BytecodeId bytecode_id)
{
    std::vector<InstructionFetchingEvent> events; // Restored declaration (elided in the original listing).
    events.reserve(instructions.size());

    for (size_t i = 0; i < instructions.size(); i++) {
        events.emplace_back(InstructionFetchingEvent{
            .bytecode_id = bytecode_id,
            .pc = static_cast<uint32_t>(pcs.at(i)),
            .instruction = instructions.at(i),
            .bytecode = bytecode_ptr,
        });
    }
    return events;
}

// We build a random InstructionFetchingEvent for each wire opcode.
// We then verify that the bytes (bd0, bd1, ...) correspond to the serialized instruction.
TEST(BytecodeTraceGenTest, InstrDecompositionInBytesEachOpcode)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    constexpr std::array<C, 37> bd_columns = {
        C::instr_fetching_bd0,  C::instr_fetching_bd1,  C::instr_fetching_bd2,  C::instr_fetching_bd3,
        C::instr_fetching_bd4,  C::instr_fetching_bd5,  C::instr_fetching_bd6,  C::instr_fetching_bd7,
        C::instr_fetching_bd8,  C::instr_fetching_bd9,  C::instr_fetching_bd10, C::instr_fetching_bd11,
        C::instr_fetching_bd12, C::instr_fetching_bd13, C::instr_fetching_bd14, C::instr_fetching_bd15,
        C::instr_fetching_bd16, C::instr_fetching_bd17, C::instr_fetching_bd18, C::instr_fetching_bd19,
        C::instr_fetching_bd20, C::instr_fetching_bd21, C::instr_fetching_bd22, C::instr_fetching_bd23,
        C::instr_fetching_bd24, C::instr_fetching_bd25, C::instr_fetching_bd26, C::instr_fetching_bd27,
        C::instr_fetching_bd28, C::instr_fetching_bd29, C::instr_fetching_bd30, C::instr_fetching_bd31,
        C::instr_fetching_bd32, C::instr_fetching_bd33, C::instr_fetching_bd34, C::instr_fetching_bd35,
        C::instr_fetching_bd36,
    };

    constexpr std::array<C, 7> operand_columns = {
        C::instr_fetching_op1, C::instr_fetching_op2, C::instr_fetching_op3, C::instr_fetching_op4,
        C::instr_fetching_op5, C::instr_fetching_op6, C::instr_fetching_op7,
    };

    constexpr BytecodeId bytecode_id = 1;
    constexpr auto num_opcodes = static_cast<size_t>(WireOpCode::LAST_OPCODE_SENTINEL);

    std::vector<WireOpCode> opcodes; // Restored declaration (elided in the original listing).
    opcodes.reserve(num_opcodes);
    for (size_t i = 0; i < num_opcodes; i++) {
        opcodes.emplace_back(static_cast<WireOpCode>(i));
    }

    std::vector<Instruction> instructions = gen_random_instructions(opcodes);
    std::vector<size_t> pcs = gen_pcs(opcodes);
    std::vector<uint8_t> bytecode = create_bytecode(instructions);

    auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(bytecode);
    std::vector<InstructionFetchingEvent> events = // Restored declaration (elided in the original listing).
        create_instruction_fetching_events(instructions, pcs, bytecode_ptr, bytecode_id);

    builder.process_instruction_fetching(events, trace);

    for (uint32_t i = 0; i < num_opcodes; i++) {
        const auto instr = instructions.at(i);
        const auto instr_encoded = instr.serialize();
        const auto w_opcode = static_cast<WireOpCode>(i);

        // Check size_in_bytes column
        const auto expected_size_in_bytes = WIRE_INSTRUCTION_SPEC.at(w_opcode).size_in_bytes;
        ASSERT_EQ(instr_encoded.size(), expected_size_in_bytes);
        EXPECT_EQ(FF(expected_size_in_bytes), trace.get(C::instr_fetching_instr_size, i + 1));

        // Inspect each byte
        for (size_t j = 0; j < static_cast<size_t>(expected_size_in_bytes); j++) {
            EXPECT_EQ(FF(instr_encoded.at(j)), trace.get(bd_columns.at(j), i + 1));
        }

        // Check execution opcode
        EXPECT_EQ(FF(static_cast<uint8_t>(WIRE_INSTRUCTION_SPEC.at(w_opcode).exec_opcode)),
                  trace.get(C::instr_fetching_exec_opcode, i + 1));

        // Check indirect
        EXPECT_EQ(FF(instr.indirect), trace.get(C::instr_fetching_indirect, i + 1));

        // Check pc
        EXPECT_EQ(FF(pcs.at(i)), trace.get(C::instr_fetching_pc, i + 1));

        // Check operands
        size_t operand_idx = 0;
        for (const auto& operand : instr.operands) {
            EXPECT_EQ(FF(operand), trace.get(operand_columns.at(operand_idx++), i + 1));
        }
    }
}

TEST(BytecodeTraceGenTest, InstrFetchingSingleBytecode)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    constexpr BytecodeId bytecode_id = 1;
    constexpr size_t num_of_opcodes = 10;
    constexpr std::array<WireOpCode, num_of_opcodes> opcodes = {
        // (the ten WireOpCode values were elided in the original listing)
    };

    std::vector<Instruction> instructions = gen_random_instructions(opcodes);
    std::vector<size_t> pcs = gen_pcs(opcodes);
    std::vector<uint8_t> bytecode = create_bytecode(instructions);

    std::vector<InstructionFetchingEvent> events = create_instruction_fetching_events(
        instructions, pcs, std::make_shared<std::vector<uint8_t>>(bytecode), bytecode_id);

    builder.process_instruction_fetching(events, trace);

    // One extra empty row is prepended.
    const auto rows = trace.as_rows();
    const auto bytecode_size = bytecode.size();
    EXPECT_EQ(rows.size(), num_of_opcodes + 1);

    for (size_t i = 0; i < num_of_opcodes; i++) {
        const auto pc = pcs.at(i);
        const auto instr_size = WIRE_INSTRUCTION_SPEC.at(opcodes.at(i)).size_in_bytes;
        const auto has_tag = WIRE_INSTRUCTION_SPEC.at(opcodes.at(i)).tag_operand_idx.has_value();
        const auto tag_is_op2 = has_tag ? WIRE_INSTRUCTION_SPEC.at(opcodes.at(i)).tag_operand_idx.value() == 2 : 0;
        const auto bytes_remaining = bytecode_size - pc;
        const auto bytes_to_read = std::min<size_t>(DECOMPOSE_WINDOW_SIZE, bytes_remaining);

        EXPECT_LE(instr_size, bytes_to_read);
        const auto instr_abs_diff = bytes_to_read - instr_size;

        EXPECT_LT(pc, bytecode_size);
        const auto pc_abs_diff = bytecode_size - pc - 1;

        ASSERT_LE(bytecode_size, UINT16_MAX);
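
        // Note (assumption): the UINT16_MAX bound asserted above is presumably there because the
        // *_abs_diff witnesses computed in this loop are range-checked to 16 bits by the circuit.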

        EXPECT_THAT(rows.at(i + 1),
                    AllOf(ROW_FIELD_EQ(instr_fetching_sel, 1),
                          ROW_FIELD_EQ(instr_fetching_pc, pc),
                          ROW_FIELD_EQ(instr_fetching_bd0, static_cast<uint8_t>(opcodes.at(i))),
                          ROW_FIELD_EQ(instr_fetching_bytecode_id, bytecode_id),
                          ROW_FIELD_EQ(instr_fetching_bytes_to_read, bytes_to_read),
                          ROW_FIELD_EQ(instr_fetching_bytecode_size, bytecode_size),
                          ROW_FIELD_EQ(instr_fetching_instr_size, instr_size),
                          ROW_FIELD_EQ(instr_fetching_instr_abs_diff, instr_abs_diff),
                          ROW_FIELD_EQ(instr_fetching_pc_abs_diff, pc_abs_diff),
                          ROW_FIELD_EQ(instr_fetching_pc_out_of_range, 0),
                          ROW_FIELD_EQ(instr_fetching_opcode_out_of_range, 0),
                          ROW_FIELD_EQ(instr_fetching_instr_out_of_range, 0),
                          ROW_FIELD_EQ(instr_fetching_tag_out_of_range, 0),
                          ROW_FIELD_EQ(instr_fetching_sel_parsing_err, 0),
                          ROW_FIELD_EQ(instr_fetching_sel_pc_in_range, 1),
                          ROW_FIELD_EQ(instr_fetching_sel_has_tag, has_tag),
                          ROW_FIELD_EQ(instr_fetching_sel_tag_is_op2, tag_is_op2)));
    }
}

// Test involving 3 different bytecode_ids, each over the same two-opcode bytecode.
TEST(BytecodeTraceGenTest, InstrFetchingMultipleBytecodes)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    constexpr size_t num_of_opcodes = 2;
    constexpr std::array<WireOpCode, num_of_opcodes> opcodes = {
        // (the two WireOpCode values were elided in the original listing)
    };

    std::vector<Instruction> instructions = gen_random_instructions(opcodes);
    std::vector<size_t> pcs = gen_pcs(opcodes);
    std::vector<uint8_t> bytecode = create_bytecode(instructions);

    std::vector<InstructionFetchingEvent> events; // Restored declaration (elided in the original listing).
    for (size_t i = 0; i < 3; i++) {
        auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(bytecode);
        auto new_events =
            create_instruction_fetching_events(instructions, pcs, bytecode_ptr, static_cast<BytecodeId>(i + 1));
        events.insert(events.end(), new_events.begin(), new_events.end());
    }

    builder.process_instruction_fetching(events, trace);

    // One extra empty row is prepended.
    const auto rows = trace.as_rows();
    EXPECT_EQ(rows.size(), 6 + 1);

    for (size_t i = 0; i < 3; i++) {
        EXPECT_THAT(rows.at(2 * i + 1), ROW_FIELD_EQ(instr_fetching_pc, 0));
    }
}

// Test which processes three single-instruction events, each with a different parsing error.
// The bytecode consists of 20 trivial bytes, increasing from 0 to 19, except that the first byte
// (index 0) is set to LAST_OPCODE_SENTINEL + 1.
// We then create instruction events at pc = 0, pc = 19, and pc = 38:
// pc == 0 corresponds to the error OPCODE_OUT_OF_RANGE,
// pc == 19 to INSTRUCTION_OUT_OF_RANGE,
// pc == 38 to PC_OUT_OF_RANGE.
// For each row we check the column instr_fetching_sel_parsing_err in addition to the column of the
// respective error. It is not an issue that the instruction in each event is generated at random and
// is not consistent with the bytecode for this test case.
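// Expected witness relations (collected from the inline comments on the rows below):
//   instr_abs_diff = bytes_to_read - instr_size      if instr_size <= bytes_to_read
//                  = instr_size - bytes_to_read - 1  otherwise
//   pc_abs_diff    = bytecode_size - pc - 1          if bytecode_size > pc
//                  = pc - bytecode_size              otherwise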
TEST(BytecodeTraceGenTest, InstrFetchingParsingErrors)
{
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    constexpr BytecodeId bytecode_id = 1;
    constexpr size_t bytecode_size = 20;
    std::vector<uint8_t> bytecode(bytecode_size);
    for (size_t i = 0; i < bytecode_size; i++) {
        bytecode[i] = static_cast<uint8_t>(i);
    }
    bytecode[0] = static_cast<uint8_t>(WireOpCode::LAST_OPCODE_SENTINEL) + 1;

    std::vector<InstructionFetchingEvent> events; // Restored declaration (elided in the original listing).
    auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(bytecode);
    // Note: one designated initializer per event below (presumably the expected error) was elided in
    // the original listing.
    events.emplace_back(InstructionFetchingEvent{
        .bytecode_id = bytecode_id,
        .pc = 0,
        .bytecode = bytecode_ptr,
    });
    events.emplace_back(InstructionFetchingEvent{
        .bytecode_id = bytecode_id,
        .pc = 19,
        .bytecode = bytecode_ptr,
    });
    events.emplace_back(InstructionFetchingEvent{
        .bytecode_id = bytecode_id,
        .pc = 38,
        .bytecode = bytecode_ptr,
    });

    builder.process_instruction_fetching(events, trace);

    // One extra empty row is prepended.
    const auto rows = trace.as_rows();
    ASSERT_EQ(rows.size(), 3 + 1);

    EXPECT_THAT(rows.at(1),
                AllOf(ROW_FIELD_EQ(instr_fetching_sel, 1),
                      ROW_FIELD_EQ(instr_fetching_sel_pc_in_range, 1),
                      ROW_FIELD_EQ(instr_fetching_pc, 0),
                      ROW_FIELD_EQ(instr_fetching_bytes_to_read, 20),
                      ROW_FIELD_EQ(instr_fetching_instr_size, 0),
                      ROW_FIELD_EQ(instr_fetching_instr_abs_diff,
                                   20), // instr_size <= bytes_to_read: bytes_to_read - instr_size
                      ROW_FIELD_EQ(instr_fetching_sel_parsing_err, 1),
                      ROW_FIELD_EQ(instr_fetching_pc_abs_diff, 19), // bytecode_size - pc - 1 if bytecode_size > pc
                      ROW_FIELD_EQ(instr_fetching_opcode_out_of_range, 1)));

    EXPECT_THAT(rows.at(2),
                AllOf(ROW_FIELD_EQ(instr_fetching_sel, 1),
                      ROW_FIELD_EQ(instr_fetching_sel_pc_in_range, 1),
                      ROW_FIELD_EQ(instr_fetching_pc, 19), // OR_16 opcode
                      ROW_FIELD_EQ(instr_fetching_bytes_to_read, 1),
                      ROW_FIELD_EQ(instr_fetching_instr_size, 8), // OR_16 is 8 bytes long
                      ROW_FIELD_EQ(instr_fetching_instr_abs_diff,
                                   6), // instr_size > bytes_to_read: instr_size - bytes_to_read - 1
                      ROW_FIELD_EQ(instr_fetching_sel_parsing_err, 1),
                      ROW_FIELD_EQ(instr_fetching_pc_abs_diff, 0), // bytecode_size - pc - 1 if bytecode_size > pc
                      ROW_FIELD_EQ(instr_fetching_instr_out_of_range, 1)));

    EXPECT_THAT(
        rows.at(3),
        AllOf(ROW_FIELD_EQ(instr_fetching_sel, 1),
              ROW_FIELD_EQ(instr_fetching_sel_pc_in_range, 0),
              ROW_FIELD_EQ(instr_fetching_pc, 38),
              ROW_FIELD_EQ(instr_fetching_bytes_to_read, 0),
              ROW_FIELD_EQ(instr_fetching_instr_size, 0),
              ROW_FIELD_EQ(instr_fetching_instr_abs_diff, 0), // instr_size <= bytes_to_read: bytes_to_read - instr_size
              ROW_FIELD_EQ(instr_fetching_sel_parsing_err, 1),
              ROW_FIELD_EQ(instr_fetching_pc_abs_diff, 18), // pc - bytecode_size if bytecode_size <= pc
              ROW_FIELD_EQ(instr_fetching_pc_out_of_range, 1)));
}

// Test of the tag-out-of-range parsing error.
TEST(BytecodeTraceGenTest, InstrFetchingErrorTagOutOfRange)
{
    // (using-declarations elided in the original listing)
    TestTraceContainer trace;
    BytecodeTraceBuilder builder;

    auto instr_cast = random_instruction(WireOpCode::CAST_16);
    auto instr_set = random_instruction(WireOpCode::SET_64);
    constexpr uint32_t cast_size = 7;
    constexpr uint32_t set_64_size = 13;

    instr_cast.operands.at(2) = Operand::from<uint8_t>(0x09); // Tag operand mutated to 0x09, which is out of range.
    instr_set.operands.at(1) = Operand::from<uint8_t>(0x0A);  // Tag operand mutated to 0x0A, which is out of range.
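
    // As the mutations above suggest, CAST_16 carries its memory tag in operand index 2 and SET_64
    // in operand index 1, which is why sel_tag_is_op2 is expected to be 0 and 1 respectively below.
    // The values 0x09 and 0x0A are assumed to lie just past the largest valid tag.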

    auto bytecode = instr_cast.serialize();
    ASSERT_EQ(bytecode.size(), cast_size);

    auto instr_set_bytecode = instr_set.serialize();
    ASSERT_EQ(instr_set_bytecode.size(), set_64_size);

    bytecode.insert(bytecode.end(), instr_set_bytecode.begin(), instr_set_bytecode.end());

    const auto bytecode_ptr = std::make_shared<std::vector<uint8_t>>(bytecode);

    std::vector<InstructionFetchingEvent> events; // Restored declaration (elided in the original listing).
    events.emplace_back(InstructionFetchingEvent{
        .bytecode_id = 1,
        .pc = 0,
        .instruction = deserialize_instruction(bytecode, 0), // Reflects the real code path better than passing instr_cast.
        .bytecode = bytecode_ptr,
    });

    events.emplace_back(InstructionFetchingEvent{
        .bytecode_id = 1,
        .pc = cast_size,
        .instruction =
            deserialize_instruction(bytecode, cast_size), // Reflects the real code path better than passing instr_set.
        .bytecode = bytecode_ptr,
    });

    builder.process_instruction_fetching(events, trace);

    // One extra empty row is prepended.
    const auto rows = trace.as_rows();
    ASSERT_EQ(rows.size(), 2 + 1);

    EXPECT_THAT(rows.at(1),
                AllOf(ROW_FIELD_EQ(instr_fetching_sel, 1),
                      ROW_FIELD_EQ(instr_fetching_sel_pc_in_range, 1),
                      ROW_FIELD_EQ(instr_fetching_sel_has_tag, 1),
                      ROW_FIELD_EQ(instr_fetching_sel_tag_is_op2, 0),
                      ROW_FIELD_EQ(instr_fetching_tag_value, 9),
                      ROW_FIELD_EQ(instr_fetching_pc, 0),
                      ROW_FIELD_EQ(instr_fetching_bytes_to_read, cast_size + set_64_size),
                      ROW_FIELD_EQ(instr_fetching_instr_size, cast_size),
                      ROW_FIELD_EQ(instr_fetching_instr_abs_diff,
                                   set_64_size), // instr_size <= bytes_to_read: bytes_to_read - instr_size
                      ROW_FIELD_EQ(instr_fetching_sel_parsing_err, 1),
                      ROW_FIELD_EQ(instr_fetching_pc_abs_diff,
                                   cast_size + set_64_size - 1), // bytecode_size - pc - 1 if bytecode_size > pc
                      ROW_FIELD_EQ(instr_fetching_tag_out_of_range, 1)));

    EXPECT_THAT(
        rows.at(2),
        AllOf(ROW_FIELD_EQ(instr_fetching_sel, 1),
              ROW_FIELD_EQ(instr_fetching_sel_pc_in_range, 1),
              ROW_FIELD_EQ(instr_fetching_sel_has_tag, 1),
              ROW_FIELD_EQ(instr_fetching_sel_tag_is_op2, 1),
              ROW_FIELD_EQ(instr_fetching_tag_value, 10),
              ROW_FIELD_EQ(instr_fetching_pc, cast_size),
              ROW_FIELD_EQ(instr_fetching_bytes_to_read, set_64_size),
              ROW_FIELD_EQ(instr_fetching_instr_size, set_64_size),
              ROW_FIELD_EQ(instr_fetching_instr_abs_diff, 0), // instr_size <= bytes_to_read: bytes_to_read - instr_size
              ROW_FIELD_EQ(instr_fetching_sel_parsing_err, 1),
              ROW_FIELD_EQ(instr_fetching_pc_abs_diff, set_64_size - 1), // bytecode_size - pc - 1 if bytecode_size > pc
              ROW_FIELD_EQ(instr_fetching_tag_out_of_range, 1)));
}

} // namespace
} // namespace bb::avm2::tracegen