12#include <gmock/gmock.h>
13#include <gtest/gtest.h>
19using ::testing::NiceMock;
20using ::testing::Return;
21using ::testing::ReturnRef;
// NOTE(review): extraction artifact — original file line numbers are fused
// into the text (e.g. "23class ...") and interior lines are missing; code is
// left byte-identical, comments only.
// Test fixture for the TxExecution tests below. Full member list (mock
// merkle DB, contexts, event emitter) is not visible in this chunk.
23class TxExecutionTest :
public ::testing::Test {
25 TxExecutionTest() =
default;
37 written_public_data_slots_tree_check,
// NOTE(review): this test body is extraction-garbled — original line numbers
// are fused into the text and several source lines are missing; code is left
// byte-identical, comments only.
// Happy-path simulation: all three phase contexts halt immediately, execution
// succeeds, and the test tallies the emitted tx events by kind.
43TEST_F(TxExecutionTest, simulateTx)
47 .hash =
"0x1234567890abcdef",
48 .nonRevertibleAccumulatedData =
54 .revertibleAccumulatedData =
// Empty snapshot shared by all four tree states below.
65 AppendOnlyTreeSnapshot dummy_snapshot = {
67 .nextAvailableLeafIndex = 0,
69 TreeStates tree_state = {
70 .noteHashTree = { .tree = dummy_snapshot, .counter = 0 },
71 .nullifierTree = { .tree = dummy_snapshot, .counter = 0 },
72 .l1ToL2MessageTree = { .tree = dummy_snapshot, .counter = 0 },
73 .publicDataTree = { .tree = dummy_snapshot, .counter = 0 },
// Default mock behaviour: report the local tree state; accept nullifier writes.
75 ON_CALL(
merkle_db, get_tree_state()).WillByDefault([&]() {
return tree_state; });
76 ON_CALL(
merkle_db, siloed_nullifier_write(_)).WillByDefault(Return(
true));
// Each phase context reports halted so each phase terminates right away.
80 ON_CALL(*setup_context, halted()).WillByDefault(Return(
true));
83 ON_CALL(*app_logic_context, halted()).WillByDefault(Return(
true));
86 ON_CALL(*teardown_context, halted()).WillByDefault(Return(
true));
89 ExecutionResult successful_result = {
92 .gas_used = Gas{ 100, 100 },
93 .side_effect_states = SideEffectStates{},
96 ON_CALL(execution,
execute(_)).WillByDefault(Return(successful_result));
// Contexts are handed out in phase order: setup, app logic, teardown.
99 .WillOnce(Return(
std::move(setup_context)))
100 .WillOnce(Return(
std::move(app_logic_context)))
101 .WillOnce(Return(
std::move(teardown_context)));
102 EXPECT_CALL(
merkle_db, create_checkpoint()).Times(1);
104 EXPECT_CALL(
merkle_db, pad_trees()).Times(1);
// Expected event counts derived directly from the tx's accumulated data.
109 bool has_startup_event =
false;
110 auto expected_private_append_tree_events =
111 tx.nonRevertibleAccumulatedData.noteHashes.size() + tx.nonRevertibleAccumulatedData.nullifiers.size() +
112 tx.revertibleAccumulatedData.noteHashes.size() + tx.revertibleAccumulatedData.nullifiers.size();
113 auto actual_private_append_tree_events = 0;
115 auto expected_l2_l1_msg_events =
116 tx.nonRevertibleAccumulatedData.l2ToL1Messages.size() + tx.revertibleAccumulatedData.l2ToL1Messages.size();
117 auto actual_l2_l1_msg_events = 0;
// Three public call events expected — presumably one per phase; TODO confirm.
119 auto expected_public_call_events = 3;
120 auto actual_public_call_events = 0;
122 bool has_collect_fee_event =
false;
// Tally emitted tx events by kind (dispatch code between counters is not
// visible in this chunk).
126 for (
const auto& tx_event : events) {
128 has_startup_event =
true;
133 actual_private_append_tree_events++;
136 actual_l2_l1_msg_events++;
139 actual_public_call_events++;
142 has_collect_fee_event =
true;
146 EXPECT_TRUE(has_startup_event);
147 EXPECT_EQ(actual_private_append_tree_events, expected_private_append_tree_events);
148 EXPECT_EQ(expected_l2_l1_msg_events, actual_l2_l1_msg_events);
149 EXPECT_EQ(expected_public_call_events, actual_public_call_events);
150 EXPECT_TRUE(has_collect_fee_event);
// NOTE(review): extraction-garbled body — original line numbers fused into the
// text, interior lines missing; code left byte-identical, comments only.
// Presumably drives the note-hash count past MAX_NOTE_HASHES_PER_TX so the tx
// reverts once (EXPECT_EQ(reverts, 1) below) — TODO confirm against full file.
153TEST_F(TxExecutionTest, NoteHashLimitReached)
157 .hash =
"0x1234567890abcdef",
158 .nonRevertibleAccumulatedData =
163 .revertibleAccumulatedData =
170 AppendOnlyTreeSnapshot dummy_snapshot = {
172 .nextAvailableLeafIndex = 0,
174 TreeStates tree_state = {
175 .noteHashTree = { .tree = dummy_snapshot, .counter = 0 },
176 .nullifierTree = { .tree = dummy_snapshot, .counter = 0 },
177 .l1ToL2MessageTree = { .tree = dummy_snapshot, .counter = 0 },
178 .publicDataTree = { .tree = dummy_snapshot, .counter = 0 },
180 ON_CALL(
merkle_db, get_tree_state()).WillByDefault([&]() {
return tree_state; });
// Unlike simulateTx, every write bumps the corresponding tree counter so the
// per-tx limit can actually be hit.
181 ON_CALL(
merkle_db, siloed_nullifier_write(_)).WillByDefault([&](
const auto& ) {
182 tree_state.nullifierTree.counter++;
185 ON_CALL(
merkle_db, siloed_note_hash_write(_)).WillByDefault([&](
const auto& ) {
186 tree_state.noteHashTree.counter++;
189 ON_CALL(
merkle_db, unique_note_hash_write(_)).WillByDefault([&](
const auto& ) {
190 tree_state.noteHashTree.counter++;
// Two checkpoints expected here (vs one in the happy path).
194 EXPECT_CALL(
merkle_db, create_checkpoint()).Times(2);
199 bool has_startup_event =
false;
200 auto expected_private_append_tree_events =
201 tx.nonRevertibleAccumulatedData.noteHashes.size() + tx.nonRevertibleAccumulatedData.nullifiers.size() +
202 tx.revertibleAccumulatedData.noteHashes.size() + tx.revertibleAccumulatedData.nullifiers.size();
203 auto actual_private_append_tree_events = 0;
205 auto expected_l2_l1_msg_events =
206 tx.nonRevertibleAccumulatedData.l2ToL1Messages.size() + tx.revertibleAccumulatedData.l2ToL1Messages.size();
207 auto actual_l2_l1_msg_events = 0;
// No public call events expected in this scenario.
209 auto expected_public_call_events = 0;
210 auto actual_public_call_events = 0;
213 bool has_collect_fee_event =
false;
// Tally emitted tx events; reverted phase events are detected via
// phase_event.reverted (revert counting code not fully visible here).
217 for (
const auto& tx_event : events) {
219 has_startup_event =
true;
223 if (phase_event.reverted) {
226 auto event = phase_event.event;
228 actual_private_append_tree_events++;
231 actual_l2_l1_msg_events++;
234 actual_public_call_events++;
237 has_collect_fee_event =
true;
241 EXPECT_TRUE(has_startup_event);
242 EXPECT_EQ(actual_private_append_tree_events, expected_private_append_tree_events);
243 EXPECT_EQ(expected_l2_l1_msg_events, actual_l2_l1_msg_events);
244 EXPECT_EQ(expected_public_call_events, actual_public_call_events);
245 EXPECT_TRUE(has_collect_fee_event);
246 EXPECT_EQ(reverts, 1);
// NOTE(review): extraction-garbled body — original line numbers fused into the
// text, interior lines missing; code left byte-identical, comments only.
// Same shape as NoteHashLimitReached, but presumably exceeding the per-tx
// nullifier limit; expects exactly one revert — TODO confirm against full file.
249TEST_F(TxExecutionTest, NullifierLimitReached)
253 .hash =
"0x1234567890abcdef",
254 .nonRevertibleAccumulatedData =
258 .revertibleAccumulatedData =
265 AppendOnlyTreeSnapshot dummy_snapshot = {
267 .nextAvailableLeafIndex = 0,
269 TreeStates tree_state = {
270 .noteHashTree = { .tree = dummy_snapshot, .counter = 0 },
271 .nullifierTree = { .tree = dummy_snapshot, .counter = 0 },
272 .l1ToL2MessageTree = { .tree = dummy_snapshot, .counter = 0 },
273 .publicDataTree = { .tree = dummy_snapshot, .counter = 0 },
275 ON_CALL(
merkle_db, get_tree_state()).WillByDefault([&]() {
return tree_state; });
// Writes bump the tree counters so the limit can be hit mid-tx.
276 ON_CALL(
merkle_db, siloed_nullifier_write(_)).WillByDefault([&](
const auto& ) {
277 tree_state.nullifierTree.counter++;
280 ON_CALL(
merkle_db, siloed_note_hash_write(_)).WillByDefault([&](
const auto& ) {
281 tree_state.noteHashTree.counter++;
284 ON_CALL(
merkle_db, unique_note_hash_write(_)).WillByDefault([&](
const auto& ) {
285 tree_state.noteHashTree.counter++;
289 EXPECT_CALL(
merkle_db, create_checkpoint()).Times(2);
294 bool has_startup_event =
false;
295 auto expected_private_append_tree_events =
296 tx.nonRevertibleAccumulatedData.noteHashes.size() + tx.nonRevertibleAccumulatedData.nullifiers.size() +
297 tx.revertibleAccumulatedData.noteHashes.size() + tx.revertibleAccumulatedData.nullifiers.size();
298 auto actual_private_append_tree_events = 0;
300 auto expected_l2_l1_msg_events =
301 tx.nonRevertibleAccumulatedData.l2ToL1Messages.size() + tx.revertibleAccumulatedData.l2ToL1Messages.size();
302 auto actual_l2_l1_msg_events = 0;
// No public call events expected in this scenario.
304 auto expected_public_call_events = 0;
305 auto actual_public_call_events = 0;
308 bool has_collect_fee_event =
false;
// Tally emitted tx events by kind.
312 for (
const auto& tx_event : events) {
314 has_startup_event =
true;
318 if (phase_event.reverted) {
321 auto event = phase_event.event;
323 actual_private_append_tree_events++;
326 actual_l2_l1_msg_events++;
329 actual_public_call_events++;
332 has_collect_fee_event =
true;
336 EXPECT_TRUE(has_startup_event);
337 EXPECT_EQ(actual_private_append_tree_events, expected_private_append_tree_events);
338 EXPECT_EQ(expected_l2_l1_msg_events, actual_l2_l1_msg_events);
339 EXPECT_EQ(expected_public_call_events, actual_public_call_events);
340 EXPECT_TRUE(has_collect_fee_event);
341 EXPECT_EQ(reverts, 1);
// NOTE(review): extraction-garbled body — original line numbers fused into the
// text, interior lines missing; code left byte-identical, comments only.
// Same shape as the other limit tests, presumably exceeding
// MAX_L2_TO_L1_MSGS_PER_TX; expects exactly one revert — TODO confirm.
344TEST_F(TxExecutionTest, L2ToL1MessageLimitReached)
348 .hash =
"0x1234567890abcdef",
349 .nonRevertibleAccumulatedData =
354 .revertibleAccumulatedData =
361 AppendOnlyTreeSnapshot dummy_snapshot = {
363 .nextAvailableLeafIndex = 0,
365 TreeStates tree_state = {
366 .noteHashTree = { .tree = dummy_snapshot, .counter = 0 },
367 .nullifierTree = { .tree = dummy_snapshot, .counter = 0 },
368 .l1ToL2MessageTree = { .tree = dummy_snapshot, .counter = 0 },
369 .publicDataTree = { .tree = dummy_snapshot, .counter = 0 },
371 ON_CALL(
merkle_db, get_tree_state()).WillByDefault([&]() {
return tree_state; });
// Writes bump the tree counters so limits can be hit mid-tx.
372 ON_CALL(
merkle_db, siloed_nullifier_write(_)).WillByDefault([&](
const auto& ) {
373 tree_state.nullifierTree.counter++;
376 ON_CALL(
merkle_db, siloed_note_hash_write(_)).WillByDefault([&](
const auto& ) {
377 tree_state.noteHashTree.counter++;
380 ON_CALL(
merkle_db, unique_note_hash_write(_)).WillByDefault([&](
const auto& ) {
381 tree_state.noteHashTree.counter++;
385 EXPECT_CALL(
merkle_db, create_checkpoint()).Times(2);
390 bool has_startup_event =
false;
391 auto expected_private_append_tree_events =
392 tx.nonRevertibleAccumulatedData.noteHashes.size() + tx.nonRevertibleAccumulatedData.nullifiers.size() +
393 tx.revertibleAccumulatedData.noteHashes.size() + tx.revertibleAccumulatedData.nullifiers.size();
394 auto actual_private_append_tree_events = 0;
396 auto expected_l2_l1_msg_events =
397 tx.nonRevertibleAccumulatedData.l2ToL1Messages.size() + tx.revertibleAccumulatedData.l2ToL1Messages.size();
398 auto actual_l2_l1_msg_events = 0;
// No public call events expected in this scenario.
400 auto expected_public_call_events = 0;
401 auto actual_public_call_events = 0;
404 bool has_collect_fee_event =
false;
// Tally emitted tx events by kind.
408 for (
const auto& tx_event : events) {
410 has_startup_event =
true;
414 if (phase_event.reverted) {
417 auto event = phase_event.event;
419 actual_private_append_tree_events++;
422 actual_l2_l1_msg_events++;
425 actual_public_call_events++;
428 has_collect_fee_event =
true;
432 EXPECT_TRUE(has_startup_event);
433 EXPECT_EQ(actual_private_append_tree_events, expected_private_append_tree_events);
434 EXPECT_EQ(expected_l2_l1_msg_events, actual_l2_l1_msg_events);
435 EXPECT_EQ(expected_public_call_events, actual_public_call_events);
436 EXPECT_TRUE(has_collect_fee_event);
437 EXPECT_EQ(reverts, 1);
#define MAX_L2_TO_L1_MSGS_PER_TX
#define MAX_NOTE_HASHES_PER_TX
StrictMock< MockHighLevelMerkleDB > merkle_db
void simulate(const Tx &tx)
std::vector< PublicCallRequestWithCalldata > random_enqueued_calls(size_t n)
std::vector< ScopedL2ToL1Message > random_l2_to_l1_messages(size_t n)
std::vector< FF > random_fields(size_t n)
CommandResponse execute(BBApiRequest &request, Command &&command)
Executes a command by visiting a variant of all possible commands.
TEST_F(IPATest, ChallengesAreZero)
constexpr decltype(auto) get(::tuplet::tuple< T... > &&t) noexcept
simulation::PublicDataTreeReadWriteEvent event
NiceMock< MockFieldGreaterThan > field_gt
NiceMock< MockContextProvider > context_provider
NiceMock< MockExecution > execution
EventEmitter< TxEvent > tx_event_emitter
NiceMock< MockWrittenPublicDataSlotsTreeCheck > written_public_data_slots_tree_check