Barretenberg
The ZK-SNARK library at the core of Aztec
Loading...
Searching...
No Matches
straus_scalar_slice.cpp
Go to the documentation of this file.
1// === AUDIT STATUS ===
2// internal: { status: not started, auditors: [], date: YYYY-MM-DD }
3// external_1: { status: not started, auditors: [], date: YYYY-MM-DD }
4// external_2: { status: not started, auditors: [], date: YYYY-MM-DD }
5// =====================
6
8#include "./cycle_scalar.hpp"
11
12namespace bb::stdlib {
13
// NOTE(review): this is a doxygen source listing — the bare numbers at the start
// of each line are doxygen's own line numbers, and several lines (28, 34, and the
// right-hand sides of lo_bits/hi_bits at 104/106) are collapsed out of this view,
// including the constructor's opening name line. Comments below describe only
// what is visible here.
//
// Constructor purpose (from the visible body): decompose a cycle_scalar (its
// `lo` and `hi` field_t limbs) into `table_bits`-bit slices, recorded twice —
// as circuit field_t elements (`slices`) and as native uint64_t values
// (`slices_native`) for cheap array indexing.
27template <typename Builder>
29 const cycle_scalar<Builder>& scalar,
30 const size_t table_bits)
31 : _table_bits(table_bits)
32{
33 using FF = typename Builder::FF;
35
36 constexpr bool IS_ULTRA = Builder::CIRCUIT_TYPE == CircuitType::ULTRA;
37
38 // convert an input cycle_scalar object into a vector of slices, each containing `table_bits` bits.
39 // this also performs an implicit range check on the input slices
40 const auto slice_scalar = [&](const field_t& scalar, const size_t num_bits) {
41 // we record the scalar slices both as field_t circuit elements and u64 values
42 // (u64 values are used to index arrays and we don't want to repeatedly cast a stdlib value to a numeric
43 // primitive as this gets expensive when repeated enough times)
44 std::pair<std::vector<field_t>, std::vector<uint64_t>> result;
// NOTE(review): reserves 2^table_bits entries, but only
// ceil(num_bits / table_bits) slices are ever pushed below — this looks like an
// over-reservation (num_slices would be the natural capacity); confirm intent.
45 result.first.reserve(static_cast<size_t>(1ULL) << table_bits);
46 result.second.reserve(static_cast<size_t>(1ULL) << table_bits);
47
// A zero-width scalar produces no slices — return the empty pair.
48 if (num_bits == 0) {
49 return result;
50 }
// Constant scalars: slice natively; no witnesses or range constraints needed.
51 if (scalar.is_constant()) {
52 const size_t num_slices = (num_bits + table_bits - 1) / table_bits; // ceil(num_bits / table_bits)
53 const uint64_t table_mask = (1ULL << table_bits) - 1ULL;
54 uint256_t raw_value = scalar.get_value();
55 for (size_t i = 0; i < num_slices; ++i) {
// data[0] is the least-significant 64-bit limb; the mask keeps the low table_bits bits.
56 uint64_t slice_v = static_cast<uint64_t>(raw_value.data[0]) & table_mask;
57 result.first.push_back(field_t(slice_v));
58 result.second.push_back(slice_v);
59 raw_value = raw_value >> table_bits;
60 }
61
62 return result;
63 }
// Witness scalars: first compute the native slice values out-of-circuit...
64 uint256_t raw_value = scalar.get_value();
65 const uint64_t table_mask = (1ULL << table_bits) - 1ULL;
66 const size_t num_slices = (num_bits + table_bits - 1) / table_bits;
67 for (size_t i = 0; i < num_slices; ++i) {
68 uint64_t slice_v = static_cast<uint64_t>(raw_value.data[0]) & table_mask;
69 result.second.push_back(slice_v);
70 raw_value = raw_value >> table_bits;
71 }
72
// ...then create the in-circuit slice witnesses.
73 if constexpr (IS_ULTRA) {
// ULTRA: a single builder call decomposes the witness into table_bits-bit
// ranges, producing the slice witness indices and range-checking them together.
74 const auto slice_indices =
75 context->decompose_into_default_range(scalar.get_normalized_witness_index(),
76 num_bits,
77 table_bits,
78 "straus_scalar_slice decompose_into_default_range");
79 for (auto& idx : slice_indices) {
80 result.first.emplace_back(field_t::from_witness_index(context, idx));
81 }
82 } else {
// Non-ULTRA: create one witness per slice and range-constrain each individually.
83 for (size_t i = 0; i < num_slices; ++i) {
84 uint64_t slice_v = result.second[i];
85 field_t slice(witness_t(context, slice_v));
86
87 context->create_range_constraint(
88 slice.get_witness_index(), table_bits, "straus_scalar_slice create_range_constraint");
89
90 result.first.push_back(slice);
91 }
// Constrain that the slices recompose to the original scalar:
// scalar == sum_i (slice_i * scaling_factor_i).
92 std::vector<field_t> linear_elements;
93 FF scaling_factor = 1;
94 for (size_t i = 0; i < num_slices; ++i) {
95 linear_elements.emplace_back(result.first[i] * scaling_factor);
// NOTE(review): scaling_factor only doubles (x2) per slice here, but each slice
// spans table_bits bits, so recomposition would appear to need a factor of
// 2^table_bits per step when table_bits > 1 — verify against upstream and the
// non-ULTRA callers' choice of table_bits.
96 scaling_factor += scaling_factor;
97 }
98 field_t::accumulate(linear_elements).assert_equal(scalar);
99 }
100 return result;
101 };
102
// Split the cycle_scalar's limbs into slices. The bit counts assigned to
// lo_bits/hi_bits are on collapsed doxygen lines (104/106) — not visible here.
103 const size_t lo_bits =
105 const size_t hi_bits =
107 auto hi_slices = slice_scalar(scalar.hi, hi_bits);
108 auto lo_slices = slice_scalar(scalar.lo, lo_bits);
109
// Concatenate lo slices first, then hi slices, in both representations.
110 std::copy(lo_slices.first.begin(), lo_slices.first.end(), std::back_inserter(slices));
111 std::copy(hi_slices.first.begin(), hi_slices.first.end(), std::back_inserter(slices));
112 std::copy(lo_slices.second.begin(), lo_slices.second.end(), std::back_inserter(slices_native));
113 std::copy(hi_slices.second.begin(), hi_slices.second.end(), std::back_inserter(slices_native));
// Propagate the scalar's (merged lo/hi) origin tag to every circuit slice.
114 const auto tag = scalar.get_origin_tag();
115 for (auto& element : slices) {
116 // All slices need to have the same origin tag
117 element.set_origin_tag(tag);
118 }
119}
120
// read(index): return the index'th circuit slice, or std::nullopt when the
// requested round index is past the number of available slices.
// NOTE(review): the signature line for this function (doxygen ~121-130) is
// collapsed out of this listing view.
131{
132 if (index >= slices.size()) {
133 return std::nullopt;
134 }
135 return slices[index];
136}
137
140
141} // namespace bb::stdlib
cycle_scalar represents a member of the cycle curve SCALAR FIELD. This is NOT the native circuit field...
ScalarField get_value() const
OriginTag get_origin_tag() const
Get the origin tag of the cycle_scalar (a merge of the lo and hi tags)
void assert_equal(const field_t &rhs, std::string const &msg="field_t::assert_equal") const
Copy constraint: constrain that *this field is equal to rhs element.
Definition field.cpp:929
static field_t from_witness_index(Builder *ctx, uint32_t witness_index)
Definition field.cpp:59
static field_t accumulate(const std::vector< field_t > &input)
Efficiently compute the sum of vector entries. Using big_add_gate we reduce the number of gates needed...
Definition field.cpp:1147
straus_scalar_slice decomposes an input scalar into table_bits bit-slices. Used in batch_mul,...
stdlib::field_t< Builder > field_t
std::optional< field_t > read(size_t index)
Return a bit-slice associated with round index.
std::vector< uint64_t > slices_native
straus_scalar_slice(Builder *context, const cycle_scalar< Builder > &scalar, size_t table_bits)
Construct a new straus scalar slice::straus scalar slice object.
StrictMock< MockContext > context
std::conditional_t< IsGoblinBigGroup< C, Fq, Fr, G >, element_goblin::goblin_element< C, goblin_field< C >, Fr, G >, element_default::element< C, Fq, Fr, G > > element
element wraps either element_default::element or element_goblin::goblin_element depending on parametr...
typename Flavor::FF FF
C slice(C const &container, size_t start)
Definition container.hpp:9
constexpr decltype(auto) get(::tuplet::tuple< T... > &&t) noexcept
Definition tuple.hpp:13