Mirror of https://github.com/logos-storage/plonky2.git

Constrain first offset of a segment (#1397)

* Constrain first offset of a segment
* Apply comment, revert debugging code
* Modify specs
* Apply comments

Parent: a291d92c01
Commit: 7eff4e2751
@@ -44,7 +44,7 @@ We then impose the following transition constraints:
 \item $(1 - e_i) (a_{i + 1} - a_i) = 0$,
 \item $c_i < 2^{32}$.
 \end{enumerate}
-The last constraint emulates a comparison between two addresses or timestamps by bounding their difference; this assumes that all addresses and timestamps fit in 32 bits and that the field is larger than that.
+The third constraint emulates a comparison between two addresses or timestamps by bounding their difference; this assumes that all addresses and timestamps fit in 32 bits and that the field is larger than that.
 
 \subsubsection{Virtual memory}
 
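For context on the spec text above: the comparison-by-bounded-difference trick works because, when both addresses (or timestamps) fit in 32 bits and the field modulus is far larger, the field difference can only pass a 32-bit range check if the values are actually in order as integers. A minimal sketch of that idea; the toy prime `P` and the helper functions are illustrative assumptions, not the repository's field API:

```rust
// Toy illustration of the comparison-by-bounded-difference trick used in the
// spec above. `P` is a Goldilocks-sized prime chosen for illustration; this is
// not the repository's field API.
const P: u128 = 0xFFFF_FFFF_0000_0001;

/// Field subtraction a - b (mod P); assumes inputs are already reduced mod P.
fn sub_mod(a: u64, b: u64) -> u64 {
    ((a as u128 + P - b as u128) % P) as u64
}

/// The emulated comparison: accept iff (a_next - a_cur) mod P fits in 32 bits.
fn diff_is_range_checkable(a_cur: u64, a_next: u64) -> bool {
    (sub_mod(a_next, a_cur) as u128) < (1u128 << 32)
}

fn main() {
    // Sorted 32-bit addresses: the difference is small, so the check passes.
    assert!(diff_is_range_checkable(100, 105));
    // Out-of-order addresses: the difference wraps to roughly P, so it fails.
    assert!(!diff_is_range_checkable(105, 100));
}
```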
@@ -56,6 +56,9 @@ In the EVM, each contract call has its own address space. Within that address sp
 \end{enumerate}
 The comparisons now involve several columns, which requires some minor adaptations to the technique described above; we will leave these as an exercise to the reader.
 
+Note that an additional constraint check is required: whenever we change the context or the segment, the virtual address must be range-checked to $2^{32}$.
+Without this check, addresses could start at -1 (i.e. $p - 2$) and then increase properly.
+
 \subsubsection{Timestamps}
 
 Memory operations are sorted by address $a$ and timestamp $\tau$. For a memory operation in the CPU, we have:
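The added spec text describes the gap this commit closes: the first virtual address after a context or segment change is never compared to a previous address, so without an explicit range check a dishonest prover could start a segment at a huge field element (e.g. -1 in the field) and then increase it by small amounts, keeping every per-row difference below $2^{32}$. A minimal sketch of that failure mode, under the same illustrative toy field as above:

```rust
// Illustrative only: a Goldilocks-sized toy prime, not the repository's field API.
const P: u128 = 0xFFFF_FFFF_0000_0001;

/// Field subtraction a - b (mod P); assumes inputs are already reduced mod P.
fn sub_mod(a: u64, b: u64) -> u64 {
    ((a as u128 + P - b as u128) % P) as u64
}

fn main() {
    // Dishonest trace: the first address of a new segment is -1 in the field,
    // and subsequent addresses "increase properly" by 1.
    let addrs: [u64; 3] = [(P - 1) as u64, 0, 1];
    for w in addrs.windows(2) {
        // Every consecutive difference is 1, so the per-row difference checks pass.
        assert!((sub_mod(w[1], w[0]) as u128) < (1u128 << 32));
    }
    // ...but the first address is nowhere near 32 bits. Range-checking the first
    // offset whenever the context or segment changes rejects such a trace.
    assert!((addrs[0] as u128) >= (1u128 << 32));
}
```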
|
|||||||
Binary file not shown.
@@ -159,8 +159,16 @@ impl<F: RichField + Extendable<D>, const D: usize> MemoryStark<F, D> {
         trace_col_vecs[COUNTER] = (0..height).map(|i| F::from_canonical_usize(i)).collect();
 
         for i in 0..height {
-            let x = trace_col_vecs[RANGE_CHECK][i].to_canonical_u64() as usize;
-            trace_col_vecs[FREQUENCIES][x] += F::ONE;
+            let x_rc = trace_col_vecs[RANGE_CHECK][i].to_canonical_u64() as usize;
+            trace_col_vecs[FREQUENCIES][x_rc] += F::ONE;
+            if (trace_col_vecs[CONTEXT_FIRST_CHANGE][i] == F::ONE)
+                || (trace_col_vecs[SEGMENT_FIRST_CHANGE][i] == F::ONE)
+            {
+                // CONTEXT_FIRST_CHANGE and SEGMENT_FIRST_CHANGE should be 0 at the last row, so the index
+                // should never be out of bounds.
+                let x_fo = trace_col_vecs[ADDR_VIRTUAL][i + 1].to_canonical_u64() as usize;
+                trace_col_vecs[FREQUENCIES][x_fo] += F::ONE;
+            }
         }
     }
 
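This trace-generation change is bookkeeping for the lookup argument: every value that gets looked up against the 0..height counter table must have its occurrence recorded in FREQUENCIES. Previously only RANGE_CHECK values were counted; now, on rows where the context or segment changes, the next row's ADDR_VIRTUAL is also looked up, so it is counted too. A standalone sanity-check sketch of that invariant; the flat `Vec<Vec<u64>>` layout, the column indices, and the function name are assumptions for illustration, not repository code:

```rust
// Sanity-check sketch: FREQUENCIES must equal the histogram of everything the
// lookup will look up against the 0..height counter table.
const RANGE_CHECK: usize = 0;
const ADDR_VIRTUAL: usize = 1;
const CONTEXT_FIRST_CHANGE: usize = 2;
const SEGMENT_FIRST_CHANGE: usize = 3;
const FREQUENCIES: usize = 4;

fn frequencies_are_consistent(cols: &[Vec<u64>], height: usize) -> bool {
    let mut expected = vec![0u64; height];
    for i in 0..height {
        // Every row looks up its RANGE_CHECK value...
        expected[cols[RANGE_CHECK][i] as usize] += 1;
        // ...and rows where the context or segment changes also look up the next
        // row's virtual address (the first offset of the new segment).
        if cols[CONTEXT_FIRST_CHANGE][i] == 1 || cols[SEGMENT_FIRST_CHANGE][i] == 1 {
            expected[cols[ADDR_VIRTUAL][i + 1] as usize] += 1;
        }
    }
    expected == cols[FREQUENCIES]
}
```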
@@ -176,7 +184,7 @@ impl<F: RichField + Extendable<D>, const D: usize> MemoryStark<F, D> {
     /// reads to the same address, say at timestamps 50 and 80.
     fn fill_gaps(memory_ops: &mut Vec<MemoryOp>) {
         let max_rc = memory_ops.len().next_power_of_two() - 1;
-        for (mut curr, next) in memory_ops.clone().into_iter().tuple_windows() {
+        for (mut curr, mut next) in memory_ops.clone().into_iter().tuple_windows() {
            if curr.address.context != next.address.context
                || curr.address.segment != next.address.segment
            {
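`fill_gaps` walks consecutive pairs of memory operations via itertools' `tuple_windows` (the crate is already used by this code); `next` becomes `mut` here because the new gap-filling loop in the following hunk reassigns it as dummy reads are appended. For reference, a minimal standalone example of `tuple_windows`:

```rust
use itertools::Itertools;

fn main() {
    // tuple_windows yields overlapping consecutive pairs, which is how fill_gaps
    // visits (curr, next) pairs of memory operations.
    let pairs: Vec<(u32, u32)> = vec![3, 7, 9, 12].into_iter().tuple_windows().collect();
    assert_eq!(pairs, vec![(3, 7), (7, 9), (9, 12)]);
}
```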
@@ -186,6 +194,15 @@ impl<F: RichField + Extendable<D>, const D: usize> MemoryStark<F, D> {
                // Similarly, the number of possible segments is a small constant, so any gap must
                // be small. max_rc will always be much larger, as just bootloading the kernel will
                // trigger thousands of memory operations.
+                // However, we do check that the first address accessed is range-checkable. If not,
+                // we could start at a negative address and cheat.
+                while next.address.virt > max_rc {
+                    let mut dummy_address = next.address;
+                    dummy_address.virt -= max_rc;
+                    let dummy_read = MemoryOp::new_dummy_read(dummy_address, 0, next.value);
+                    memory_ops.push(dummy_read);
+                    next = dummy_read;
+                }
            } else if curr.address.virt != next.address.virt {
                while next.address.virt - curr.address.virt - 1 > max_rc {
                    let mut dummy_address = curr.address;
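The new while-loop makes the first address of a fresh context or segment representable in the counter table: if `next.address.virt` exceeds `max_rc`, dummy reads are inserted at intermediate addresses, each `max_rc` lower than the previous one, until the first hop is small enough; the dummies re-read the same value, so memory semantics are unchanged. A standalone sketch of just that address arithmetic, using plain integers and a hypothetical helper rather than the repository's `MemoryOp` type:

```rust
/// Split an oversized first-address hop into steps of at most `max_rc`
/// (illustrative). Returns the intermediate dummy addresses, highest first,
/// mirroring how the loop above walks `next.address.virt` down by `max_rc`.
fn dummy_addresses(first_virt: usize, max_rc: usize) -> Vec<usize> {
    let mut dummies = Vec::new();
    let mut virt = first_virt;
    while virt > max_rc {
        virt -= max_rc;
        dummies.push(virt);
    }
    dummies
}

fn main() {
    let max_rc = 7; // e.g. memory_ops.len().next_power_of_two() - 1
    let dummies = dummy_addresses(20, max_rc);
    assert_eq!(dummies, vec![13, 6]);
    // After sorting, consecutive differences along 6 -> 13 -> 20 are all <= max_rc,
    // and the lowest address (6) is itself <= max_rc, so both the difference checks
    // and the new first-offset check can be satisfied.
}
```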
@@ -530,10 +547,19 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F
 
    fn lookups(&self) -> Vec<Lookup<F>> {
        vec![Lookup {
-           columns: vec![Column::single(RANGE_CHECK)],
+           columns: vec![
+               Column::single(RANGE_CHECK),
+               Column::single_next_row(ADDR_VIRTUAL),
+           ],
            table_column: Column::single(COUNTER),
            frequencies_column: Column::single(FREQUENCIES),
-           filter_columns: vec![None],
+           filter_columns: vec![
+               None,
+               Some(Filter::new_simple(Column::sum([
+                   CONTEXT_FIRST_CHANGE,
+                   SEGMENT_FIRST_CHANGE,
+               ]))),
+           ],
        }]
    }
}
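The updated `lookups()` declares two looked-up expressions against the same counter table and frequencies column: RANGE_CHECK on every row (filter `None`), and the next row's ADDR_VIRTUAL only on rows flagged as a context or segment change. Using the sum of the two flags as a filter is sound as long as the flags are boolean and never both set on the same row, which is how the existing first-change columns are defined in this STARK's trace generation. A small illustrative check of that reasoning, not repository code:

```rust
// Why Column::sum over the two flags acts as a 0/1 filter (illustrative check):
// the flags are boolean and mutually exclusive, so their sum is itself 0 or 1.
fn filter_value(context_first_change: u64, segment_first_change: u64) -> u64 {
    debug_assert!(context_first_change <= 1 && segment_first_change <= 1);
    debug_assert_eq!(context_first_change * segment_first_change, 0); // never both set
    context_first_change + segment_first_change
}
```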