Remove len column in KeccakSpongeStark (#1334)
* Remove len column in KeccakSpongeStark

* Apply comment

Co-authored-by: Robin Salen <30937548+Nashtare@users.noreply.github.com>
commit 19178072b4
parent 099994abe4
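The change below drops the dedicated `len` column from the sponge trace. The length is already implied by the other columns: on the final block, exactly one of the one-hot `is_final_input_len[i]` flags is set, and the total input length equals `already_absorbed_bytes + i` (this is the invariant the new comment in `ctl_looked_data` states). A minimal standalone sketch of that reconstruction, with illustrative names and plain integers rather than the crate's field types:

// Sketch: recovering the input length without a `len` column.
// Names and integer types are illustrative, not the crate's API.
fn recovered_len(already_absorbed_bytes: usize, is_final_input_len: &[u64]) -> usize {
    // On a final block exactly one flag is 1; its index is the number of
    // input bytes in that block.
    let final_block_bytes: usize = is_final_input_len
        .iter()
        .enumerate()
        .map(|(i, &flag)| i * flag as usize)
        .sum();
    already_absorbed_bytes + final_block_bytes
}

fn main() {
    // One full 136-byte rate block already absorbed; final block holds 5 bytes.
    let mut flags = vec![0u64; 136];
    flags[5] = 1;
    assert_eq!(recovered_len(136, &flags), 141);
}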
@@ -41,9 +41,6 @@ pub(crate) struct KeccakSpongeColumnsView<T: Copy> {
     /// The timestamp at which inputs should be read from memory.
     pub timestamp: T,

-    /// The length of the original input, in bytes.
-    pub len: T,
-
     /// The number of input bytes that have already been absorbed prior to this block.
     pub already_absorbed_bytes: T,

@@ -1,5 +1,5 @@
 use std::borrow::Borrow;
-use std::iter::{once, repeat};
+use std::iter::{self, once, repeat};
 use std::marker::PhantomData;
 use std::mem::size_of;

@@ -41,15 +41,23 @@ pub(crate) fn ctl_looked_data<F: Field>() -> Vec<Column<F>> {
         outputs.push(cur_col);
     }

-    Column::singles([
-        cols.context,
-        cols.segment,
-        cols.virt,
-        cols.len,
-        cols.timestamp,
-    ])
-    .chain(outputs)
-    .collect()
+    // The length of the inputs is `already_absorbed_bytes + is_final_input_len`.
+    let len_col = Column::linear_combination(
+        iter::once((cols.already_absorbed_bytes, F::ONE)).chain(
+            cols.is_final_input_len
+                .iter()
+                .enumerate()
+                .map(|(i, &elt)| (elt, F::from_canonical_usize(i))),
+        ),
+    );
+
+    let mut res: Vec<Column<F>> =
+        Column::singles([cols.context, cols.segment, cols.virt]).collect();
+    res.push(len_col);
+    res.push(Column::single(cols.timestamp));
+    res.extend(outputs);
+
+    res
 }

 /// Creates the vector of `Columns` corresponding to the inputs of the Keccak sponge.
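In the new `ctl_looked_data`, the length exposed to the cross-table lookup is no longer a stored column but the linear combination 1 * already_absorbed_bytes + sum over i of i * is_final_input_len[i]. A sketch of evaluating such a (column index, coefficient) list against one trace row, with u64 standing in for the field and an illustrative row layout:

// Sketch: a linear-combination column is a list of (column index, coefficient)
// terms evaluated against a trace row. u64 stands in for the field.
fn eval_linear_combination(row: &[u64], terms: &[(usize, u64)]) -> u64 {
    terms.iter().map(|&(col, coeff)| coeff * row[col]).sum()
}

fn main() {
    // Illustrative row layout: [already_absorbed_bytes, flag_0, flag_1, flag_2, flag_3].
    let row = [136u64, 0, 0, 0, 1]; // one-hot: the final block holds 3 bytes

    // Terms mirroring `len_col`: (already_absorbed_bytes, 1) then (flag_i, i).
    let mut terms = vec![(0usize, 1u64)];
    terms.extend((0..4).map(|i| (1 + i, i as u64)));

    assert_eq!(eval_linear_combination(&row, &terms), 139); // 136 + 3
}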
@@ -397,7 +405,6 @@ impl<F: RichField + Extendable<D>, const D: usize> KeccakSpongeStark<F, D> {
         row.segment = F::from_canonical_usize(op.base_address.segment);
         row.virt = F::from_canonical_usize(op.base_address.virt);
         row.timestamp = F::from_canonical_usize(op.timestamp);
-        row.len = F::from_canonical_usize(op.input.len());
         row.already_absorbed_bytes = F::from_canonical_usize(already_absorbed_bytes);

         row.original_rate_u32s = sponge_state[..KECCAK_RATE_U32S]
@@ -584,13 +591,6 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakSpongeS
         yield_constr.constraint_transition(
             is_dummy * (next_values.is_full_input_block + next_is_final_block),
         );
-
-        // If this is a final block, is_final_input_len implies `len - already_absorbed == i`.
-        let offset = local_values.len - already_absorbed_bytes;
-        for (i, &is_final_len) in local_values.is_final_input_len.iter().enumerate() {
-            let entry_match = offset - P::from(FE::from_canonical_usize(i));
-            yield_constr.constraint(is_final_len * entry_match);
-        }
     }

     fn eval_ext_circuit(
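The hunk above (and its circuit counterpart in the next hunk) deletes the consistency check that tied the stored `len` to the flags: for every candidate final-block length i, `is_final_input_len[i] * (len - already_absorbed_bytes - i)` had to vanish. With `len` now derived from those same flags in `ctl_looked_data`, there is no independent column left to constrain. A sketch of the deleted check over plain integers, illustrative rather than the STARK's field arithmetic:

// Sketch of the deleted constraint: for each candidate final-block length i,
// the residual is_final_input_len[i] * (len - already_absorbed_bytes - i)
// had to be zero.
fn removed_constraint_residuals(len: i64, already_absorbed: i64, flags: &[i64]) -> Vec<i64> {
    flags
        .iter()
        .enumerate()
        .map(|(i, &flag)| flag * (len - already_absorbed - i as i64))
        .collect()
}

fn main() {
    let flags = [0, 0, 0, 1, 0]; // one-hot: the final block holds 3 bytes

    // Consistent witness (len = 136 + 3): every residual vanishes.
    assert!(removed_constraint_residuals(139, 136, &flags).iter().all(|&r| r == 0));

    // A mismatched `len` column yields a nonzero residual, which is exactly
    // what this check used to reject.
    assert!(removed_constraint_residuals(140, 136, &flags).iter().any(|&r| r != 0));
}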
@@ -728,16 +728,6 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakSpongeS
             builder.mul_extension(is_dummy, tmp)
         };
         yield_constr.constraint_transition(builder, constraint);
-
-        // If this is a final block, is_final_input_len implies `len - already_absorbed == i`.
-        let offset = builder.sub_extension(local_values.len, already_absorbed_bytes);
-        for (i, &is_final_len) in local_values.is_final_input_len.iter().enumerate() {
-            let index = builder.constant_extension(F::from_canonical_usize(i).into());
-            let entry_match = builder.sub_extension(offset, index);
-
-            let constraint = builder.mul_extension(is_final_len, entry_match);
-            yield_constr.constraint(builder, constraint);
-        }
     }

     fn constraint_degree(&self) -> usize {