diff --git a/Cargo.toml b/Cargo.toml index 357a6278..f52dd5e7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,5 @@ [workspace] -members = ["ecdsa", "evm", "field", "insertion", "maybe_rayon", "plonky2", "starky", "u32", "util", "waksman"] +members = ["ecdsa", "evm", "field", "insertion", "maybe_rayon", "plonky2", "starky", "u32", "util"] [profile.release] opt-level = 3 diff --git a/README.md b/README.md index a72edd5f..e6961d5e 100644 --- a/README.md +++ b/README.md @@ -59,3 +59,4 @@ Plonky2's default hash function is Poseidon, configured with 8 full rounds, 22 p ## Links - [System Zero](https://github.com/mir-protocol/system-zero), a zkVM built on top of Starky (no longer maintained) +- [Waksman](https://github.com/mir-protocol/waksman), Plonky2 gadgets for permutation checking using Waksman networks (no longer maintained) diff --git a/waksman/Cargo.toml b/waksman/Cargo.toml deleted file mode 100644 index 98d76a52..00000000 --- a/waksman/Cargo.toml +++ /dev/null @@ -1,15 +0,0 @@ -[package] -name = "plonky2_waksman" -description = "A circuit implementation AS-Waksman networks, useful for checking permutations and sorting" -version = "0.1.0" -edition = "2021" - -[dependencies] -anyhow = "1.0.40" -array_tool = "1.0.3" -bimap = "0.6.1" -itertools = "0.10.0" -"plonky2" = { version = "0.1.0" } -"plonky2_field" = { version = "0.1.0" } -"plonky2_util" = { version = "0.1.0" } -rand = "0.8.4" diff --git a/waksman/LICENSE-APACHE b/waksman/LICENSE-APACHE deleted file mode 100644 index 1e5006dc..00000000 --- a/waksman/LICENSE-APACHE +++ /dev/null @@ -1,202 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. 
For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - diff --git a/waksman/LICENSE-MIT b/waksman/LICENSE-MIT deleted file mode 100644 index 86d690b2..00000000 --- a/waksman/LICENSE-MIT +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2022 The Plonky2 Authors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/waksman/README.md b/waksman/README.md deleted file mode 100644 index bb4e2d8a..00000000 --- a/waksman/README.md +++ /dev/null @@ -1,13 +0,0 @@ -## License - -Licensed under either of - -* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) -* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) - -at your option. - - -### Contribution - -Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. diff --git a/waksman/src/bimap.rs b/waksman/src/bimap.rs deleted file mode 100644 index 28359d9f..00000000 --- a/waksman/src/bimap.rs +++ /dev/null @@ -1,75 +0,0 @@ -use std::collections::HashMap; -use std::hash::Hash; - -use bimap::BiMap; - -/// Given two lists which are permutations of one another, creates a BiMap which maps an index in -/// one list to an index in the other list with the same associated value. -/// -/// If the lists contain duplicates, then multiple permutations with this property exist, and an -/// arbitrary one of them will be returned. -pub fn bimap_from_lists(a: Vec, b: Vec) -> BiMap { - assert_eq!(a.len(), b.len(), "Vectors differ in length"); - - let mut b_values_to_indices = HashMap::new(); - for (i, value) in b.iter().enumerate() { - b_values_to_indices - .entry(value) - .or_insert_with(Vec::new) - .push(i); - } - - let mut bimap = BiMap::new(); - for (i, value) in a.iter().enumerate() { - if let Some(j) = b_values_to_indices.get_mut(&value).and_then(Vec::pop) { - bimap.insert(i, j); - } else { - panic!("Value in first list not found in second list"); - } - } - - bimap -} - -#[cfg(test)] -mod tests { - use crate::bimap::bimap_from_lists; - - #[test] - fn empty_lists() { - let empty: Vec = Vec::new(); - let bimap = bimap_from_lists(empty.clone(), empty); - assert!(bimap.is_empty()); - } - - #[test] - fn without_duplicates() { - let bimap = bimap_from_lists(vec!['a', 'b', 'c'], vec!['b', 'c', 'a']); - assert_eq!(bimap.get_by_left(&0), Some(&2)); - assert_eq!(bimap.get_by_left(&1), Some(&0)); - assert_eq!(bimap.get_by_left(&2), Some(&1)); - } - - #[test] - fn with_duplicates() { - let first = vec!['a', 'a', 'b']; - let second = vec!['a', 'b', 'a']; - let bimap = bimap_from_lists(first.clone(), second.clone()); - for i in 0..3 { - let j = *bimap.get_by_left(&i).unwrap(); - assert_eq!(first[i], second[j]); - } - } - - #[test] - #[should_panic] - fn lengths_differ() { - bimap_from_lists(vec!['a', 'a', 'b'], vec!['a', 'b']); - } - - #[test] - #[should_panic] - fn not_a_permutation() { - bimap_from_lists(vec!['a', 'a', 'b'], vec!['a', 'b', 'b']); - } -} diff --git a/waksman/src/gates/assert_le.rs b/waksman/src/gates/assert_le.rs deleted file mode 100644 index 0213dd38..00000000 --- a/waksman/src/gates/assert_le.rs +++ /dev/null @@ -1,629 +0,0 @@ -use std::marker::PhantomData; - -use plonky2::gates::gate::Gate; -use plonky2::gates::packed_util::PackedEvaluableBase; -use plonky2::gates::util::StridedConstraintConsumer; -use plonky2::hash::hash_types::RichField; -use plonky2::iop::ext_target::ExtensionTarget; -use plonky2::iop::generator::{GeneratedValues, 
SimpleGenerator, WitnessGenerator}; -use plonky2::iop::target::Target; -use plonky2::iop::wire::Wire; -use plonky2::iop::witness::{PartitionWitness, Witness, WitnessWrite}; -use plonky2::plonk::circuit_builder::CircuitBuilder; -use plonky2::plonk::plonk_common::{reduce_with_powers, reduce_with_powers_ext_circuit}; -use plonky2::plonk::vars::{ - EvaluationTargets, EvaluationVars, EvaluationVarsBase, EvaluationVarsBaseBatch, - EvaluationVarsBasePacked, -}; -use plonky2_field::extension::Extendable; -use plonky2_field::packed::PackedField; -use plonky2_field::types::{Field, Field64}; -use plonky2_util::{bits_u64, ceil_div_usize}; - -// TODO: replace/merge this gate with `ComparisonGate`. - -/// A gate for checking that one value is less than or equal to another. -#[derive(Clone, Debug)] -pub struct AssertLessThanGate, const D: usize> { - pub(crate) num_bits: usize, - pub(crate) num_chunks: usize, - _phantom: PhantomData, -} - -impl, const D: usize> AssertLessThanGate { - pub fn new(num_bits: usize, num_chunks: usize) -> Self { - debug_assert!(num_bits < bits_u64(F::ORDER)); - Self { - num_bits, - num_chunks, - _phantom: PhantomData, - } - } - - pub fn chunk_bits(&self) -> usize { - ceil_div_usize(self.num_bits, self.num_chunks) - } - - pub fn wire_first_input(&self) -> usize { - 0 - } - - pub fn wire_second_input(&self) -> usize { - 1 - } - - pub fn wire_most_significant_diff(&self) -> usize { - 2 - } - - pub fn wire_first_chunk_val(&self, chunk: usize) -> usize { - debug_assert!(chunk < self.num_chunks); - 3 + chunk - } - - pub fn wire_second_chunk_val(&self, chunk: usize) -> usize { - debug_assert!(chunk < self.num_chunks); - 3 + self.num_chunks + chunk - } - - pub fn wire_equality_dummy(&self, chunk: usize) -> usize { - debug_assert!(chunk < self.num_chunks); - 3 + 2 * self.num_chunks + chunk - } - - pub fn wire_chunks_equal(&self, chunk: usize) -> usize { - debug_assert!(chunk < self.num_chunks); - 3 + 3 * self.num_chunks + chunk - } - - pub fn wire_intermediate_value(&self, chunk: usize) -> usize { - debug_assert!(chunk < self.num_chunks); - 3 + 4 * self.num_chunks + chunk - } -} - -impl, const D: usize> Gate for AssertLessThanGate { - fn id(&self) -> String { - format!("{self:?}") - } - - fn eval_unfiltered(&self, vars: EvaluationVars) -> Vec { - let mut constraints = Vec::with_capacity(self.num_constraints()); - - let first_input = vars.local_wires[self.wire_first_input()]; - let second_input = vars.local_wires[self.wire_second_input()]; - - // Get chunks and assert that they match - let first_chunks: Vec = (0..self.num_chunks) - .map(|i| vars.local_wires[self.wire_first_chunk_val(i)]) - .collect(); - let second_chunks: Vec = (0..self.num_chunks) - .map(|i| vars.local_wires[self.wire_second_chunk_val(i)]) - .collect(); - - let first_chunks_combined = reduce_with_powers( - &first_chunks, - F::Extension::from_canonical_usize(1 << self.chunk_bits()), - ); - let second_chunks_combined = reduce_with_powers( - &second_chunks, - F::Extension::from_canonical_usize(1 << self.chunk_bits()), - ); - - constraints.push(first_chunks_combined - first_input); - constraints.push(second_chunks_combined - second_input); - - let chunk_size = 1 << self.chunk_bits(); - - let mut most_significant_diff_so_far = F::Extension::ZERO; - - for i in 0..self.num_chunks { - // Range-check the chunks to be less than `chunk_size`. 
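// The product (chunk - 0)(chunk - 1)...(chunk - (chunk_size - 1)) vanishes exactly when the
// chunk equals one of 0, ..., chunk_size - 1, so pushing it as a constraint forces each chunk
// into [0, chunk_size). This product is also what makes the gate's `degree()` equal to
// `1 << chunk_bits()`.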
- let first_product = (0..chunk_size) - .map(|x| first_chunks[i] - F::Extension::from_canonical_usize(x)) - .product(); - let second_product = (0..chunk_size) - .map(|x| second_chunks[i] - F::Extension::from_canonical_usize(x)) - .product(); - constraints.push(first_product); - constraints.push(second_product); - - let difference = second_chunks[i] - first_chunks[i]; - let equality_dummy = vars.local_wires[self.wire_equality_dummy(i)]; - let chunks_equal = vars.local_wires[self.wire_chunks_equal(i)]; - - // Two constraints to assert that `chunks_equal` is valid. - constraints.push(difference * equality_dummy - (F::Extension::ONE - chunks_equal)); - constraints.push(chunks_equal * difference); - - // Update `most_significant_diff_so_far`. - let intermediate_value = vars.local_wires[self.wire_intermediate_value(i)]; - constraints.push(intermediate_value - chunks_equal * most_significant_diff_so_far); - most_significant_diff_so_far = - intermediate_value + (F::Extension::ONE - chunks_equal) * difference; - } - - let most_significant_diff = vars.local_wires[self.wire_most_significant_diff()]; - constraints.push(most_significant_diff - most_significant_diff_so_far); - - // Range check `most_significant_diff` to be less than `chunk_size`. - let product = (0..chunk_size) - .map(|x| most_significant_diff - F::Extension::from_canonical_usize(x)) - .product(); - constraints.push(product); - - constraints - } - - fn eval_unfiltered_base_one( - &self, - _vars: EvaluationVarsBase, - _yield_constr: StridedConstraintConsumer, - ) { - panic!("use eval_unfiltered_base_packed instead"); - } - - fn eval_unfiltered_base_batch(&self, vars_base: EvaluationVarsBaseBatch) -> Vec { - self.eval_unfiltered_base_batch_packed(vars_base) - } - - fn eval_unfiltered_circuit( - &self, - builder: &mut CircuitBuilder, - vars: EvaluationTargets, - ) -> Vec> { - let mut constraints = Vec::with_capacity(self.num_constraints()); - - let first_input = vars.local_wires[self.wire_first_input()]; - let second_input = vars.local_wires[self.wire_second_input()]; - - // Get chunks and assert that they match - let first_chunks: Vec> = (0..self.num_chunks) - .map(|i| vars.local_wires[self.wire_first_chunk_val(i)]) - .collect(); - let second_chunks: Vec> = (0..self.num_chunks) - .map(|i| vars.local_wires[self.wire_second_chunk_val(i)]) - .collect(); - - let chunk_base = builder.constant(F::from_canonical_usize(1 << self.chunk_bits())); - let first_chunks_combined = - reduce_with_powers_ext_circuit(builder, &first_chunks, chunk_base); - let second_chunks_combined = - reduce_with_powers_ext_circuit(builder, &second_chunks, chunk_base); - - constraints.push(builder.sub_extension(first_chunks_combined, first_input)); - constraints.push(builder.sub_extension(second_chunks_combined, second_input)); - - let chunk_size = 1 << self.chunk_bits(); - - let mut most_significant_diff_so_far = builder.zero_extension(); - - let one = builder.one_extension(); - // Find the chosen chunk. - for i in 0..self.num_chunks { - // Range-check the chunks to be less than `chunk_size`. 
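// Same product-vanishing range check as in `eval_unfiltered`, built here with explicit
// extension-target builder ops so the check can also be evaluated recursively in-circuit.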
- let mut first_product = one; - let mut second_product = one; - for x in 0..chunk_size { - let x_f = builder.constant_extension(F::Extension::from_canonical_usize(x)); - let first_diff = builder.sub_extension(first_chunks[i], x_f); - let second_diff = builder.sub_extension(second_chunks[i], x_f); - first_product = builder.mul_extension(first_product, first_diff); - second_product = builder.mul_extension(second_product, second_diff); - } - constraints.push(first_product); - constraints.push(second_product); - - let difference = builder.sub_extension(second_chunks[i], first_chunks[i]); - let equality_dummy = vars.local_wires[self.wire_equality_dummy(i)]; - let chunks_equal = vars.local_wires[self.wire_chunks_equal(i)]; - - // Two constraints to assert that `chunks_equal` is valid. - let diff_times_equal = builder.mul_extension(difference, equality_dummy); - let not_equal = builder.sub_extension(one, chunks_equal); - constraints.push(builder.sub_extension(diff_times_equal, not_equal)); - constraints.push(builder.mul_extension(chunks_equal, difference)); - - // Update `most_significant_diff_so_far`. - let intermediate_value = vars.local_wires[self.wire_intermediate_value(i)]; - let old_diff = builder.mul_extension(chunks_equal, most_significant_diff_so_far); - constraints.push(builder.sub_extension(intermediate_value, old_diff)); - - let not_equal = builder.sub_extension(one, chunks_equal); - let new_diff = builder.mul_extension(not_equal, difference); - most_significant_diff_so_far = builder.add_extension(intermediate_value, new_diff); - } - - let most_significant_diff = vars.local_wires[self.wire_most_significant_diff()]; - constraints - .push(builder.sub_extension(most_significant_diff, most_significant_diff_so_far)); - - // Range check `most_significant_diff` to be less than `chunk_size`. - let mut product = builder.one_extension(); - for x in 0..chunk_size { - let x_f = builder.constant_extension(F::Extension::from_canonical_usize(x)); - let diff = builder.sub_extension(most_significant_diff, x_f); - product = builder.mul_extension(product, diff); - } - constraints.push(product); - - constraints - } - - fn generators(&self, row: usize, _local_constants: &[F]) -> Vec>> { - let gen = AssertLessThanGenerator:: { - row, - gate: self.clone(), - }; - vec![Box::new(gen.adapter())] - } - - fn num_wires(&self) -> usize { - self.wire_intermediate_value(self.num_chunks - 1) + 1 - } - - fn num_constants(&self) -> usize { - 0 - } - - fn degree(&self) -> usize { - 1 << self.chunk_bits() - } - - fn num_constraints(&self) -> usize { - 4 + 5 * self.num_chunks - } -} - -impl, const D: usize> PackedEvaluableBase - for AssertLessThanGate -{ - fn eval_unfiltered_base_packed>( - &self, - vars: EvaluationVarsBasePacked
<P>
, - mut yield_constr: StridedConstraintConsumer
<P>
, - ) { - let first_input = vars.local_wires[self.wire_first_input()]; - let second_input = vars.local_wires[self.wire_second_input()]; - - // Get chunks and assert that they match - let first_chunks: Vec<_> = (0..self.num_chunks) - .map(|i| vars.local_wires[self.wire_first_chunk_val(i)]) - .collect(); - let second_chunks: Vec<_> = (0..self.num_chunks) - .map(|i| vars.local_wires[self.wire_second_chunk_val(i)]) - .collect(); - - let first_chunks_combined = reduce_with_powers( - &first_chunks, - F::from_canonical_usize(1 << self.chunk_bits()), - ); - let second_chunks_combined = reduce_with_powers( - &second_chunks, - F::from_canonical_usize(1 << self.chunk_bits()), - ); - - yield_constr.one(first_chunks_combined - first_input); - yield_constr.one(second_chunks_combined - second_input); - - let chunk_size = 1 << self.chunk_bits(); - - let mut most_significant_diff_so_far = P::ZEROS; - - for i in 0..self.num_chunks { - // Range-check the chunks to be less than `chunk_size`. - let first_product = (0..chunk_size) - .map(|x| first_chunks[i] - F::from_canonical_usize(x)) - .product(); - let second_product = (0..chunk_size) - .map(|x| second_chunks[i] - F::from_canonical_usize(x)) - .product(); - yield_constr.one(first_product); - yield_constr.one(second_product); - - let difference = second_chunks[i] - first_chunks[i]; - let equality_dummy = vars.local_wires[self.wire_equality_dummy(i)]; - let chunks_equal = vars.local_wires[self.wire_chunks_equal(i)]; - - // Two constraints to assert that `chunks_equal` is valid. - yield_constr.one(difference * equality_dummy - (P::ONES - chunks_equal)); - yield_constr.one(chunks_equal * difference); - - // Update `most_significant_diff_so_far`. - let intermediate_value = vars.local_wires[self.wire_intermediate_value(i)]; - yield_constr.one(intermediate_value - chunks_equal * most_significant_diff_so_far); - most_significant_diff_so_far = - intermediate_value + (P::ONES - chunks_equal) * difference; - } - - let most_significant_diff = vars.local_wires[self.wire_most_significant_diff()]; - yield_constr.one(most_significant_diff - most_significant_diff_so_far); - - // Range check `most_significant_diff` to be less than `chunk_size`. 
- let product = (0..chunk_size) - .map(|x| most_significant_diff - F::from_canonical_usize(x)) - .product(); - yield_constr.one(product); - } -} - -#[derive(Debug)] -struct AssertLessThanGenerator, const D: usize> { - row: usize, - gate: AssertLessThanGate, -} - -impl, const D: usize> SimpleGenerator - for AssertLessThanGenerator -{ - fn dependencies(&self) -> Vec { - let local_target = |column| Target::wire(self.row, column); - - vec![ - local_target(self.gate.wire_first_input()), - local_target(self.gate.wire_second_input()), - ] - } - - fn run_once(&self, witness: &PartitionWitness, out_buffer: &mut GeneratedValues) { - let local_wire = |column| Wire { - row: self.row, - column, - }; - - let get_local_wire = |column| witness.get_wire(local_wire(column)); - - let first_input = get_local_wire(self.gate.wire_first_input()); - let second_input = get_local_wire(self.gate.wire_second_input()); - - let first_input_u64 = first_input.to_canonical_u64(); - let second_input_u64 = second_input.to_canonical_u64(); - - debug_assert!(first_input_u64 < second_input_u64); - - let chunk_size = 1 << self.gate.chunk_bits(); - let first_input_chunks: Vec = (0..self.gate.num_chunks) - .scan(first_input_u64, |acc, _| { - let tmp = *acc % chunk_size; - *acc /= chunk_size; - Some(F::from_canonical_u64(tmp)) - }) - .collect(); - let second_input_chunks: Vec = (0..self.gate.num_chunks) - .scan(second_input_u64, |acc, _| { - let tmp = *acc % chunk_size; - *acc /= chunk_size; - Some(F::from_canonical_u64(tmp)) - }) - .collect(); - - let chunks_equal: Vec = (0..self.gate.num_chunks) - .map(|i| F::from_bool(first_input_chunks[i] == second_input_chunks[i])) - .collect(); - let equality_dummies: Vec = first_input_chunks - .iter() - .zip(second_input_chunks.iter()) - .map(|(&f, &s)| if f == s { F::ONE } else { F::ONE / (s - f) }) - .collect(); - - let mut most_significant_diff_so_far = F::ZERO; - let mut intermediate_values = Vec::new(); - for i in 0..self.gate.num_chunks { - if first_input_chunks[i] != second_input_chunks[i] { - most_significant_diff_so_far = second_input_chunks[i] - first_input_chunks[i]; - intermediate_values.push(F::ZERO); - } else { - intermediate_values.push(most_significant_diff_so_far); - } - } - let most_significant_diff = most_significant_diff_so_far; - - out_buffer.set_wire( - local_wire(self.gate.wire_most_significant_diff()), - most_significant_diff, - ); - for i in 0..self.gate.num_chunks { - out_buffer.set_wire( - local_wire(self.gate.wire_first_chunk_val(i)), - first_input_chunks[i], - ); - out_buffer.set_wire( - local_wire(self.gate.wire_second_chunk_val(i)), - second_input_chunks[i], - ); - out_buffer.set_wire( - local_wire(self.gate.wire_equality_dummy(i)), - equality_dummies[i], - ); - out_buffer.set_wire(local_wire(self.gate.wire_chunks_equal(i)), chunks_equal[i]); - out_buffer.set_wire( - local_wire(self.gate.wire_intermediate_value(i)), - intermediate_values[i], - ); - } - } -} - -#[cfg(test)] -mod tests { - use core::marker::PhantomData; - - use anyhow::Result; - use plonky2::gates::gate::Gate; - use plonky2::gates::gate_testing::{test_eval_fns, test_low_degree}; - use plonky2::hash::hash_types::HashOut; - use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; - use plonky2::plonk::vars::EvaluationVars; - use plonky2_field::extension::quartic::QuarticExtension; - use plonky2_field::goldilocks_field::GoldilocksField; - use plonky2_field::types::{Field, PrimeField64, Sample}; - use rand::Rng; - - use crate::gates::assert_le::AssertLessThanGate; - - #[test] - fn 
wire_indices() { - type AG = AssertLessThanGate; - let num_bits = 40; - let num_chunks = 5; - - let gate = AG { - num_bits, - num_chunks, - _phantom: PhantomData, - }; - - assert_eq!(gate.wire_first_input(), 0); - assert_eq!(gate.wire_second_input(), 1); - assert_eq!(gate.wire_most_significant_diff(), 2); - assert_eq!(gate.wire_first_chunk_val(0), 3); - assert_eq!(gate.wire_first_chunk_val(4), 7); - assert_eq!(gate.wire_second_chunk_val(0), 8); - assert_eq!(gate.wire_second_chunk_val(4), 12); - assert_eq!(gate.wire_equality_dummy(0), 13); - assert_eq!(gate.wire_equality_dummy(4), 17); - assert_eq!(gate.wire_chunks_equal(0), 18); - assert_eq!(gate.wire_chunks_equal(4), 22); - assert_eq!(gate.wire_intermediate_value(0), 23); - assert_eq!(gate.wire_intermediate_value(4), 27); - } - - #[test] - fn low_degree() { - let num_bits = 20; - let num_chunks = 4; - - test_low_degree::(AssertLessThanGate::<_, 4>::new( - num_bits, num_chunks, - )) - } - - #[test] - fn eval_fns() -> Result<()> { - const D: usize = 2; - type C = PoseidonGoldilocksConfig; - type F = >::F; - - let num_bits = 20; - let num_chunks = 4; - - test_eval_fns::(AssertLessThanGate::<_, D>::new(num_bits, num_chunks)) - } - - #[test] - fn test_gate_constraint() { - type F = GoldilocksField; - type FF = QuarticExtension; - const D: usize = 4; - - let num_bits = 40; - let num_chunks = 5; - let chunk_bits = num_bits / num_chunks; - - // Returns the local wires for an AssertLessThanGate given the two inputs. - let get_wires = |first_input: F, second_input: F| -> Vec { - let mut v = Vec::new(); - - let first_input_u64 = first_input.to_canonical_u64(); - let second_input_u64 = second_input.to_canonical_u64(); - - let chunk_size = 1 << chunk_bits; - let mut first_input_chunks: Vec = (0..num_chunks) - .scan(first_input_u64, |acc, _| { - let tmp = *acc % chunk_size; - *acc /= chunk_size; - Some(F::from_canonical_u64(tmp)) - }) - .collect(); - let mut second_input_chunks: Vec = (0..num_chunks) - .scan(second_input_u64, |acc, _| { - let tmp = *acc % chunk_size; - *acc /= chunk_size; - Some(F::from_canonical_u64(tmp)) - }) - .collect(); - - let mut chunks_equal: Vec = (0..num_chunks) - .map(|i| F::from_bool(first_input_chunks[i] == second_input_chunks[i])) - .collect(); - let mut equality_dummies: Vec = first_input_chunks - .iter() - .zip(second_input_chunks.iter()) - .map(|(&f, &s)| if f == s { F::ONE } else { F::ONE / (s - f) }) - .collect(); - - let mut most_significant_diff_so_far = F::ZERO; - let mut intermediate_values = Vec::new(); - for i in 0..num_chunks { - if first_input_chunks[i] != second_input_chunks[i] { - most_significant_diff_so_far = second_input_chunks[i] - first_input_chunks[i]; - intermediate_values.push(F::ZERO); - } else { - intermediate_values.push(most_significant_diff_so_far); - } - } - let most_significant_diff = most_significant_diff_so_far; - - v.push(first_input); - v.push(second_input); - v.push(most_significant_diff); - v.append(&mut first_input_chunks); - v.append(&mut second_input_chunks); - v.append(&mut equality_dummies); - v.append(&mut chunks_equal); - v.append(&mut intermediate_values); - - v.iter().map(|&x| x.into()).collect() - }; - - let mut rng = rand::thread_rng(); - let max: u64 = 1 << (num_bits - 1); - let first_input_u64 = rng.gen_range(0..max); - let second_input_u64 = { - let mut val = rng.gen_range(0..max); - while val < first_input_u64 { - val = rng.gen_range(0..max); - } - val - }; - - let first_input = F::from_canonical_u64(first_input_u64); - let second_input = 
F::from_canonical_u64(second_input_u64); - - let less_than_gate = AssertLessThanGate:: { - num_bits, - num_chunks, - _phantom: PhantomData, - }; - let less_than_vars = EvaluationVars { - local_constants: &[], - local_wires: &get_wires(first_input, second_input), - public_inputs_hash: &HashOut::rand(), - }; - assert!( - less_than_gate - .eval_unfiltered(less_than_vars) - .iter() - .all(|x| x.is_zero()), - "Gate constraints are not satisfied." - ); - - let equal_gate = AssertLessThanGate:: { - num_bits, - num_chunks, - _phantom: PhantomData, - }; - let equal_vars = EvaluationVars { - local_constants: &[], - local_wires: &get_wires(first_input, first_input), - public_inputs_hash: &HashOut::rand(), - }; - assert!( - equal_gate - .eval_unfiltered(equal_vars) - .iter() - .all(|x| x.is_zero()), - "Gate constraints are not satisfied." - ); - } -} diff --git a/waksman/src/gates/mod.rs b/waksman/src/gates/mod.rs deleted file mode 100644 index c73890b1..00000000 --- a/waksman/src/gates/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod assert_le; -pub mod switch; diff --git a/waksman/src/gates/switch.rs b/waksman/src/gates/switch.rs deleted file mode 100644 index b868916e..00000000 --- a/waksman/src/gates/switch.rs +++ /dev/null @@ -1,454 +0,0 @@ -use std::marker::PhantomData; - -use array_tool::vec::Union; -use plonky2::gates::gate::Gate; -use plonky2::gates::packed_util::PackedEvaluableBase; -use plonky2::gates::util::StridedConstraintConsumer; -use plonky2::hash::hash_types::RichField; -use plonky2::iop::ext_target::ExtensionTarget; -use plonky2::iop::generator::{GeneratedValues, WitnessGenerator}; -use plonky2::iop::target::Target; -use plonky2::iop::wire::Wire; -use plonky2::iop::witness::{PartitionWitness, Witness, WitnessWrite}; -use plonky2::plonk::circuit_builder::CircuitBuilder; -use plonky2::plonk::circuit_data::CircuitConfig; -use plonky2::plonk::vars::{ - EvaluationTargets, EvaluationVars, EvaluationVarsBase, EvaluationVarsBaseBatch, - EvaluationVarsBasePacked, -}; -use plonky2_field::extension::Extendable; -use plonky2_field::packed::PackedField; -use plonky2_field::types::Field; - -/// A gate for conditionally swapping input values based on a boolean. 
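///
/// Each copy takes two input chunks `(x, y)`, a switch boolean `b`, and two output chunks
/// `(u, v)`. Per element, the four constraints
///     b * (x - v), b * (y - u), (1 - b) * (x - u), (1 - b) * (y - v)
/// must all vanish, which gives `(u, v) = (x, y)` when `b = 0` and `(u, v) = (y, x)` when `b = 1`.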
-#[derive(Copy, Clone, Debug)] -pub struct SwitchGate, const D: usize> { - pub(crate) chunk_size: usize, - pub(crate) num_copies: usize, - _phantom: PhantomData, -} - -impl, const D: usize> SwitchGate { - pub fn new(num_copies: usize, chunk_size: usize) -> Self { - Self { - chunk_size, - num_copies, - _phantom: PhantomData, - } - } - - pub fn new_from_config(config: &CircuitConfig, chunk_size: usize) -> Self { - let num_copies = Self::max_num_copies(config.num_routed_wires, chunk_size); - Self::new(num_copies, chunk_size) - } - - pub fn max_num_copies(num_routed_wires: usize, chunk_size: usize) -> usize { - num_routed_wires / (4 * chunk_size + 1) - } - - pub fn wire_first_input(&self, copy: usize, element: usize) -> usize { - debug_assert!(element < self.chunk_size); - copy * (4 * self.chunk_size + 1) + element - } - - pub fn wire_second_input(&self, copy: usize, element: usize) -> usize { - debug_assert!(element < self.chunk_size); - copy * (4 * self.chunk_size + 1) + self.chunk_size + element - } - - pub fn wire_first_output(&self, copy: usize, element: usize) -> usize { - debug_assert!(element < self.chunk_size); - copy * (4 * self.chunk_size + 1) + 2 * self.chunk_size + element - } - - pub fn wire_second_output(&self, copy: usize, element: usize) -> usize { - debug_assert!(element < self.chunk_size); - copy * (4 * self.chunk_size + 1) + 3 * self.chunk_size + element - } - - pub fn wire_switch_bool(&self, copy: usize) -> usize { - debug_assert!(copy < self.num_copies); - copy * (4 * self.chunk_size + 1) + 4 * self.chunk_size - } -} - -impl, const D: usize> Gate for SwitchGate { - fn id(&self) -> String { - format!("{self:?}") - } - - fn eval_unfiltered(&self, vars: EvaluationVars) -> Vec { - let mut constraints = Vec::with_capacity(self.num_constraints()); - - for c in 0..self.num_copies { - let switch_bool = vars.local_wires[self.wire_switch_bool(c)]; - let not_switch = F::Extension::ONE - switch_bool; - - for e in 0..self.chunk_size { - let first_input = vars.local_wires[self.wire_first_input(c, e)]; - let second_input = vars.local_wires[self.wire_second_input(c, e)]; - let first_output = vars.local_wires[self.wire_first_output(c, e)]; - let second_output = vars.local_wires[self.wire_second_output(c, e)]; - - constraints.push(switch_bool * (first_input - second_output)); - constraints.push(switch_bool * (second_input - first_output)); - constraints.push(not_switch * (first_input - first_output)); - constraints.push(not_switch * (second_input - second_output)); - } - } - - constraints - } - - fn eval_unfiltered_base_one( - &self, - _vars: EvaluationVarsBase, - _yield_constr: StridedConstraintConsumer, - ) { - panic!("use eval_unfiltered_base_packed instead"); - } - - fn eval_unfiltered_base_batch(&self, vars_base: EvaluationVarsBaseBatch) -> Vec { - self.eval_unfiltered_base_batch_packed(vars_base) - } - - fn eval_unfiltered_circuit( - &self, - builder: &mut CircuitBuilder, - vars: EvaluationTargets, - ) -> Vec> { - let mut constraints = Vec::with_capacity(self.num_constraints()); - - let one = builder.one_extension(); - for c in 0..self.num_copies { - let switch_bool = vars.local_wires[self.wire_switch_bool(c)]; - let not_switch = builder.sub_extension(one, switch_bool); - - for e in 0..self.chunk_size { - let first_input = vars.local_wires[self.wire_first_input(c, e)]; - let second_input = vars.local_wires[self.wire_second_input(c, e)]; - let first_output = vars.local_wires[self.wire_first_output(c, e)]; - let second_output = vars.local_wires[self.wire_second_output(c, e)]; - - let 
first_switched = builder.sub_extension(first_input, second_output); - let first_switched_constraint = builder.mul_extension(switch_bool, first_switched); - constraints.push(first_switched_constraint); - - let second_switched = builder.sub_extension(second_input, first_output); - let second_switched_constraint = - builder.mul_extension(switch_bool, second_switched); - constraints.push(second_switched_constraint); - - let first_not_switched = builder.sub_extension(first_input, first_output); - let first_not_switched_constraint = - builder.mul_extension(not_switch, first_not_switched); - constraints.push(first_not_switched_constraint); - - let second_not_switched = builder.sub_extension(second_input, second_output); - let second_not_switched_constraint = - builder.mul_extension(not_switch, second_not_switched); - constraints.push(second_not_switched_constraint); - } - } - - constraints - } - - fn generators(&self, row: usize, _local_constants: &[F]) -> Vec>> { - (0..self.num_copies) - .map(|c| { - let g: Box> = Box::new(SwitchGenerator:: { - row, - gate: *self, - copy: c, - }); - g - }) - .collect() - } - - fn num_wires(&self) -> usize { - self.wire_switch_bool(self.num_copies - 1) + 1 - } - - fn num_constants(&self) -> usize { - 0 - } - - fn degree(&self) -> usize { - 2 - } - - fn num_constraints(&self) -> usize { - 4 * self.num_copies * self.chunk_size - } -} - -impl, const D: usize> PackedEvaluableBase for SwitchGate { - fn eval_unfiltered_base_packed>( - &self, - vars: EvaluationVarsBasePacked
<P>
, - mut yield_constr: StridedConstraintConsumer
<P>
, - ) { - for c in 0..self.num_copies { - let switch_bool = vars.local_wires[self.wire_switch_bool(c)]; - let not_switch = P::ONES - switch_bool; - - for e in 0..self.chunk_size { - let first_input = vars.local_wires[self.wire_first_input(c, e)]; - let second_input = vars.local_wires[self.wire_second_input(c, e)]; - let first_output = vars.local_wires[self.wire_first_output(c, e)]; - let second_output = vars.local_wires[self.wire_second_output(c, e)]; - - yield_constr.one(switch_bool * (first_input - second_output)); - yield_constr.one(switch_bool * (second_input - first_output)); - yield_constr.one(not_switch * (first_input - first_output)); - yield_constr.one(not_switch * (second_input - second_output)); - } - } - } -} - -#[derive(Debug)] -struct SwitchGenerator, const D: usize> { - row: usize, - gate: SwitchGate, - copy: usize, -} - -impl, const D: usize> SwitchGenerator { - fn in_out_dependencies(&self) -> Vec { - let local_target = |column| Target::wire(self.row, column); - - let mut deps = Vec::new(); - for e in 0..self.gate.chunk_size { - deps.push(local_target(self.gate.wire_first_input(self.copy, e))); - deps.push(local_target(self.gate.wire_second_input(self.copy, e))); - deps.push(local_target(self.gate.wire_first_output(self.copy, e))); - deps.push(local_target(self.gate.wire_second_output(self.copy, e))); - } - - deps - } - - fn in_switch_dependencies(&self) -> Vec { - let local_target = |column| Target::wire(self.row, column); - - let mut deps = Vec::new(); - for e in 0..self.gate.chunk_size { - deps.push(local_target(self.gate.wire_first_input(self.copy, e))); - deps.push(local_target(self.gate.wire_second_input(self.copy, e))); - deps.push(local_target(self.gate.wire_switch_bool(self.copy))); - } - - deps - } - - fn run_in_out(&self, witness: &PartitionWitness, out_buffer: &mut GeneratedValues) { - let local_wire = |column| Wire { - row: self.row, - column, - }; - - let get_local_wire = |column| witness.get_wire(local_wire(column)); - - let switch_bool_wire = local_wire(self.gate.wire_switch_bool(self.copy)); - - let mut first_inputs = Vec::new(); - let mut second_inputs = Vec::new(); - let mut first_outputs = Vec::new(); - let mut second_outputs = Vec::new(); - for e in 0..self.gate.chunk_size { - first_inputs.push(get_local_wire(self.gate.wire_first_input(self.copy, e))); - second_inputs.push(get_local_wire(self.gate.wire_second_input(self.copy, e))); - first_outputs.push(get_local_wire(self.gate.wire_first_output(self.copy, e))); - second_outputs.push(get_local_wire(self.gate.wire_second_output(self.copy, e))); - } - - if first_outputs == first_inputs && second_outputs == second_inputs { - out_buffer.set_wire(switch_bool_wire, F::ZERO); - } else if first_outputs == second_inputs && second_outputs == first_inputs { - out_buffer.set_wire(switch_bool_wire, F::ONE); - } else { - panic!("No permutation from given inputs to given outputs"); - } - } - - fn run_in_switch(&self, witness: &PartitionWitness, out_buffer: &mut GeneratedValues) { - let local_wire = |column| Wire { - row: self.row, - column, - }; - - let get_local_wire = |column| witness.get_wire(local_wire(column)); - - let switch_bool = get_local_wire(self.gate.wire_switch_bool(self.copy)); - for e in 0..self.gate.chunk_size { - let first_output_wire = local_wire(self.gate.wire_first_output(self.copy, e)); - let second_output_wire = local_wire(self.gate.wire_second_output(self.copy, e)); - let first_input = get_local_wire(self.gate.wire_first_input(self.copy, e)); - let second_input = 
get_local_wire(self.gate.wire_second_input(self.copy, e)); - - let (first_output, second_output) = if switch_bool == F::ZERO { - (first_input, second_input) - } else if switch_bool == F::ONE { - (second_input, first_input) - } else { - panic!("Invalid switch bool value"); - }; - - out_buffer.set_wire(first_output_wire, first_output); - out_buffer.set_wire(second_output_wire, second_output); - } - } -} - -impl, const D: usize> WitnessGenerator for SwitchGenerator { - fn watch_list(&self) -> Vec { - self.in_out_dependencies() - .union(self.in_switch_dependencies()) - } - - fn run(&self, witness: &PartitionWitness, out_buffer: &mut GeneratedValues) -> bool { - if witness.contains_all(&self.in_out_dependencies()) { - self.run_in_out(witness, out_buffer); - true - } else if witness.contains_all(&self.in_switch_dependencies()) { - self.run_in_switch(witness, out_buffer); - true - } else { - false - } - } -} - -#[cfg(test)] -mod tests { - use std::marker::PhantomData; - - use anyhow::Result; - use plonky2::gates::gate::Gate; - use plonky2::gates::gate_testing::{test_eval_fns, test_low_degree}; - use plonky2::hash::hash_types::HashOut; - use plonky2::plonk::circuit_data::CircuitConfig; - use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig}; - use plonky2::plonk::vars::EvaluationVars; - use plonky2_field::goldilocks_field::GoldilocksField; - use plonky2_field::types::{Field, Sample}; - - use crate::gates::switch::SwitchGate; - - #[test] - fn wire_indices() { - type SG = SwitchGate; - let num_copies = 3; - let chunk_size = 3; - - let gate = SG { - chunk_size, - num_copies, - _phantom: PhantomData, - }; - - assert_eq!(gate.wire_first_input(0, 0), 0); - assert_eq!(gate.wire_first_input(0, 2), 2); - assert_eq!(gate.wire_second_input(0, 0), 3); - assert_eq!(gate.wire_second_input(0, 2), 5); - assert_eq!(gate.wire_first_output(0, 0), 6); - assert_eq!(gate.wire_second_output(0, 2), 11); - assert_eq!(gate.wire_switch_bool(0), 12); - assert_eq!(gate.wire_first_input(1, 0), 13); - assert_eq!(gate.wire_second_output(1, 2), 24); - assert_eq!(gate.wire_switch_bool(1), 25); - assert_eq!(gate.wire_first_input(2, 0), 26); - assert_eq!(gate.wire_second_output(2, 2), 37); - assert_eq!(gate.wire_switch_bool(2), 38); - } - - #[test] - fn low_degree() { - test_low_degree::(SwitchGate::<_, 4>::new_from_config( - &CircuitConfig::standard_recursion_config(), - 3, - )); - } - - #[test] - fn eval_fns() -> Result<()> { - const D: usize = 2; - type C = PoseidonGoldilocksConfig; - type F = >::F; - test_eval_fns::(SwitchGate::<_, D>::new_from_config( - &CircuitConfig::standard_recursion_config(), - 3, - )) - } - - #[test] - fn test_gate_constraint() { - const D: usize = 2; - type C = PoseidonGoldilocksConfig; - type F = >::F; - type FF = >::FE; - const CHUNK_SIZE: usize = 4; - let num_copies = 3; - - /// Returns the local wires for a switch gate given the inputs and the switch booleans. 
- fn get_wires( - first_inputs: Vec>, - second_inputs: Vec>, - switch_bools: Vec, - ) -> Vec { - let num_copies = first_inputs.len(); - - let mut v = Vec::new(); - for c in 0..num_copies { - let switch = switch_bools[c]; - - let mut first_input_chunk = Vec::with_capacity(CHUNK_SIZE); - let mut second_input_chunk = Vec::with_capacity(CHUNK_SIZE); - let mut first_output_chunk = Vec::with_capacity(CHUNK_SIZE); - let mut second_output_chunk = Vec::with_capacity(CHUNK_SIZE); - for e in 0..CHUNK_SIZE { - let first_input = first_inputs[c][e]; - let second_input = second_inputs[c][e]; - let first_output = if switch { second_input } else { first_input }; - let second_output = if switch { first_input } else { second_input }; - first_input_chunk.push(first_input); - second_input_chunk.push(second_input); - first_output_chunk.push(first_output); - second_output_chunk.push(second_output); - } - v.append(&mut first_input_chunk); - v.append(&mut second_input_chunk); - v.append(&mut first_output_chunk); - v.append(&mut second_output_chunk); - - v.push(F::from_bool(switch)); - } - - v.iter().map(|&x| x.into()).collect() - } - - let first_inputs: Vec> = (0..num_copies).map(|_| F::rand_vec(CHUNK_SIZE)).collect(); - let second_inputs: Vec> = (0..num_copies).map(|_| F::rand_vec(CHUNK_SIZE)).collect(); - let switch_bools = vec![true, false, true]; - - let gate = SwitchGate:: { - chunk_size: CHUNK_SIZE, - num_copies, - _phantom: PhantomData, - }; - - let vars = EvaluationVars { - local_constants: &[], - local_wires: &get_wires(first_inputs, second_inputs, switch_bools), - public_inputs_hash: &HashOut::rand(), - }; - - assert!( - gate.eval_unfiltered(vars).iter().all(|x| x.is_zero()), - "Gate constraints are not satisfied." - ); - } -} diff --git a/waksman/src/lib.rs b/waksman/src/lib.rs deleted file mode 100644 index e9b0d4c5..00000000 --- a/waksman/src/lib.rs +++ /dev/null @@ -1,11 +0,0 @@ -#![allow(clippy::new_without_default)] -#![allow(clippy::too_many_arguments)] -#![allow(clippy::type_complexity)] -#![allow(clippy::len_without_is_empty)] -#![allow(clippy::needless_range_loop)] -#![allow(clippy::return_self_not_must_use)] - -pub mod bimap; -pub mod gates; -pub mod permutation; -pub mod sorting; diff --git a/waksman/src/permutation.rs b/waksman/src/permutation.rs deleted file mode 100644 index 57ede529..00000000 --- a/waksman/src/permutation.rs +++ /dev/null @@ -1,509 +0,0 @@ -use std::collections::BTreeMap; -use std::marker::PhantomData; - -use plonky2::field::extension::Extendable; -use plonky2::field::types::Field; -use plonky2::hash::hash_types::RichField; -use plonky2::iop::generator::{GeneratedValues, SimpleGenerator}; -use plonky2::iop::target::Target; -use plonky2::iop::witness::{PartitionWitness, Witness, WitnessWrite}; -use plonky2::plonk::circuit_builder::CircuitBuilder; - -use crate::bimap::bimap_from_lists; -use crate::gates::switch::SwitchGate; - -/// Assert that two lists of expressions evaluate to permutations of one another. -pub fn assert_permutation_circuit, const D: usize>( - builder: &mut CircuitBuilder, - a: Vec>, - b: Vec>, -) { - assert_eq!( - a.len(), - b.len(), - "Permutation must have same number of inputs and outputs" - ); - assert_eq!(a[0].len(), b[0].len(), "Chunk size must be the same"); - - let chunk_size = a[0].len(); - - match a.len() { - // Two empty lists are permutations of one another, trivially. - 0 => (), - // Two singleton lists are permutations of one another as long as their items are equal. 
- 1 => { - for e in 0..chunk_size { - builder.connect(a[0][e], b[0][e]) - } - } - 2 => assert_permutation_2x2_circuit( - builder, - a[0].clone(), - a[1].clone(), - b[0].clone(), - b[1].clone(), - ), - // For larger lists, we recursively use two smaller permutation networks. - _ => assert_permutation_helper_circuit(builder, a, b), - } -} - -/// Assert that [a1, a2] is a permutation of [b1, b2]. -fn assert_permutation_2x2_circuit, const D: usize>( - builder: &mut CircuitBuilder, - a1: Vec, - a2: Vec, - b1: Vec, - b2: Vec, -) { - assert!( - a1.len() == a2.len() && a2.len() == b1.len() && b1.len() == b2.len(), - "Chunk size must be the same" - ); - - let chunk_size = a1.len(); - - let (_switch, gate_out1, gate_out2) = create_switch_circuit(builder, a1, a2); - for e in 0..chunk_size { - builder.connect(b1[e], gate_out1[e]); - builder.connect(b2[e], gate_out2[e]); - } -} - -/// Given two input wire chunks, add a new switch to the circuit (by adding one copy to a switch -/// gate). Returns the wire for the switch boolean, and the two output wire chunks. -fn create_switch_circuit, const D: usize>( - builder: &mut CircuitBuilder, - a1: Vec, - a2: Vec, -) -> (Target, Vec, Vec) { - assert_eq!(a1.len(), a2.len(), "Chunk size must be the same"); - - let chunk_size = a1.len(); - - let gate = SwitchGate::new_from_config(&builder.config, chunk_size); - let params = vec![F::from_canonical_usize(chunk_size)]; - let (row, next_copy) = builder.find_slot(gate, ¶ms, &[]); - - let mut c = Vec::new(); - let mut d = Vec::new(); - for e in 0..chunk_size { - builder.connect( - a1[e], - Target::wire(row, gate.wire_first_input(next_copy, e)), - ); - builder.connect( - a2[e], - Target::wire(row, gate.wire_second_input(next_copy, e)), - ); - c.push(Target::wire(row, gate.wire_first_output(next_copy, e))); - d.push(Target::wire(row, gate.wire_second_output(next_copy, e))); - } - - let switch = Target::wire(row, gate.wire_switch_bool(next_copy)); - - (switch, c, d) -} - -fn assert_permutation_helper_circuit, const D: usize>( - builder: &mut CircuitBuilder, - a: Vec>, - b: Vec>, -) { - assert_eq!( - a.len(), - b.len(), - "Permutation must have same number of inputs and outputs" - ); - assert_eq!(a[0].len(), b[0].len(), "Chunk size must be the same"); - - let n = a.len(); - let even = n % 2 == 0; - - let mut child_1_a = Vec::new(); - let mut child_1_b = Vec::new(); - let mut child_2_a = Vec::new(); - let mut child_2_b = Vec::new(); - - // See Figure 8 in the AS-Waksman paper. - let a_num_switches = n / 2; - let b_num_switches = if even { - a_num_switches - 1 - } else { - a_num_switches - }; - - let mut a_switches = Vec::new(); - let mut b_switches = Vec::new(); - for i in 0..a_num_switches { - let (switch, out_1, out_2) = - create_switch_circuit(builder, a[i * 2].clone(), a[i * 2 + 1].clone()); - a_switches.push(switch); - child_1_a.push(out_1); - child_2_a.push(out_2); - } - for i in 0..b_num_switches { - let (switch, out_1, out_2) = - create_switch_circuit(builder, b[i * 2].clone(), b[i * 2 + 1].clone()); - b_switches.push(switch); - child_1_b.push(out_1); - child_2_b.push(out_2); - } - - // See Figure 8 in the AS-Waksman paper. 
- if even { - child_1_b.push(b[n - 2].clone()); - child_2_b.push(b[n - 1].clone()); - } else { - child_2_a.push(a[n - 1].clone()); - child_2_b.push(b[n - 1].clone()); - } - - assert_permutation_circuit(builder, child_1_a, child_1_b); - assert_permutation_circuit(builder, child_2_a, child_2_b); - - builder.add_simple_generator(PermutationGenerator:: { - a, - b, - a_switches, - b_switches, - _phantom: PhantomData, - }); -} - -fn route( - a_values: Vec>, - b_values: Vec>, - a_switches: Vec, - b_switches: Vec, - witness: &PartitionWitness, - out_buffer: &mut GeneratedValues, -) { - assert_eq!(a_values.len(), b_values.len()); - let n = a_values.len(); - let even = n % 2 == 0; - - // We use a bimap to match indices of values in a to indices of the same values in b. - // This means that given a wire on one side, we can easily find the matching wire on the other side. - let ab_map = bimap_from_lists(a_values, b_values); - - let switches = [a_switches, b_switches]; - - // We keep track of the new wires we've routed (after routing some wires, we need to check `witness` - // and `newly_set` instead of just `witness`. - let mut newly_set = [vec![false; n], vec![false; n]]; - - // Given a side and an index, returns the index in the other side that corresponds to the same value. - let ab_map_by_side = |side: usize, index: usize| -> usize { - *match side { - 0 => ab_map.get_by_left(&index), - 1 => ab_map.get_by_right(&index), - _ => panic!("Expected side to be 0 or 1"), - } - .unwrap() - }; - - // We maintain two maps for wires which have been routed to a particular subnetwork on one side - // of the network (left or right) but not the other. The keys are wire indices, and the values - // are subnetwork indices. - let mut partial_routes = [BTreeMap::new(), BTreeMap::new()]; - - // After we route a wire on one side, we find the corresponding wire on the other side and check - // if it still needs to be routed. If so, we add it to partial_routes. - let enqueue_other_side = |partial_routes: &mut [BTreeMap], - witness: &PartitionWitness, - newly_set: &mut [Vec], - side: usize, - this_i: usize, - subnet: bool| { - let other_side = 1 - side; - let other_i = ab_map_by_side(side, this_i); - let other_switch_i = other_i / 2; - - if other_switch_i >= switches[other_side].len() { - // The other wire doesn't go through a switch, so there's no routing to be done. - // This happens in the case of the very last wire. - return; - } - - if witness.contains(switches[other_side][other_switch_i]) - || newly_set[other_side][other_switch_i] - { - // The other switch has already been routed. - return; - } - - let other_i_sibling = 4 * other_switch_i + 1 - other_i; - if let Some(&sibling_subnet) = partial_routes[other_side].get(&other_i_sibling) { - // The other switch's sibling is already pending routing. - assert_ne!(subnet, sibling_subnet); - } else { - let opt_old_subnet = partial_routes[other_side].insert(other_i, subnet); - if let Some(old_subnet) = opt_old_subnet { - assert_eq!(subnet, old_subnet, "Routing conflict (should never happen)"); - } - } - }; - - // See Figure 8 in the AS-Waksman paper. 
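    // The bypass wires fixed in `assert_permutation_helper_circuit` force the last output(s)
    // (and, for odd `n`, the last input) into a known subnetwork. `enqueue_other_side` records
    // the wire on the opposite side carrying the same value as needing the same subnetwork,
    // which seeds `partial_routes` for the propagation loop below.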
-    if even {
-        enqueue_other_side(
-            &mut partial_routes,
-            witness,
-            &mut newly_set,
-            1,
-            n - 2,
-            false,
-        );
-        enqueue_other_side(&mut partial_routes, witness, &mut newly_set, 1, n - 1, true);
-    } else {
-        enqueue_other_side(&mut partial_routes, witness, &mut newly_set, 0, n - 1, true);
-        enqueue_other_side(&mut partial_routes, witness, &mut newly_set, 1, n - 1, true);
-    }
-
-    let route_switch = |partial_routes: &mut [BTreeMap<usize, bool>],
-                        witness: &PartitionWitness<F>,
-                        out_buffer: &mut GeneratedValues<F>,
-                        newly_set: &mut [Vec<bool>],
-                        side: usize,
-                        switch_index: usize,
-                        swap: bool| {
-        // First, we actually set the switch configuration.
-        out_buffer.set_target(switches[side][switch_index], F::from_bool(swap));
-        newly_set[side][switch_index] = true;
-
-        // Then, we enqueue the two corresponding wires on the other side of the network, to ensure
-        // that they get routed in the next step.
-        let this_i_1 = switch_index * 2;
-        let this_i_2 = this_i_1 + 1;
-        enqueue_other_side(partial_routes, witness, newly_set, side, this_i_1, swap);
-        enqueue_other_side(partial_routes, witness, newly_set, side, this_i_2, !swap);
-    };
-
-    // If `partial_routes` is empty, then we can route any switch next. For efficiency, we will
-    // simply do top-down scans (one on the left side, one on the right side) for switches which
-    // have not yet been routed. These variables represent the positions of those two scans.
-    let mut scan_index = [0, 0];
-
-    // Until both scans complete, we alternate back and forth between the left and right switch
-    // layers. We process any partially routed wires for that side, or if there aren't any, we route
-    // the next switch in our scan.
-    while scan_index[0] < switches[0].len() || scan_index[1] < switches[1].len() {
-        for side in 0..=1 {
-            if !partial_routes[side].is_empty() {
-                for (this_i, subnet) in partial_routes[side].clone().into_iter() {
-                    let this_first_switch_input = this_i % 2 == 0;
-                    let swap = this_first_switch_input == subnet;
-                    let this_switch_i = this_i / 2;
-                    route_switch(
-                        &mut partial_routes,
-                        witness,
-                        out_buffer,
-                        &mut newly_set,
-                        side,
-                        this_switch_i,
-                        swap,
-                    );
-                }
-                partial_routes[side].clear();
-            } else {
-                // We can route any switch next. Continue our scan for pending switches.
-                while scan_index[side] < switches[side].len()
-                    && (witness.contains(switches[side][scan_index[side]])
-                        || newly_set[side][scan_index[side]])
-                {
-                    scan_index[side] += 1;
-                }
-                if scan_index[side] < switches[side].len() {
-                    // Either switch configuration would work; we arbitrarily choose to not swap.
-                    route_switch(
-                        &mut partial_routes,
-                        witness,
-                        out_buffer,
-                        &mut newly_set,
-                        side,
-                        scan_index[side],
-                        false,
-                    );
-                    scan_index[side] += 1;
-                }
-            }
-        }
-    }
-}
-
-#[derive(Debug)]
-struct PermutationGenerator<F: Field> {
-    a: Vec<Vec<Target>>,
-    b: Vec<Vec<Target>>,
-    a_switches: Vec<Target>,
-    b_switches: Vec<Target>,
-    _phantom: PhantomData<F>,
-}
-
-impl<F: Field> SimpleGenerator<F> for PermutationGenerator<F> {
-    fn dependencies(&self) -> Vec<Target> {
-        self.a.iter().chain(&self.b).flatten().cloned().collect()
-    }
-
-    fn run_once(&self, witness: &PartitionWitness<F>, out_buffer: &mut GeneratedValues<F>) {
-        let a_values = self
-            .a
-            .iter()
-            .map(|chunk| chunk.iter().map(|wire| witness.get_target(*wire)).collect())
-            .collect();
-        let b_values = self
-            .b
-            .iter()
-            .map(|chunk| chunk.iter().map(|wire| witness.get_target(*wire)).collect())
-            .collect();
-        route(
-            a_values,
-            b_values,
-            self.a_switches.clone(),
-            self.b_switches.clone(),
-            witness,
-            out_buffer,
-        );
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use anyhow::Result;
-    use plonky2::field::types::{Field, Sample};
-    use plonky2::iop::witness::PartialWitness;
-    use plonky2::plonk::circuit_data::CircuitConfig;
-    use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
-    use rand::seq::SliceRandom;
-    use rand::{thread_rng, Rng};
-
-    use super::*;
-
-    fn test_permutation_good(size: usize) -> Result<()> {
-        const D: usize = 2;
-        type C = PoseidonGoldilocksConfig;
-        type F = <C as GenericConfig<D>>::F;
-
-        let config = CircuitConfig::standard_recursion_config();
-
-        let pw = PartialWitness::new();
-        let mut builder = CircuitBuilder::<F, D>::new(config);
-
-        let lst: Vec<F> = (0..size * 2).map(F::from_canonical_usize).collect();
-        let a: Vec<Vec<Target>> = lst[..]
-            .chunks(2)
-            .map(|pair| vec![builder.constant(pair[0]), builder.constant(pair[1])])
-            .collect();
-        let mut b = a.clone();
-        b.shuffle(&mut thread_rng());
-
-        assert_permutation_circuit(&mut builder, a, b);
-
-        let data = builder.build::<C>();
-        let proof = data.prove(pw)?;
-
-        data.verify(proof)
-    }
-
-    fn test_permutation_duplicates(size: usize) -> Result<()> {
-        const D: usize = 2;
-        type C = PoseidonGoldilocksConfig;
-        type F = <C as GenericConfig<D>>::F;
-
-        let config = CircuitConfig::standard_recursion_config();
-
-        let pw = PartialWitness::new();
-        let mut builder = CircuitBuilder::<F, D>::new(config);
-
-        let mut rng = thread_rng();
-        let lst: Vec<F> = (0..size * 2)
-            .map(|_| F::from_canonical_usize(rng.gen_range(0..2usize)))
-            .collect();
-        let a: Vec<Vec<Target>> = lst[..]
-            .chunks(2)
-            .map(|pair| vec![builder.constant(pair[0]), builder.constant(pair[1])])
-            .collect();
-
-        let mut b = a.clone();
-        b.shuffle(&mut thread_rng());
-
-        assert_permutation_circuit(&mut builder, a, b);
-
-        let data = builder.build::<C>();
-        let proof = data.prove(pw)?;
-
-        data.verify(proof)
-    }
-
-    fn test_permutation_bad(size: usize) -> Result<()> {
-        const D: usize = 2;
-        type C = PoseidonGoldilocksConfig;
-        type F = <C as GenericConfig<D>>::F;
-
-        let config = CircuitConfig::standard_recursion_config();
-
-        let pw = PartialWitness::new();
-        let mut builder = CircuitBuilder::<F, D>::new(config);
-
-        let lst1: Vec<F> = F::rand_vec(size * 2);
-        let lst2: Vec<F> = F::rand_vec(size * 2);
-        let a: Vec<Vec<Target>> = lst1[..]
-            .chunks(2)
-            .map(|pair| vec![builder.constant(pair[0]), builder.constant(pair[1])])
-            .collect();
-        let b: Vec<Vec<Target>> = lst2[..]
-            .chunks(2)
-            .map(|pair| vec![builder.constant(pair[0]), builder.constant(pair[1])])
-            .collect();
-
-        assert_permutation_circuit(&mut builder, a, b);
-
-        let data = builder.build::<C>();
-        data.prove(pw)?;
-
-        Ok(())
-    }
-
-    #[test]
-    fn test_permutations_duplicates() -> Result<()> {
-        for n in 2..9 {
-            test_permutation_duplicates(n)?;
-        }
-
-        Ok(())
-    }
-
-    #[test]
-    fn test_permutations_good() -> Result<()> {
-        for n in 2..9 {
-            test_permutation_good(n)?;
-        }
-
-        Ok(())
-    }
-
-    #[test]
-    #[should_panic]
-    fn test_permutation_bad_small() {
-        let size = 2;
-
-        test_permutation_bad(size).unwrap()
-    }
-
-    #[test]
-    #[should_panic]
-    fn test_permutation_bad_medium() {
-        let size = 6;
-
-        test_permutation_bad(size).unwrap()
-    }
-
-    #[test]
-    #[should_panic]
-    fn test_permutation_bad_large() {
-        let size = 10;
-
-        test_permutation_bad(size).unwrap()
-    }
-}
diff --git a/waksman/src/sorting.rs b/waksman/src/sorting.rs
deleted file mode 100644
index 571a066b..00000000
--- a/waksman/src/sorting.rs
+++ /dev/null
@@ -1,277 +0,0 @@
-use std::marker::PhantomData;
-
-use itertools::izip;
-use plonky2::field::extension::Extendable;
-use plonky2::field::types::Field;
-use plonky2::hash::hash_types::RichField;
-use plonky2::iop::generator::{GeneratedValues, SimpleGenerator};
-use plonky2::iop::target::{BoolTarget, Target};
-use plonky2::iop::witness::{PartitionWitness, Witness, WitnessWrite};
-use plonky2::plonk::circuit_builder::CircuitBuilder;
-use plonky2_util::ceil_div_usize;
-
-use crate::gates::assert_le::AssertLessThanGate;
-use crate::permutation::assert_permutation_circuit;
-
-pub struct MemoryOp<F: Field> {
-    is_write: bool,
-    address: F,
-    timestamp: F,
-    value: F,
-}
-
-#[derive(Clone, Debug)]
-pub struct MemoryOpTarget {
-    is_write: BoolTarget,
-    address: Target,
-    timestamp: Target,
-    value: Target,
-}
-
-pub fn assert_permutation_memory_ops_circuit<F: RichField + Extendable<D>, const D: usize>(
-    builder: &mut CircuitBuilder<F, D>,
-    a: &[MemoryOpTarget],
-    b: &[MemoryOpTarget],
-) {
-    let a_chunks: Vec<Vec<Target>> = a
-        .iter()
-        .map(|op| vec![op.address, op.timestamp, op.is_write.target, op.value])
-        .collect();
-    let b_chunks: Vec<Vec<Target>> = b
-        .iter()
-        .map(|op| vec![op.address, op.timestamp, op.is_write.target, op.value])
-        .collect();
-
-    assert_permutation_circuit(builder, a_chunks, b_chunks);
-}
-
-/// Add an AssertLessThanGate to assert that `lhs` is less than `rhs`, where their values are at most `bits` bits.
-pub fn assert_le_circuit<F: RichField + Extendable<D>, const D: usize>(
-    builder: &mut CircuitBuilder<F, D>,
-    lhs: Target,
-    rhs: Target,
-    bits: usize,
-    num_chunks: usize,
-) {
-    let gate = AssertLessThanGate::new(bits, num_chunks);
-    let row = builder.add_gate(gate.clone(), vec![]);
-
-    builder.connect(Target::wire(row, gate.wire_first_input()), lhs);
-    builder.connect(Target::wire(row, gate.wire_second_input()), rhs);
-}
-
-/// Sort memory operations by address value, then by timestamp value.
-/// This is done by combining address and timestamp into one field element (using their given bit lengths).
-pub fn sort_memory_ops_circuit<F: RichField + Extendable<D>, const D: usize>(
-    builder: &mut CircuitBuilder<F, D>,
-    ops: &[MemoryOpTarget],
-    address_bits: usize,
-    timestamp_bits: usize,
-) -> Vec<MemoryOpTarget> {
-    let n = ops.len();
-
-    let combined_bits = address_bits + timestamp_bits;
-    let chunk_bits = 3;
-    let num_chunks = ceil_div_usize(combined_bits, chunk_bits);
-
-    // This is safe because `assert_permutation` will force these targets (in the output list) to match the boolean values from the input list.
-    let is_write_targets: Vec<_> = builder
-        .add_virtual_targets(n)
-        .iter()
-        .map(|&t| BoolTarget::new_unsafe(t))
-        .collect();
-
-    let address_targets = builder.add_virtual_targets(n);
-    let timestamp_targets = builder.add_virtual_targets(n);
-    let value_targets = builder.add_virtual_targets(n);
-
-    let output_targets: Vec<_> = izip!(
-        is_write_targets,
-        address_targets,
-        timestamp_targets,
-        value_targets
-    )
-    .map(|(i, a, t, v)| MemoryOpTarget {
-        is_write: i,
-        address: a,
-        timestamp: t,
-        value: v,
-    })
-    .collect();
-
-    let two_n = builder.constant(F::from_canonical_usize(1 << timestamp_bits));
-    let address_timestamp_combined: Vec<_> = output_targets
-        .iter()
-        .map(|op| builder.mul_add(op.address, two_n, op.timestamp))
-        .collect();
-
-    for i in 1..n {
-        assert_le_circuit(
-            builder,
-            address_timestamp_combined[i - 1],
-            address_timestamp_combined[i],
-            combined_bits,
-            num_chunks,
-        );
-    }
-
-    assert_permutation_memory_ops_circuit(builder, ops, &output_targets);
-
-    builder.add_simple_generator(MemoryOpSortGenerator::<F, D> {
-        input_ops: ops.to_vec(),
-        output_ops: output_targets.clone(),
-        _phantom: PhantomData,
-    });
-
-    output_targets
-}
-
-#[derive(Debug)]
-struct MemoryOpSortGenerator<F: RichField + Extendable<D>, const D: usize> {
-    input_ops: Vec<MemoryOpTarget>,
-    output_ops: Vec<MemoryOpTarget>,
-    _phantom: PhantomData<F>,
-}
-
-impl<F: RichField + Extendable<D>, const D: usize> SimpleGenerator<F>
-    for MemoryOpSortGenerator<F, D>
-{
-    fn dependencies(&self) -> Vec<Target> {
-        self.input_ops
-            .iter()
-            .flat_map(|op| vec![op.is_write.target, op.address, op.timestamp, op.value])
-            .collect()
-    }
-
-    fn run_once(&self, witness: &PartitionWitness<F>, out_buffer: &mut GeneratedValues<F>) {
-        let n = self.input_ops.len();
-        debug_assert!(self.output_ops.len() == n);
-
-        let mut ops: Vec<_> = self
-            .input_ops
-            .iter()
-            .map(|op| {
-                let is_write = witness.get_bool_target(op.is_write);
-                let address = witness.get_target(op.address);
-                let timestamp = witness.get_target(op.timestamp);
-                let value = witness.get_target(op.value);
-                MemoryOp {
-                    is_write,
-                    address,
-                    timestamp,
-                    value,
-                }
-            })
-            .collect();
-
-        ops.sort_unstable_by_key(|op| {
-            (
-                op.address.to_canonical_u64(),
-                op.timestamp.to_canonical_u64(),
-            )
-        });
-
-        for (op, out_op) in ops.iter().zip(&self.output_ops) {
-            out_buffer.set_target(out_op.is_write.target, F::from_bool(op.is_write));
-            out_buffer.set_target(out_op.address, op.address);
-            out_buffer.set_target(out_op.timestamp, op.timestamp);
-            out_buffer.set_target(out_op.value, op.value);
-        }
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use anyhow::Result;
-    use plonky2::field::types::{Field, PrimeField64, Sample};
-    use plonky2::iop::witness::PartialWitness;
-    use plonky2::plonk::circuit_data::CircuitConfig;
-    use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
-    use rand::{thread_rng, Rng};
-
-    use super::*;
-
-    fn test_sorting(size: usize, address_bits: usize, timestamp_bits: usize) -> Result<()> {
-        const D: usize = 2;
-        type C = PoseidonGoldilocksConfig;
-        type F = <C as GenericConfig<D>>::F;
-
-        let config = CircuitConfig::standard_recursion_config();
-
-        let mut pw = PartialWitness::new();
-        let mut builder = CircuitBuilder::<F, D>::new(config);
-
-        let mut rng = thread_rng();
-        let is_write_vals: Vec<_> = (0..size).map(|_| rng.gen_range(0..2) != 0).collect();
-        let address_vals: Vec<_> = (0..size)
-            .map(|_| F::from_canonical_u64(rng.gen_range(0..1 << address_bits as u64)))
-            .collect();
-        let timestamp_vals: Vec<_> = (0..size)
-            .map(|_| F::from_canonical_u64(rng.gen_range(0..1 << timestamp_bits as u64)))
-            .collect();
-        let value_vals: Vec<_> = (0..size).map(|_| F::rand()).collect();
-
-        let input_ops: Vec<MemoryOpTarget> = izip!(
-            is_write_vals.clone(),
-            address_vals.clone(),
-            timestamp_vals.clone(),
-            value_vals.clone()
-        )
-        .map(|(is_write, address, timestamp, value)| MemoryOpTarget {
-            is_write: builder.constant_bool(is_write),
-            address: builder.constant(address),
-            timestamp: builder.constant(timestamp),
-            value: builder.constant(value),
-        })
-        .collect();
-
-        let combined_vals_u64: Vec<_> = timestamp_vals
-            .iter()
-            .zip(&address_vals)
-            .map(|(&t, &a)| (a.to_canonical_u64() << timestamp_bits as u64) + t.to_canonical_u64())
-            .collect();
-        let mut input_ops_and_keys: Vec<_> =
-            izip!(is_write_vals, address_vals, timestamp_vals, value_vals)
-                .zip(combined_vals_u64)
-                .collect::<Vec<_>>();
-        input_ops_and_keys.sort_by_key(|(_, val)| *val);
-        let input_ops_sorted: Vec<_> = input_ops_and_keys.iter().map(|(x, _)| x).collect();
-
-        let output_ops = sort_memory_ops_circuit(
-            &mut builder,
-            input_ops.as_slice(),
-            address_bits,
-            timestamp_bits,
-        );
-
-        for i in 0..size {
-            pw.set_bool_target(output_ops[i].is_write, input_ops_sorted[i].0);
-            pw.set_target(output_ops[i].address, input_ops_sorted[i].1);
-            pw.set_target(output_ops[i].timestamp, input_ops_sorted[i].2);
-            pw.set_target(output_ops[i].value, input_ops_sorted[i].3);
-        }
-
-        let data = builder.build::<C>();
-        let proof = data.prove(pw).unwrap();
-
-        data.verify(proof)
-    }
-
-    #[test]
-    fn test_sorting_small() -> Result<()> {
-        let size = 5;
-        let address_bits = 20;
-        let timestamp_bits = 20;
-
-        test_sorting(size, address_bits, timestamp_bits)
-    }
-
-    #[test]
-    fn test_sorting_large() -> Result<()> {
-        let size = 20;
-        let address_bits = 20;
-        let timestamp_bits = 20;
-
-        test_sorting(size, address_bits, timestamp_bits)
-    }
-}