From c8c3cc9a8ffb8cfd8e2820ab13dce3651c1eb835 Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Wed, 13 Jul 2022 18:48:25 +0200 Subject: [PATCH 01/15] Files shuffling --- evm/src/cpu/kernel/aggregator.rs | 178 +--- evm/src/cpu/kernel/asm/ecrecover.asm | 0 .../cpu/kernel/asm/secp256k1/curve_add.asm | 340 ++++++++ .../cpu/kernel/asm/secp256k1/curve_mul.asm | 0 evm/src/cpu/kernel/asm/secp256k1/moddiv.asm | 788 ++++++++++++++++++ evm/src/cpu/kernel/mod.rs | 2 + evm/src/cpu/kernel/tests/curve_ops.rs | 134 +++ evm/src/cpu/kernel/tests/exp.rs | 44 + evm/src/cpu/kernel/tests/mod.rs | 14 + 9 files changed, 1326 insertions(+), 174 deletions(-) create mode 100644 evm/src/cpu/kernel/asm/ecrecover.asm create mode 100644 evm/src/cpu/kernel/asm/secp256k1/curve_add.asm create mode 100644 evm/src/cpu/kernel/asm/secp256k1/curve_mul.asm create mode 100644 evm/src/cpu/kernel/asm/secp256k1/moddiv.asm create mode 100644 evm/src/cpu/kernel/tests/curve_ops.rs create mode 100644 evm/src/cpu/kernel/tests/exp.rs create mode 100644 evm/src/cpu/kernel/tests/mod.rs diff --git a/evm/src/cpu/kernel/aggregator.rs b/evm/src/cpu/kernel/aggregator.rs index 2b96aaf3..418c0a22 100644 --- a/evm/src/cpu/kernel/aggregator.rs +++ b/evm/src/cpu/kernel/aggregator.rs @@ -22,6 +22,10 @@ pub(crate) fn combined_kernel() -> Kernel { include_str!("asm/curve_mul.asm"), include_str!("asm/curve_add.asm"), include_str!("asm/moddiv.asm"), + // include_str!("asm/secp256k1/curve_mul.asm"), + // include_str!("asm/secp256k1/curve_add.asm"), + // include_str!("asm/secp256k1/moddiv.asm"), + include_str!("asm/ecrecover.asm"), include_str!("asm/storage_read.asm"), include_str!("asm/storage_write.asm"), ]; @@ -32,14 +36,7 @@ pub(crate) fn combined_kernel() -> Kernel { #[cfg(test)] mod tests { - use std::str::FromStr; - - use anyhow::Result; - use ethereum_types::U256; - use rand::{thread_rng, Rng}; - use crate::cpu::kernel::aggregator::combined_kernel; - use crate::cpu::kernel::interpreter::run; #[test] fn make_kernel() { @@ -47,171 +44,4 @@ mod tests { let kernel = combined_kernel(); println!("Kernel size: {} bytes", kernel.code.len()); } - - fn u256ify<'a>(hexes: impl IntoIterator) -> Result> { - Ok(hexes - .into_iter() - .map(U256::from_str) - .collect::, _>>()?) - } - #[test] - fn test_exp() -> Result<()> { - // Make sure we can parse and assemble the entire kernel. 
- let kernel = combined_kernel(); - let exp = kernel.global_labels["exp"]; - let mut rng = thread_rng(); - let a = U256([0; 4].map(|_| rng.gen())); - let b = U256([0; 4].map(|_| rng.gen())); - - // Random input - let initial_stack = vec![U256::from_str("0xdeadbeef")?, b, a]; - let stack_with_kernel = run(&kernel.code, exp, initial_stack); - let initial_stack = vec![b, a]; - let code = [0xa, 0x63, 0xde, 0xad, 0xbe, 0xef, 0x56]; // EXP, PUSH4 deadbeef, JUMP - let stack_with_opcode = run(&code, 0, initial_stack); - assert_eq!(stack_with_kernel, stack_with_opcode); - - // 0 base - let initial_stack = vec![U256::from_str("0xdeadbeef")?, b, U256::zero()]; - let stack_with_kernel = run(&kernel.code, exp, initial_stack); - let initial_stack = vec![b, U256::zero()]; - let code = [0xa, 0x63, 0xde, 0xad, 0xbe, 0xef, 0x56]; // EXP, PUSH4 deadbeef, JUMP - let stack_with_opcode = run(&code, 0, initial_stack); - assert_eq!(stack_with_kernel, stack_with_opcode); - - // 0 exponent - let initial_stack = vec![U256::from_str("0xdeadbeef")?, U256::zero(), a]; - let stack_with_kernel = run(&kernel.code, exp, initial_stack); - let initial_stack = vec![U256::zero(), a]; - let code = [0xa, 0x63, 0xde, 0xad, 0xbe, 0xef, 0x56]; // EXP, PUSH4 deadbeef, JUMP - let stack_with_opcode = run(&code, 0, initial_stack); - assert_eq!(stack_with_kernel, stack_with_opcode); - - Ok(()) - } - - #[test] - fn test_ec_ops() -> Result<()> { - // Make sure we can parse and assemble the entire kernel. - let kernel = combined_kernel(); - let ec_add = kernel.global_labels["ec_add"]; - let ec_double = kernel.global_labels["ec_double"]; - let ec_mul = kernel.global_labels["ec_mul"]; - let identity = ("0x0", "0x0"); - let invalid = ("0x0", "0x3"); // Not on curve - let point0 = ( - "0x1feee7ec986e198890cb83be8b8ba09ee953b3f149db6d9bfdaa5c308a33e58d", - "0x2051cc9a9edd46231604fd88f351e95ec72a285be93e289ac59cb48561efb2c6", - ); - let point1 = ( - "0x15b64d0a5f329fb672029298be8050f444626e6de11903caffa74b388075be1b", - "0x2d9e07340bd5cd7b70687b98f2500ff930a89a30d7b6a3e04b1b4d345319d234", - ); - // point2 = point0 + point1 - let point2 = ( - "0x18659c0e0a8fedcb8747cf463fc7cfa05f667d84e771d0a9521fc1a550688f0c", - "0x283ed10b42703e187e7a808aeb45c6b457bc4cc7d704e53b3348a1e3b0bfa55b", - ); - // point3 = 2 * point0 - let point3 = ( - "0x17da2b7b1a01c8dfdf0f5a6415833c7d755d219aa7e2c4cd0ac83d87d0ca4217", - "0xc9ace9de14aac8114541b50c19320eb40f0eeac3621526d9e34dbcf4c3a6c0f", - ); - let s = "0xabb2a34c0e7956cfe6cef9ddb7e810c45ea19a6ebadd79c21959af09f5ba480a"; - // point4 = s * point0 - let point4 = ( - "0xe519344959cc17021fe98878f947f5c1b1675325533a620c1684cfa6367e6c0", - "0x7496a7575b0b6a821e19ce780ecc3e0b156e605327798693defeb9f265b7a6f", - ); - - // Standard addition #1 - let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0, point1.1, point1.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); - assert_eq!(stack, u256ify([point2.1, point2.0])?); - // Standard addition #2 - let initial_stack = u256ify(["0xdeadbeef", point1.1, point1.0, point0.1, point0.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); - assert_eq!(stack, u256ify([point2.1, point2.0])?); - - // Standard doubling #1 - let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0, point0.1, point0.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); - assert_eq!(stack, u256ify([point3.1, point3.0])?); - // Standard doubling #2 - let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0])?; - let stack = run(&kernel.code, ec_double, initial_stack); 
- assert_eq!(stack, u256ify([point3.1, point3.0])?); - // Standard doubling #3 - let initial_stack = u256ify(["0xdeadbeef", "0x2", point0.1, point0.0])?; - let stack = run(&kernel.code, ec_mul, initial_stack); - assert_eq!(stack, u256ify([point3.1, point3.0])?); - - // Addition with identity #1 - let initial_stack = u256ify(["0xdeadbeef", identity.1, identity.0, point1.1, point1.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); - assert_eq!(stack, u256ify([point1.1, point1.0])?); - // Addition with identity #2 - let initial_stack = u256ify(["0xdeadbeef", point1.1, point1.0, identity.1, identity.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); - assert_eq!(stack, u256ify([point1.1, point1.0])?); - // Addition with identity #3 - let initial_stack = - u256ify(["0xdeadbeef", identity.1, identity.0, identity.1, identity.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); - assert_eq!(stack, u256ify([identity.1, identity.0])?); - - // Addition with invalid point(s) #1 - let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0, invalid.1, invalid.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); - assert_eq!(stack, vec![U256::MAX, U256::MAX]); - // Addition with invalid point(s) #2 - let initial_stack = u256ify(["0xdeadbeef", invalid.1, invalid.0, point0.1, point0.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); - assert_eq!(stack, vec![U256::MAX, U256::MAX]); - // Addition with invalid point(s) #3 - let initial_stack = u256ify(["0xdeadbeef", invalid.1, invalid.0, identity.1, identity.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); - assert_eq!(stack, vec![U256::MAX, U256::MAX]); - // Addition with invalid point(s) #4 - let initial_stack = u256ify(["0xdeadbeef", invalid.1, invalid.0, invalid.1, invalid.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); - assert_eq!(stack, vec![U256::MAX, U256::MAX]); - - // Scalar multiplication #1 - let initial_stack = u256ify(["0xdeadbeef", s, point0.1, point0.0])?; - let stack = run(&kernel.code, ec_mul, initial_stack); - assert_eq!(stack, u256ify([point4.1, point4.0])?); - // Scalar multiplication #2 - let initial_stack = u256ify(["0xdeadbeef", "0x0", point0.1, point0.0])?; - let stack = run(&kernel.code, ec_mul, initial_stack); - assert_eq!(stack, u256ify([identity.1, identity.0])?); - // Scalar multiplication #3 - let initial_stack = u256ify(["0xdeadbeef", "0x1", point0.1, point0.0])?; - let stack = run(&kernel.code, ec_mul, initial_stack); - assert_eq!(stack, u256ify([point0.1, point0.0])?); - // Scalar multiplication #4 - let initial_stack = u256ify(["0xdeadbeef", s, identity.1, identity.0])?; - let stack = run(&kernel.code, ec_mul, initial_stack); - assert_eq!(stack, u256ify([identity.1, identity.0])?); - // Scalar multiplication #5 - let initial_stack = u256ify(["0xdeadbeef", s, invalid.1, invalid.0])?; - let stack = run(&kernel.code, ec_mul, initial_stack); - assert_eq!(stack, vec![U256::MAX, U256::MAX]); - - // Multiple calls - let ec_mul_hex = format!("0x{:x}", ec_mul); - let initial_stack = u256ify([ - "0xdeadbeef", - s, - &ec_mul_hex, - identity.1, - identity.0, - point0.1, - point0.0, - ])?; - let stack = run(&kernel.code, ec_add, initial_stack); - assert_eq!(stack, u256ify([point4.1, point4.0])?); - - Ok(()) - } } diff --git a/evm/src/cpu/kernel/asm/ecrecover.asm b/evm/src/cpu/kernel/asm/ecrecover.asm new file mode 100644 index 00000000..e69de29b diff --git a/evm/src/cpu/kernel/asm/secp256k1/curve_add.asm b/evm/src/cpu/kernel/asm/secp256k1/curve_add.asm new 
file mode 100644
index 00000000..c1f1cf46
--- /dev/null
+++ b/evm/src/cpu/kernel/asm/secp256k1/curve_add.asm
@@ -0,0 +1,340 @@
+// #define N 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 // Secp256k1 scalar field order
+
+// Secp256k1 elliptic curve addition.
+// Uses the standard affine addition formula.
+global ec_add_secp:
+    JUMPDEST
+    // stack: x0, y0, x1, y1, retdest
+
+    // Check if points are valid Secp256k1 points.
+    DUP2
+    // stack: y0, x0, y0, x1, y1, retdest
+    DUP2
+    // stack: x0, y0, x0, y0, x1, y1, retdest
+    %ec_check_secp
+    // stack: isValid(x0, y0), x0, y0, x1, y1, retdest
+    DUP5
+    // stack: y1, isValid(x0, y0), x0, y0, x1, y1, retdest
+    DUP5
+    // stack: x1, y1, isValid(x0, y0), x0, y0, x1, y1, retdest
+    %ec_check_secp
+    // stack: isValid(x1, y1), isValid(x0, y0), x0, y0, x1, y1, retdest
+    AND
+    // stack: isValid(x1, y1) & isValid(x0, y0), x0, y0, x1, y1, retdest
+    %jumpi(ec_add_valid_points_secp)
+    // stack: x0, y0, x1, y1, retdest
+
+    // Otherwise return
+    %pop4
+    // stack: retdest
+    %ec_invalid_input
+
+// Secp256k1 elliptic curve addition.
+// Assumption: (x0,y0) and (x1,y1) are valid points.
+global ec_add_valid_points_secp:
+    JUMPDEST
+    // stack: x0, y0, x1, y1, retdest
+
+    // Check if the first point is the identity.
+    DUP2
+    // stack: y0, x0, y0, x1, y1, retdest
+    DUP2
+    // stack: x0, y0, x0, y0, x1, y1, retdest
+    %ec_isidentity
+    // stack: (x0,y0)==(0,0), x0, y0, x1, y1, retdest
+    %jumpi(ec_add_first_zero)
+    // stack: x0, y0, x1, y1, retdest
+
+    // Check if the second point is the identity.
+    DUP4
+    // stack: y1, x0, y0, x1, y1, retdest
+    DUP4
+    // stack: x1, y1, x0, y0, x1, y1, retdest
+    %ec_isidentity
+    // stack: (x1,y1)==(0,0), x0, y0, x1, y1, retdest
+    %jumpi(ec_add_snd_zero)
+    // stack: x0, y0, x1, y1, retdest
+
+    // Check if both points have the same x-coordinate.
+    DUP3
+    // stack: x1, x0, y0, x1, y1, retdest
+    DUP2
+    // stack: x0, x1, x0, y0, x1, y1, retdest
+    EQ
+    // stack: x0 == x1, x0, y0, x1, y1, retdest
+    %jumpi(ec_add_equal_first_coord)
+    // stack: x0, y0, x1, y1, retdest
+
+    // Otherwise, we can use the standard formula.
+    // Compute lambda = (y0 - y1)/(x0 - x1)
+    DUP4
+    // stack: y1, x0, y0, x1, y1, retdest
+    DUP3
+    // stack: y0, y1, x0, y0, x1, y1, retdest
+    %submod
+    // stack: y0 - y1, x0, y0, x1, y1, retdest
+    DUP4
+    // stack: x1, y0 - y1, x0, y0, x1, y1, retdest
+    DUP3
+    // stack: x0, x1, y0 - y1, x0, y0, x1, y1, retdest
+    %submod
+    // stack: x0 - x1, y0 - y1, x0, y0, x1, y1, retdest
+    %moddiv_secp
+    // stack: lambda, x0, y0, x1, y1, retdest
+    %jump(ec_add_valid_points_with_lambda)
+
+// Secp256k1 elliptic curve addition.
+// Assumption: (x0,y0) == (0,0)
+ec_add_first_zero:
+    JUMPDEST
+    // stack: x0, y0, x1, y1, retdest
+
+    // Just return (x1,y1)
+    %pop2
+    // stack: x1, y1, retdest
+    SWAP1
+    // stack: y1, x1, retdest
+    SWAP2
+    // stack: retdest, x1, y1
+    JUMP
+
+// Secp256k1 elliptic curve addition.
+// Assumption: (x1,y1) == (0,0)
+ec_add_snd_zero:
+    JUMPDEST
+    // stack: x0, y0, x1, y1, retdest
+
+    // Just return (x0,y0)
+    SWAP2
+    // stack: x1, y0, x0, y1, retdest
+    POP
+    // stack: y0, x0, y1, retdest
+    SWAP2
+    // stack: y1, x0, y0, retdest
+    POP
+    // stack: x0, y0, retdest
+    SWAP1
+    // stack: y0, x0, retdest
+    SWAP2
+    // stack: retdest, x0, y0
+    JUMP
+
+// Secp256k1 elliptic curve addition.
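+// Given lambda, computes x2 = lambda^2 - x1 - x0 and y2 = lambda*(x1 - x2) - y1.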
+// Assumption: lambda = (y0 - y1)/(x0 - x1)
+ec_add_valid_points_with_lambda:
+    JUMPDEST
+    // stack: lambda, x0, y0, x1, y1, retdest
+
+    // Compute x2 = lambda^2 - x1 - x0
+    DUP2
+    // stack: x0, lambda, x0, y0, x1, y1, retdest
+    DUP5
+    // stack: x1, x0, lambda, x0, y0, x1, y1, retdest
+    %secp_base
+    // stack: N, x1, x0, lambda, x0, y0, x1, y1, retdest
+    DUP4
+    // stack: lambda, N, x1, x0, lambda, x0, y0, x1, y1, retdest
+    DUP1
+    // stack: lambda, lambda, N, x1, x0, lambda, x0, y0, x1, y1, retdest
+    MULMOD
+    // stack: lambda^2, x1, x0, lambda, x0, y0, x1, y1, retdest
+    %submod
+    // stack: lambda^2 - x1, x0, lambda, x0, y0, x1, y1, retdest
+    %submod
+    // stack: x2, lambda, x0, y0, x1, y1, retdest
+
+    // Compute y2 = lambda*(x1 - x2) - y1
+    %secp_base
+    // stack: N, x2, lambda, x0, y0, x1, y1, retdest
+    DUP2
+    // stack: x2, N, x2, lambda, x0, y0, x1, y1, retdest
+    DUP7
+    // stack: x1, x2, N, x2, lambda, x0, y0, x1, y1, retdest
+    %submod
+    // stack: x1 - x2, N, x2, lambda, x0, y0, x1, y1, retdest
+    DUP4
+    // stack: lambda, x1 - x2, N, x2, lambda, x0, y0, x1, y1, retdest
+    MULMOD
+    // stack: lambda * (x1 - x2), x2, lambda, x0, y0, x1, y1, retdest
+    DUP7
+    // stack: y1, lambda * (x1 - x2), x2, lambda, x0, y0, x1, y1, retdest
+    SWAP1
+    // stack: lambda * (x1 - x2), y1, x2, lambda, x0, y0, x1, y1, retdest
+    %submod
+    // stack: y2, x2, lambda, x0, y0, x1, y1, retdest
+
+    // Return x2,y2
+    SWAP5
+    // stack: x1, x2, lambda, x0, y0, y2, y1, retdest
+    POP
+    // stack: x2, lambda, x0, y0, y2, y1, retdest
+    SWAP5
+    // stack: y1, lambda, x0, y0, y2, x2, retdest
+    %pop4
+    // stack: y2, x2, retdest
+    SWAP2
+    // stack: retdest, x2, y2
+    JUMP
+
+// Secp256k1 elliptic curve addition.
+// Assumption: (x0,y0) and (x1,y1) are valid points and x0 == x1
+ec_add_equal_first_coord:
+    JUMPDEST
+    // stack: x0, y0, x1, y1, retdest with x0 == x1
+
+    // Check if the points are equal
+    DUP2
+    // stack: y0, x0, y0, x1, y1, retdest
+    DUP5
+    // stack: y1, y0, x0, y0, x1, y1, retdest
+    EQ
+    // stack: y1 == y0, x0, y0, x1, y1, retdest
+    %jumpi(ec_add_equal_points)
+    // stack: x0, y0, x1, y1, retdest
+
+    // Otherwise, one is the negation of the other so we can return (0,0).
+    %pop4
+    // stack: retdest
+    PUSH 0
+    // stack: 0, retdest
+    PUSH 0
+    // stack: 0, 0, retdest
+    SWAP2
+    // stack: retdest, 0, 0
+    JUMP
+
+
+// Secp256k1 elliptic curve addition.
+// Assumption: x0 == x1 and y0 == y1
+// Standard doubling formula.
+ec_add_equal_points:
+    JUMPDEST
+    // stack: x0, y0, x1, y1, retdest
+
+    // Compute lambda = 3/2 * x0^2 / y0
+    %secp_base
+    // stack: N, x0, y0, x1, y1, retdest
+    %secp_base
+    // stack: N, N, x0, y0, x1, y1, retdest
+    DUP3
+    // stack: x0, N, N, x0, y0, x1, y1, retdest
+    DUP1
+    // stack: x0, x0, N, N, x0, y0, x1, y1, retdest
+    MULMOD
+    // stack: x0^2, N, x0, y0, x1, y1, retdest
+    PUSH 0x7fffffffffffffffffffffffffffffffffffffffffffffffffffffff7ffffe19 // 3/2 in the base field
+    // stack: 3/2, x0^2, N, x0, y0, x1, y1, retdest
+    MULMOD
+    // stack: 3/2 * x0^2, x0, y0, x1, y1, retdest
+    DUP3
+    // stack: y0, 3/2 * x0^2, x0, y0, x1, y1, retdest
+    %moddiv_secp
+    // stack: lambda, x0, y0, x1, y1, retdest
+    %jump(ec_add_valid_points_with_lambda)
+
+// Secp256k1 elliptic curve doubling.
+// Assumption: (x0,y0) is a valid point.
+// Standard doubling formula.
+global ec_double_secp:
+    JUMPDEST
+    // stack: x0, y0, retdest
+    DUP2
+    // stack: y0, x0, y0, retdest
+    DUP2
+    // stack: x0, y0, x0, y0, retdest
+    %jump(ec_add_equal_points)
+
+// Push the order of the Secp256k1 scalar field.
+%macro secp_base
+    PUSH 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
+%endmacro
+
+// Assumption: x, y < N and 2N < 2^256.
+// Note: 2N < 2^256 does not hold for the Secp256k1 base field.
+%macro submod_secp
+    // stack: x, y
+    SWAP1
+    // stack: y, x
+    DUP2
+    // stack: x, y, x
+    SUB
+    // stack: x - y, x
+    DUP1
+    // stack: x - y, x - y, x
+    SWAP2
+    // stack: x, x - y, x - y
+    LT
+    // stack: x < x - y, x - y
+    %secp_base
+    // stack: N, x < x - y, x - y
+    MUL
+    // stack: N * (x < x - y), x - y
+    ADD
+    // stack: (x - y) % N
+%endmacro
+
+// Check if (x,y) is a valid curve point.
+// Puts y^2 % N == (x^3 + 3) % N & (x < N) & (y < N) || (x,y)==(0,0) on top of the stack.
+%macro ec_check_secp
+    // stack: x, y
+    %secp_base
+    // stack: N, x, y
+    DUP2
+    // stack: x, N, x, y
+    LT
+    // stack: x < N, x, y
+    %secp_base
+    // stack: N, x < N, x, y
+    DUP4
+    // stack: y, N, x < N, x, y
+    LT
+    // stack: y < N, x < N, x, y
+    AND
+    // stack: (y < N) & (x < N), x, y
+    SWAP2
+    // stack: y, x, (y < N) & (x < N)
+    SWAP1
+    // stack: x, y, (y < N) & (x < N)
+    %secp_base
+    // stack: N, x, y, b
+    %secp_base
+    // stack: N, N, x, y, b
+    DUP3
+    // stack: x, N, N, x, y, b
+    %secp_base
+    // stack: N, x, N, N, x, y, b
+    DUP2
+    // stack: x, N, x, N, N, x, y, b
+    DUP1
+    // stack: x, x, N, x, N, N, x, y, b
+    MULMOD
+    // stack: x^2 % N, x, N, N, x, y, b
+    MULMOD
+    // stack: x^3 % N, N, x, y, b
+    PUSH 3
+    // stack: 3, x^3 % N, N, x, y, b
+    ADDMOD
+    // stack: (x^3 + 3) % N, x, y, b
+    DUP3
+    // stack: y, (x^3 + 3) % N, x, y, b
+    %secp_base
+    // stack: N, y, (x^3 + 3) % N, x, y, b
+    SWAP1
+    // stack: y, N, (x^3 + 3) % N, x, y, b
+    DUP1
+    // stack: y, y, N, (x^3 + 3) % N, x, y, b
+    MULMOD
+    // stack: y^2 % N, (x^3 + 3) % N, x, y, b
+    EQ
+    // stack: y^2 % N == (x^3 + 3) % N, x, y, b
+    SWAP2
+    // stack: y, x, y^2 % N == (x^3 + 3) % N, b
+    %ec_isidentity
+    // stack: (x,y)==(0,0), y^2 % N == (x^3 + 3) % N, b
+    SWAP2
+    // stack: b, y^2 % N == (x^3 + 3) % N, (x,y)==(0,0)
+    AND
+    // stack: y^2 % N == (x^3 + 3) % N & (x < N) & (y < N), (x,y)==(0,0)
+    OR
+    // stack: y^2 % N == (x^3 + 3) % N & (x < N) & (y < N) || (x,y)==(0,0)
+%endmacro
\ No newline at end of file
diff --git a/evm/src/cpu/kernel/asm/secp256k1/curve_mul.asm b/evm/src/cpu/kernel/asm/secp256k1/curve_mul.asm
new file mode 100644
index 00000000..e69de29b
diff --git a/evm/src/cpu/kernel/asm/secp256k1/moddiv.asm b/evm/src/cpu/kernel/asm/secp256k1/moddiv.asm
new file mode 100644
index 00000000..28ba24c2
--- /dev/null
+++ b/evm/src/cpu/kernel/asm/secp256k1/moddiv.asm
@@ -0,0 +1,788 @@
+/// Division modulo 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f, the Secp256k1 base field order
+/// To be replaced with a more efficient method using non-determinism later.
+
+// Returns y * (x^-1) where the inverse is taken modulo N
+%macro moddiv_secp
+    // stack: x, y
+    %inverse
+    // stack: x^-1, y
+    %mulmodn
+%endmacro
+
+%macro mulmodn_secp
+    // stack: x, y
+    %secp_base
+    // stack: N, x, y
+    SWAP2
+    // stack: y, x, N
+    MULMOD
+%endmacro
+
+%macro squaremodn_secp
+    // stack: x
+    DUP1
+    // stack: x, x
+    %mulmodn_secp
+%endmacro
+
+// Computes the inverse modulo N using x^-1 = x^(N-2) mod N and square-and-multiply modular exponentiation.
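+// The exponentiation below is fully unrolled: reading top to bottom, each squaring
+// step processes one bit of the exponent N-2, most significant bit first, and each
+// DUP2-then-multiply pair folds the saved base x back in when that bit is 1.
+// E.g. for exponent bits 101: x -> (square) x^2 -> (square) x^4 -> (multiply) x^5.
+// The final SWAP1 POP drops the saved copy of x, leaving only x^-1.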
+%macro inverse + DUP1 + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + 
%squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + 
%squaremodn + DUP2 + %mulmodn + %squaremodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + %squaremodn + %squaremodn + %squaremodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + %squaremodn + DUP2 + %mulmodn + SWAP1 + // stack: x, x^-1 + POP + // stack: x^-1 +%endmacro diff --git a/evm/src/cpu/kernel/mod.rs b/evm/src/cpu/kernel/mod.rs index ae765b99..694b6ab6 100644 --- a/evm/src/cpu/kernel/mod.rs +++ b/evm/src/cpu/kernel/mod.rs @@ -6,6 +6,8 @@ mod parser; #[cfg(test)] mod interpreter; +#[cfg(test)] +mod tests; use assembler::assemble; use parser::parse; diff --git a/evm/src/cpu/kernel/tests/curve_ops.rs b/evm/src/cpu/kernel/tests/curve_ops.rs new file mode 100644 index 00000000..7ce36ffd --- /dev/null +++ b/evm/src/cpu/kernel/tests/curve_ops.rs @@ -0,0 +1,134 @@ +#[cfg(test)] +mod bn { + use anyhow::Result; + use ethereum_types::U256; + + use crate::cpu::kernel::aggregator::combined_kernel; + use crate::cpu::kernel::interpreter::run; + use crate::cpu::kernel::tests::u256ify; + + #[test] + fn test_ec_ops() -> Result<()> { + // Make sure we can parse and assemble the entire kernel. 
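+        // Convention for all these runs: the deepest stack element, 0xdeadbeef, is a
+        // dummy return destination that each kernel routine jumps to once its result
+        // is on the stack; `run` then returns the final stack for the asserts below.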
+ let kernel = combined_kernel(); + let ec_add = kernel.global_labels["ec_add"]; + let ec_double = kernel.global_labels["ec_double"]; + let ec_mul = kernel.global_labels["ec_mul"]; + let identity = ("0x0", "0x0"); + let invalid = ("0x0", "0x3"); // Not on curve + let point0 = ( + "0x1feee7ec986e198890cb83be8b8ba09ee953b3f149db6d9bfdaa5c308a33e58d", + "0x2051cc9a9edd46231604fd88f351e95ec72a285be93e289ac59cb48561efb2c6", + ); + let point1 = ( + "0x15b64d0a5f329fb672029298be8050f444626e6de11903caffa74b388075be1b", + "0x2d9e07340bd5cd7b70687b98f2500ff930a89a30d7b6a3e04b1b4d345319d234", + ); + // point2 = point0 + point1 + let point2 = ( + "0x18659c0e0a8fedcb8747cf463fc7cfa05f667d84e771d0a9521fc1a550688f0c", + "0x283ed10b42703e187e7a808aeb45c6b457bc4cc7d704e53b3348a1e3b0bfa55b", + ); + // point3 = 2 * point0 + let point3 = ( + "0x17da2b7b1a01c8dfdf0f5a6415833c7d755d219aa7e2c4cd0ac83d87d0ca4217", + "0xc9ace9de14aac8114541b50c19320eb40f0eeac3621526d9e34dbcf4c3a6c0f", + ); + let s = "0xabb2a34c0e7956cfe6cef9ddb7e810c45ea19a6ebadd79c21959af09f5ba480a"; + // point4 = s * point0 + let point4 = ( + "0xe519344959cc17021fe98878f947f5c1b1675325533a620c1684cfa6367e6c0", + "0x7496a7575b0b6a821e19ce780ecc3e0b156e605327798693defeb9f265b7a6f", + ); + + // Standard addition #1 + let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0, point1.1, point1.0])?; + let stack = run(&kernel.code, ec_add, initial_stack); + assert_eq!(stack, u256ify([point2.1, point2.0])?); + // Standard addition #2 + let initial_stack = u256ify(["0xdeadbeef", point1.1, point1.0, point0.1, point0.0])?; + let stack = run(&kernel.code, ec_add, initial_stack); + assert_eq!(stack, u256ify([point2.1, point2.0])?); + + // Standard doubling #1 + let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0, point0.1, point0.0])?; + let stack = run(&kernel.code, ec_add, initial_stack); + assert_eq!(stack, u256ify([point3.1, point3.0])?); + // Standard doubling #2 + let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0])?; + let stack = run(&kernel.code, ec_double, initial_stack); + assert_eq!(stack, u256ify([point3.1, point3.0])?); + // Standard doubling #3 + let initial_stack = u256ify(["0xdeadbeef", "0x2", point0.1, point0.0])?; + let stack = run(&kernel.code, ec_mul, initial_stack); + assert_eq!(stack, u256ify([point3.1, point3.0])?); + + // Addition with identity #1 + let initial_stack = u256ify(["0xdeadbeef", identity.1, identity.0, point1.1, point1.0])?; + let stack = run(&kernel.code, ec_add, initial_stack); + assert_eq!(stack, u256ify([point1.1, point1.0])?); + // Addition with identity #2 + let initial_stack = u256ify(["0xdeadbeef", point1.1, point1.0, identity.1, identity.0])?; + let stack = run(&kernel.code, ec_add, initial_stack); + assert_eq!(stack, u256ify([point1.1, point1.0])?); + // Addition with identity #3 + let initial_stack = + u256ify(["0xdeadbeef", identity.1, identity.0, identity.1, identity.0])?; + let stack = run(&kernel.code, ec_add, initial_stack); + assert_eq!(stack, u256ify([identity.1, identity.0])?); + + // Addition with invalid point(s) #1 + let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0, invalid.1, invalid.0])?; + let stack = run(&kernel.code, ec_add, initial_stack); + assert_eq!(stack, vec![U256::MAX, U256::MAX]); + // Addition with invalid point(s) #2 + let initial_stack = u256ify(["0xdeadbeef", invalid.1, invalid.0, point0.1, point0.0])?; + let stack = run(&kernel.code, ec_add, initial_stack); + assert_eq!(stack, vec![U256::MAX, U256::MAX]); + // Addition with invalid point(s) #3 
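+        // (As in the other invalid cases, expect the (U256::MAX, U256::MAX) error
+        // marker that ec_add returns for points that fail the curve check.)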
+ let initial_stack = u256ify(["0xdeadbeef", invalid.1, invalid.0, identity.1, identity.0])?; + let stack = run(&kernel.code, ec_add, initial_stack); + assert_eq!(stack, vec![U256::MAX, U256::MAX]); + // Addition with invalid point(s) #4 + let initial_stack = u256ify(["0xdeadbeef", invalid.1, invalid.0, invalid.1, invalid.0])?; + let stack = run(&kernel.code, ec_add, initial_stack); + assert_eq!(stack, vec![U256::MAX, U256::MAX]); + + // Scalar multiplication #1 + let initial_stack = u256ify(["0xdeadbeef", s, point0.1, point0.0])?; + let stack = run(&kernel.code, ec_mul, initial_stack); + assert_eq!(stack, u256ify([point4.1, point4.0])?); + // Scalar multiplication #2 + let initial_stack = u256ify(["0xdeadbeef", "0x0", point0.1, point0.0])?; + let stack = run(&kernel.code, ec_mul, initial_stack); + assert_eq!(stack, u256ify([identity.1, identity.0])?); + // Scalar multiplication #3 + let initial_stack = u256ify(["0xdeadbeef", "0x1", point0.1, point0.0])?; + let stack = run(&kernel.code, ec_mul, initial_stack); + assert_eq!(stack, u256ify([point0.1, point0.0])?); + // Scalar multiplication #4 + let initial_stack = u256ify(["0xdeadbeef", s, identity.1, identity.0])?; + let stack = run(&kernel.code, ec_mul, initial_stack); + assert_eq!(stack, u256ify([identity.1, identity.0])?); + // Scalar multiplication #5 + let initial_stack = u256ify(["0xdeadbeef", s, invalid.1, invalid.0])?; + let stack = run(&kernel.code, ec_mul, initial_stack); + assert_eq!(stack, vec![U256::MAX, U256::MAX]); + + // Multiple calls + let ec_mul_hex = format!("0x{:x}", ec_mul); + let initial_stack = u256ify([ + "0xdeadbeef", + s, + &ec_mul_hex, + identity.1, + identity.0, + point0.1, + point0.0, + ])?; + let stack = run(&kernel.code, ec_add, initial_stack); + assert_eq!(stack, u256ify([point4.1, point4.0])?); + + Ok(()) + } +} diff --git a/evm/src/cpu/kernel/tests/exp.rs b/evm/src/cpu/kernel/tests/exp.rs new file mode 100644 index 00000000..24639349 --- /dev/null +++ b/evm/src/cpu/kernel/tests/exp.rs @@ -0,0 +1,44 @@ +use std::str::FromStr; + +use anyhow::Result; +use ethereum_types::U256; +use rand::{thread_rng, Rng}; + +use crate::cpu::kernel::aggregator::combined_kernel; +use crate::cpu::kernel::interpreter::run; + +#[test] +fn test_exp() -> Result<()> { + // Make sure we can parse and assemble the entire kernel. 
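+    // Differential test: run the kernel's `exp` routine and the bare EXP opcode on
+    // the same inputs and require identical final stacks. The comparison program is
+    // the bytecode [EXP, PUSH4 0xdeadbeef, JUMP], i.e. [0x0a, 0x63, 0xde, 0xad, 0xbe, 0xef, 0x56].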
+ let kernel = combined_kernel(); + let exp = kernel.global_labels["exp"]; + let mut rng = thread_rng(); + let a = U256([0; 4].map(|_| rng.gen())); + let b = U256([0; 4].map(|_| rng.gen())); + + // Random input + let initial_stack = vec![U256::from_str("0xdeadbeef")?, b, a]; + let stack_with_kernel = run(&kernel.code, exp, initial_stack); + let initial_stack = vec![b, a]; + let code = [0xa, 0x63, 0xde, 0xad, 0xbe, 0xef, 0x56]; // EXP, PUSH4 deadbeef, JUMP + let stack_with_opcode = run(&code, 0, initial_stack); + assert_eq!(stack_with_kernel, stack_with_opcode); + + // 0 base + let initial_stack = vec![U256::from_str("0xdeadbeef")?, b, U256::zero()]; + let stack_with_kernel = run(&kernel.code, exp, initial_stack); + let initial_stack = vec![b, U256::zero()]; + let code = [0xa, 0x63, 0xde, 0xad, 0xbe, 0xef, 0x56]; // EXP, PUSH4 deadbeef, JUMP + let stack_with_opcode = run(&code, 0, initial_stack); + assert_eq!(stack_with_kernel, stack_with_opcode); + + // 0 exponent + let initial_stack = vec![U256::from_str("0xdeadbeef")?, U256::zero(), a]; + let stack_with_kernel = run(&kernel.code, exp, initial_stack); + let initial_stack = vec![U256::zero(), a]; + let code = [0xa, 0x63, 0xde, 0xad, 0xbe, 0xef, 0x56]; // EXP, PUSH4 deadbeef, JUMP + let stack_with_opcode = run(&code, 0, initial_stack); + assert_eq!(stack_with_kernel, stack_with_opcode); + + Ok(()) +} diff --git a/evm/src/cpu/kernel/tests/mod.rs b/evm/src/cpu/kernel/tests/mod.rs new file mode 100644 index 00000000..49f16da6 --- /dev/null +++ b/evm/src/cpu/kernel/tests/mod.rs @@ -0,0 +1,14 @@ +mod curve_ops; +mod exp; + +use std::str::FromStr; + +use anyhow::Result; +use ethereum_types::U256; + +pub(crate) fn u256ify<'a>(hexes: impl IntoIterator) -> Result> { + Ok(hexes + .into_iter() + .map(U256::from_str) + .collect::, _>>()?) 
+} From a831fab8f82f5ebffa6af447a4d18821bcf47bbb Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Wed, 13 Jul 2022 19:22:32 +0200 Subject: [PATCH 02/15] Working secp add --- evm/src/cpu/kernel/aggregator.rs | 6 +- .../cpu/kernel/asm/secp256k1/curve_add.asm | 42 +- evm/src/cpu/kernel/asm/secp256k1/moddiv.asm | 1014 ++++++++--------- evm/src/cpu/kernel/assembler.rs | 3 +- evm/src/cpu/kernel/tests/curve_ops.rs | 135 +++ 5 files changed, 667 insertions(+), 533 deletions(-) diff --git a/evm/src/cpu/kernel/aggregator.rs b/evm/src/cpu/kernel/aggregator.rs index 418c0a22..fbe1c990 100644 --- a/evm/src/cpu/kernel/aggregator.rs +++ b/evm/src/cpu/kernel/aggregator.rs @@ -22,9 +22,9 @@ pub(crate) fn combined_kernel() -> Kernel { include_str!("asm/curve_mul.asm"), include_str!("asm/curve_add.asm"), include_str!("asm/moddiv.asm"), - // include_str!("asm/secp256k1/curve_mul.asm"), - // include_str!("asm/secp256k1/curve_add.asm"), - // include_str!("asm/secp256k1/moddiv.asm"), + include_str!("asm/secp256k1/curve_mul.asm"), + include_str!("asm/secp256k1/curve_add.asm"), + include_str!("asm/secp256k1/moddiv.asm"), include_str!("asm/ecrecover.asm"), include_str!("asm/storage_read.asm"), include_str!("asm/storage_write.asm"), diff --git a/evm/src/cpu/kernel/asm/secp256k1/curve_add.asm b/evm/src/cpu/kernel/asm/secp256k1/curve_add.asm index c1f1cf46..50ddd1bc 100644 --- a/evm/src/cpu/kernel/asm/secp256k1/curve_add.asm +++ b/evm/src/cpu/kernel/asm/secp256k1/curve_add.asm @@ -71,13 +71,13 @@ global ec_add_valid_points_secp: // stack: y1, x0, y0, x1, y1, retdest DUP3 // stack: y0, y1, x0, y0, x1, y1, retdest - %submod + %submod_secp // stack: y0 - y1, x0, y0, x1, y1, retdest DUP4 // stack: x1, y0 - y1, x0, y0, x1, y1, retdest DUP3 // stack: x0, x1, y0 - y1, x0, y0, x1, y1, retdest - %submod + %submod_secp // stack: x0 - x1, y0 - y1, x0, y0, x1, y1, retdest %moddiv_secp // stack: lambda, x0, y0, x1, y1, retdest @@ -138,9 +138,9 @@ ec_add_valid_points_with_lambda: // stack: lambda, lambda, N, x1, x0, lambda, x0, y0, x1, y1, retdest MULMOD // stack: lambda^2, x1, x0, lambda, x0, y0, x1, y1, retdest - %submod + %submod_secp // stack: lambda^2 - x1, x0, lambda, x0, y0, x1, y1, retdest - %submod + %submod_secp // stack: x2, lambda, x0, y0, x1, y1, retdest // Compute y2 = lambda*(x1 - x2) - y1 @@ -150,7 +150,7 @@ ec_add_valid_points_with_lambda: // stack: x2, N, x2, lambda, x0, y0, x1, y1, retdest DUP7 // stack: x1, x2, N, x2, lambda, x0, y0, x1, y1, retdest - %submod + %submod_secp // stack: x1 - x2, N, x2, lambda, x0, y0, x1, y1, retdest DUP4 // stack: lambda, x1 - x2, N, x2, lambda, x0, y0, x1, y1, retdest @@ -160,7 +160,7 @@ ec_add_valid_points_with_lambda: // stack: y1, lambda * (x1 - x2), x2, lambda, x0, y0, x1, y1, retdest SWAP1 // stack: lambda * (x1 - x2), y1, x2, lambda, x0, y0, x1, y1, retdest - %submod + %submod_secp // stack: y2, x2, lambda, x0, y0, x1, y1, retdest // Return x2,y2 @@ -246,7 +246,7 @@ global ec_double_secp: // Push the order of the Secp256k1 scalar field. %macro secp_base - PUSH 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 + PUSH 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f %endmacro // Assumption: x, y < N and 2N < 2^256. 
@@ -311,30 +311,30 @@ global ec_double_secp: // stack: x^2 % N, x, N, N, x, y, b MULMOD // stack: x^3 % N, N, x, y, b - PUSH 3 - // stack: 3, x^3 % N, N, x, y, b + PUSH 7 + // stack: 7, x^3 % N, N, x, y, b ADDMOD - // stack: (x^3 + 3) % N, x, y, b + // stack: (x^3 + 7) % N, x, y, b DUP3 - // stack: y, (x^3 + 3) % N, x, y, b + // stack: y, (x^3 + 7) % N, x, y, b %secp_base - // stack: N, y, (x^3 + 3) % N, x, y, b + // stack: N, y, (x^3 + 7) % N, x, y, b SWAP1 - // stack: y, N, (x^3 + 3) % N, x, y, b + // stack: y, N, (x^3 + 7) % N, x, y, b DUP1 - // stack: y, y, N, (x^3 + 3) % N, x, y, b + // stack: y, y, N, (x^3 + 7) % N, x, y, b MULMOD - // stack: y^2 % N, (x^3 + 3) % N, x, y, b + // stack: y^2 % N, (x^3 + 7) % N, x, y, b EQ - // stack: y^2 % N == (x^3 + 3) % N, x, y, b + // stack: y^2 % N == (x^3 + 7) % N, x, y, b SWAP2 - // stack: y, x, y^2 % N == (x^3 + 3) % N, b + // stack: y, x, y^2 % N == (x^3 + 7) % N, b %ec_isidentity - // stack: (x,y)==(0,0), y^2 % N == (x^3 + 3) % N, b + // stack: (x,y)==(0,0), y^2 % N == (x^3 + 7) % N, b SWAP2 - // stack: b, y^2 % N == (x^3 + 3) % N, (x,y)==(0,0) + // stack: b, y^2 % N == (x^3 + 7) % N, (x,y)==(0,0) AND - // stack: y^2 % N == (x^3 + 3) % N & (x < N) & (y < N), (x,y)==(0,0) + // stack: y^2 % N == (x^3 + 7) % N & (x < N) & (y < N), (x,y)==(0,0) OR - // stack: y^2 % N == (x^3 + 3) % N & (x < N) & (y < N) || (x,y)==(0,0) + // stack: y^2 % N == (x^3 + 7) % N & (x < N) & (y < N) || (x,y)==(0,0) %endmacro \ No newline at end of file diff --git a/evm/src/cpu/kernel/asm/secp256k1/moddiv.asm b/evm/src/cpu/kernel/asm/secp256k1/moddiv.asm index 28ba24c2..dce8c03f 100644 --- a/evm/src/cpu/kernel/asm/secp256k1/moddiv.asm +++ b/evm/src/cpu/kernel/asm/secp256k1/moddiv.asm @@ -4,9 +4,9 @@ // Returns y * (x^-1) where the inverse is taken modulo N %macro moddiv_secp // stack: x, y - %inverse + %inverse_secp // stack: x^-1, y - %mulmodn + %mulmodn_secp %endmacro %macro mulmodn_secp @@ -26,761 +26,759 @@ %endmacro // Computes the inverse modulo N using x^-1 = x^(N-2) mod N and square-and-multiply modular exponentiation. 
-%macro inverse +%macro inverse_secp DUP1 - %squaremodn + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + 
%mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - 
%mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + 
%squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - 
%squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn - %squaremodn + %mulmodn_secp + %squaremodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn - %squaremodn - %squaremodn - %squaremodn - %squaremodn + %mulmodn_secp + %squaremodn_secp + %squaremodn_secp + %squaremodn_secp + %squaremodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn - %squaremodn + %mulmodn_secp + %squaremodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn + %mulmodn_secp + %squaremodn_secp + %squaremodn_secp DUP2 - %mulmodn - %squaremodn - DUP2 - %mulmodn + %mulmodn_secp SWAP1 // stack: x, x^-1 POP diff --git a/evm/src/cpu/kernel/assembler.rs b/evm/src/cpu/kernel/assembler.rs index 179e9367..bef01d85 100644 --- a/evm/src/cpu/kernel/assembler.rs +++ b/evm/src/cpu/kernel/assembler.rs @@ -67,7 +67,8 @@ fn find_macros(files: &[File]) -> HashMap { params: params.clone(), items: items.clone(), }; - macros.insert(name.clone(), _macro); + let old = macros.insert(name.clone(), _macro); + assert!(old.is_none(), "Duplicate macro: {name}"); } } } diff --git a/evm/src/cpu/kernel/tests/curve_ops.rs b/evm/src/cpu/kernel/tests/curve_ops.rs index 7ce36ffd..97561b99 100644 --- a/evm/src/cpu/kernel/tests/curve_ops.rs +++ b/evm/src/cpu/kernel/tests/curve_ops.rs @@ -132,3 +132,138 @@ mod bn { Ok(()) } } + +#[cfg(test)] +mod secp { + use anyhow::Result; + use ethereum_types::U256; + + use crate::cpu::kernel::aggregator::combined_kernel; + use crate::cpu::kernel::interpreter::run; + use crate::cpu::kernel::tests::u256ify; + + #[test] + fn test_ec_ops() -> Result<()> { + // Make sure we can parse and assemble the entire kernel. 
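+        // Mirrors the bn tests above, but exercises the ec_add_secp and
+        // ec_double_secp labels with points on secp256k1 (y^2 = x^3 + 7).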
+ let kernel = combined_kernel(); + let ec_add = kernel.global_labels["ec_add_secp"]; + let ec_double = kernel.global_labels["ec_double_secp"]; + let ec_mul = kernel.global_labels["ec_mul"]; + let identity = ("0x0", "0x0"); + let invalid = ("0x0", "0x3"); // Not on curve + let point0 = ( + "0xc82ccceebd739e646631b7270ed8c33e96c4940b19db91eaf67da6ec92d109b", + "0xe0d241d2de832656c3eed78271bb06b5602d6473742c7c48a38b9f0350a76164", + ); + let point1 = ( + "0xbf26b1a7a46025d0a1787aa050d0bb83b8a4746010f873404389b8b23360919c", + "0x65adeff3fed1b22fa10279b5a25b96694a20bcbf6b718c0412f6d34a2e9bb924", + ); + // point2 = point0 + point1 + let point2 = ( + "0x191e8183402c6d6f5f22a9fe2a5ce17a7dd5184bd5d359c77189e9f714a18225", + "0xe23fbb6913de7449d92e4dfbe278e2874fac80d53bfeb8fb3400462b7bfaec74", + ); + // point3 = 2 * point0 + let point3 = ( + "0x7872498939b02197c2b6f0a0f5767f36551e43f910de472fbbff0538b21f5f45", + "0x294e15025d935438023a0e4056892abd6405fade13cf2b3131d8755be7cebad", + ); + let s = "0xa72ad7d8ce24135b5138f853d7a9896381c40523b5d1cf03072151f2af10e35e"; + // point4 = s * point0 + let point4 = ( + "0xd8bec38864f0fe56d429540e6de624afb8ddc7fba1f738337913922a30b96c14", + "0x5b086b2720ac39d173777bc36a49629c80c3a3e55e1c50527e60016d9be71318", + ); + + // Standard addition #1 + let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0, point1.1, point1.0])?; + let stack = run(&kernel.code, ec_add, initial_stack); + assert_eq!(stack, u256ify([point2.1, point2.0])?); + // Standard addition #2 + let initial_stack = u256ify(["0xdeadbeef", point1.1, point1.0, point0.1, point0.0])?; + let stack = run(&kernel.code, ec_add, initial_stack); + assert_eq!(stack, u256ify([point2.1, point2.0])?); + + // Standard doubling #1 + let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0, point0.1, point0.0])?; + let stack = run(&kernel.code, ec_add, initial_stack); + assert_eq!(stack, u256ify([point3.1, point3.0])?); + // Standard doubling #2 + let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0])?; + let stack = run(&kernel.code, ec_double, initial_stack); + assert_eq!(stack, u256ify([point3.1, point3.0])?); + // Standard doubling #3 + let initial_stack = u256ify(["0xdeadbeef", "0x2", point0.1, point0.0])?; + let stack = run(&kernel.code, ec_mul, initial_stack); + assert_eq!(stack, u256ify([point3.1, point3.0])?); + + // Addition with identity #1 + let initial_stack = u256ify(["0xdeadbeef", identity.1, identity.0, point1.1, point1.0])?; + let stack = run(&kernel.code, ec_add, initial_stack); + assert_eq!(stack, u256ify([point1.1, point1.0])?); + // Addition with identity #2 + let initial_stack = u256ify(["0xdeadbeef", point1.1, point1.0, identity.1, identity.0])?; + let stack = run(&kernel.code, ec_add, initial_stack); + assert_eq!(stack, u256ify([point1.1, point1.0])?); + // Addition with identity #3 + let initial_stack = + u256ify(["0xdeadbeef", identity.1, identity.0, identity.1, identity.0])?; + let stack = run(&kernel.code, ec_add, initial_stack); + assert_eq!(stack, u256ify([identity.1, identity.0])?); + + // Addition with invalid point(s) #1 + let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0, invalid.1, invalid.0])?; + let stack = run(&kernel.code, ec_add, initial_stack); + assert_eq!(stack, vec![U256::MAX, U256::MAX]); + // Addition with invalid point(s) #2 + let initial_stack = u256ify(["0xdeadbeef", invalid.1, invalid.0, point0.1, point0.0])?; + let stack = run(&kernel.code, ec_add, initial_stack); + assert_eq!(stack, vec![U256::MAX, U256::MAX]); + // Addition with invalid 
point(s) #3 + let initial_stack = u256ify(["0xdeadbeef", invalid.1, invalid.0, identity.1, identity.0])?; + let stack = run(&kernel.code, ec_add, initial_stack); + assert_eq!(stack, vec![U256::MAX, U256::MAX]); + // Addition with invalid point(s) #4 + let initial_stack = u256ify(["0xdeadbeef", invalid.1, invalid.0, invalid.1, invalid.0])?; + let stack = run(&kernel.code, ec_add, initial_stack); + assert_eq!(stack, vec![U256::MAX, U256::MAX]); + + // Scalar multiplication #1 + let initial_stack = u256ify(["0xdeadbeef", s, point0.1, point0.0])?; + let stack = run(&kernel.code, ec_mul, initial_stack); + assert_eq!(stack, u256ify([point4.1, point4.0])?); + // Scalar multiplication #2 + let initial_stack = u256ify(["0xdeadbeef", "0x0", point0.1, point0.0])?; + let stack = run(&kernel.code, ec_mul, initial_stack); + assert_eq!(stack, u256ify([identity.1, identity.0])?); + // Scalar multiplication #3 + let initial_stack = u256ify(["0xdeadbeef", "0x1", point0.1, point0.0])?; + let stack = run(&kernel.code, ec_mul, initial_stack); + assert_eq!(stack, u256ify([point0.1, point0.0])?); + // Scalar multiplication #4 + let initial_stack = u256ify(["0xdeadbeef", s, identity.1, identity.0])?; + let stack = run(&kernel.code, ec_mul, initial_stack); + assert_eq!(stack, u256ify([identity.1, identity.0])?); + // Scalar multiplication #5 + let initial_stack = u256ify(["0xdeadbeef", s, invalid.1, invalid.0])?; + let stack = run(&kernel.code, ec_mul, initial_stack); + assert_eq!(stack, vec![U256::MAX, U256::MAX]); + + // Multiple calls + let ec_mul_hex = format!("0x{:x}", ec_mul); + let initial_stack = u256ify([ + "0xdeadbeef", + s, + &ec_mul_hex, + identity.1, + identity.0, + point0.1, + point0.0, + ])?; + let stack = run(&kernel.code, ec_add, initial_stack); + assert_eq!(stack, u256ify([point4.1, point4.0])?); + + Ok(()) + } +} From 7a6c53e92150d9254d13f69ec91c4277ab40f1ef Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Wed, 13 Jul 2022 19:25:28 +0200 Subject: [PATCH 03/15] Working secp mul --- .../cpu/kernel/asm/secp256k1/curve_mul.asm | 109 ++++++++++++++++++ evm/src/cpu/kernel/tests/curve_ops.rs | 2 +- 2 files changed, 110 insertions(+), 1 deletion(-) diff --git a/evm/src/cpu/kernel/asm/secp256k1/curve_mul.asm b/evm/src/cpu/kernel/asm/secp256k1/curve_mul.asm index e69de29b..2ecf074a 100644 --- a/evm/src/cpu/kernel/asm/secp256k1/curve_mul.asm +++ b/evm/src/cpu/kernel/asm/secp256k1/curve_mul.asm @@ -0,0 +1,109 @@ +// Secp256k1 elliptic curve scalar multiplication. +// Recursive implementation, same algorithm as in `exp.asm`. 
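
As a sketch of the algorithm (not part of the diff): the routine below computes s*P recursively as (s/2)*(2P), adding one extra copy of P back in when s is odd, so it performs one doubling per bit of the scalar. A rough Rust model of that recursion, using plain integer addition as a stand-in for the curve group law (the u128 "group" and these names are illustrative only):

    // Toy model: the "curve" is (u128, +), so doubling is p + p and the
    // identity is 0. Only the shape of the recursion matches the kernel.
    fn ec_double(p: u128) -> u128 { p + p }
    fn ec_add(p: u128, q: u128) -> u128 { p + q }

    fn ec_mul(p: u128, s: u128) -> u128 {
        if s == 0 {
            return 0; // ret_zero: scalar exhausted, return the identity
        }
        // step_case: double the point, then recurse on the halved scalar...
        let acc = ec_mul(ec_double(p), s / 2);
        // ...and in recursion_return, add P once more if s was odd.
        if s & 1 == 1 { ec_add(acc, p) } else { acc }
    }

    fn main() {
        assert_eq!(ec_mul(3, 41), 123); // 41 * 3 by double-and-add
    }

The identity and on-curve checks that the assembly performs up front are omitted from this model.
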
+global ec_mul_secp: + JUMPDEST + // stack: x, y, s, retdest + DUP2 + // stack: y, x, y, s, retdest + DUP2 + // stack: x, y, x, y, s, retdest + %ec_isidentity + // stack: (x,y)==(0,0), x, y, s, retdest + %jumpi(ret_zero) + // stack: x, y, s, retdest + DUP2 + // stack: y, x, y, s, retdest + DUP2 + // stack: x, y, x, y, s, retdest + %ec_check_secp + // stack: isValid(x, y), x, y, s, retdest + %jumpi(ec_mul_valid_point) + // stack: x, y, s, retdest + %pop3 + %ec_invalid_input + +// Same algorithm as in `exp.asm` +ec_mul_valid_point: + JUMPDEST + // stack: x, y, s, retdest + DUP3 + // stack: s, x, y, s, retdest + %jumpi(step_case) + // stack: x, y, s, retdest + %jump(ret_zero) + +step_case: + JUMPDEST + // stack: x, y, s, retdest + PUSH recursion_return + // stack: recursion_return, x, y, s, retdest + PUSH 2 + // stack: 2, recursion_return, x, y, s, retdest + DUP5 + // stack: s, 2, recursion_return, x, y, s, retdest + DIV + // stack: s / 2, recursion_return, x, y, s, retdest + PUSH step_case_contd + // stack: step_case_contd, s / 2, recursion_return, x, y, s, retdest + DUP5 + // stack: y, step_case_contd, s / 2, recursion_return, x, y, s, retdest + DUP5 + // stack: x, y, step_case_contd, s / 2, recursion_return, x, y, s, retdest + %jump(ec_double_secp) + +// Assumption: 2(x,y) = (x',y') +step_case_contd: + JUMPDEST + // stack: x', y', s / 2, recursion_return, x, y, s, retdest + %jump(ec_mul_valid_point) + +recursion_return: + JUMPDEST + // stack: x', y', x, y, s, retdest + SWAP4 + // stack: s, y', x, y, x', retdest + PUSH 1 + // stack: 1, s, y', x, y, x', retdest + AND + // stack: s & 1, y', x, y, x', retdest + SWAP1 + // stack: y', s & 1, x, y, x', retdest + SWAP2 + // stack: x, s & 1, y', y, x', retdest + SWAP3 + // stack: y, s & 1, y', x, x', retdest + SWAP4 + // stack: x', s & 1, y', x, y, retdest + SWAP1 + // stack: s & 1, x', y', x, y, retdest + %jumpi(odd_scalar) + // stack: x', y', x, y, retdest + SWAP3 + // stack: y, y', x, x', retdest + POP + // stack: y', x, x', retdest + SWAP1 + // stack: x, y', x', retdest + POP + // stack: y', x', retdest + SWAP2 + // stack: retdest, x', y' + JUMP + +odd_scalar: + JUMPDEST + // stack: x', y', x, y, retdest + %jump(ec_add_valid_points_secp) + +ret_zero: + JUMPDEST + // stack: x, y, s, retdest + %pop3 + // stack: retdest + PUSH 0 + // stack: 0, retdest + PUSH 0 + // stack: 0, 0, retdest + SWAP2 + // stack: retdest, 0, 0 + JUMP diff --git a/evm/src/cpu/kernel/tests/curve_ops.rs b/evm/src/cpu/kernel/tests/curve_ops.rs index 97561b99..06ab21d5 100644 --- a/evm/src/cpu/kernel/tests/curve_ops.rs +++ b/evm/src/cpu/kernel/tests/curve_ops.rs @@ -148,7 +148,7 @@ mod secp { let kernel = combined_kernel(); let ec_add = kernel.global_labels["ec_add_secp"]; let ec_double = kernel.global_labels["ec_double_secp"]; - let ec_mul = kernel.global_labels["ec_mul"]; + let ec_mul = kernel.global_labels["ec_mul_secp"]; let identity = ("0x0", "0x0"); let invalid = ("0x0", "0x3"); // Not on curve let point0 = ( From b4ebbe5a314906407f25b42645451ebc54f36a4a Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Wed, 13 Jul 2022 19:48:17 +0200 Subject: [PATCH 04/15] Start ecrecover --- evm/src/cpu/kernel/asm/ecrecover.asm | 86 ++++++++++++++++++++++++++++ 1 file changed, 86 insertions(+) diff --git a/evm/src/cpu/kernel/asm/ecrecover.asm b/evm/src/cpu/kernel/asm/ecrecover.asm index e69de29b..a05bff92 100644 --- a/evm/src/cpu/kernel/asm/ecrecover.asm +++ b/evm/src/cpu/kernel/asm/ecrecover.asm @@ -0,0 +1,86 @@ +global ecrecover: + JUMPDEST + // stack: hash, v, r, s, retdest + 
%ecrecover_input_check + // stack: isValid(v,r,s), hash, v, r, s, retdest + %jumpi(ecrecover_valid_input) + // stack: hash, v, r, s, retdest + %pop(4) + // stack: retdest + %ecrecover_invalid_input // TODO: Return correct invalid input + +ecrecover_valid_input: + JUMPDEST + // stack: hash, v, r, s, retdest + +// Check if v, r, and s are in correct form. +// Returns r < N & r!=0 & s < N & s!=0 & (v==28 || v==27). +%macro ecrecover_input_check + // stack: hash, v, r, s, retdest + DUP2 + // stack: v, hash, v, r, s, retdest + PUSH 27 + // stack: 27, v, hash, v, r, s, retdest + EQ + // stack: v==27, hash, v, r, s, retdest + DUP3 + // stack: v, v==27, hash, v, r, s, retdest + PUSH 28 + // stack: 28, v, v==27, hash, v, r, s, retdest + EQ + // stack: v==28, v==27, hash, v, r, s, retdest + OR + // stack: (v==28 || v==27), hash, v, r, s, retdest + ISZERO + // stack: (v==28 || v==27), hash, v, r, s, retdest + DUP5 + // stack: s, (v==28 || v==27), hash, v, r, s, retdest + %secp_is_out_of_bounds + // stack: (s >= N || s==0), (v==28 || v==27), hash, v, r, s, retdest + DUP5 + // stack: r, (s >= N || s==0), (v==28 || v==27), hash, v, r, s, retdest + %secp_is_out_of_bounds + // stack: (r >= N || r==0), (s >= N || s==0), (v==28 || v==27), hash, v, r, s, retdest + OR + // stack: (r >= N || r==0 || s >= N || s==0), (v==28 || v==27), hash, v, r, s, retdest + ISZERO + // stack: (r < N & r!=0 & s < N & s!=0), (v==28 || v==27), hash, v, r, s, retdest + AND + // stack: r < N & r!=0 & s < N & s!=0 & (v==28 || v==27), hash, v, r, s, retdest +%endmacro + +%macro secp_is_out_of_bounds + // stack: x + DUP1 + // stack: x, x + ISZERO + // stack: x==0, x + SWAP1 + // stack: x, x==0 + %secp_scalar + // stack: N, x, x==0 + SWAP1 + // stack: x, N, x==0 + LT + // stack: x < N, x==0 + ISZERO + // stack: x >= N, x==0 + OR + // stack: x >= N || x==0 +%endmacro + +%macro secp_scalar + PUSH 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 +%endmacro + +// Return (u256::MAX, u256::MAX) which is used to indicate the input was invalid. +%macro ecrecover_invalid_input + // stack: retdest + PUSH 0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff + // stack: u256::MAX, retdest + PUSH 0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff + // stack: u256::MAX, u256::MAX, retdest + SWAP2 + // stack: retdest, u256::MAX, u256::MAX + JUMP +%endmacro From 522213c9336ed7511321524fd0db62ebea767e20 Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Thu, 14 Jul 2022 11:30:47 +0200 Subject: [PATCH 05/15] Ecrecover until hashing --- evm/src/cpu/kernel/asm/basic_macros.asm | 20 + evm/src/cpu/kernel/asm/ecrecover.asm | 111 ++- .../cpu/kernel/asm/secp256k1/curve_mul.asm | 6 +- .../kernel/asm/secp256k1/inverse_scalar.asm | 672 +++++++++++++++ evm/src/cpu/kernel/asm/secp256k1/lift_x.asm | 815 ++++++++++++++++++ 5 files changed, 1615 insertions(+), 9 deletions(-) create mode 100644 evm/src/cpu/kernel/asm/secp256k1/inverse_scalar.asm create mode 100644 evm/src/cpu/kernel/asm/secp256k1/lift_x.asm diff --git a/evm/src/cpu/kernel/asm/basic_macros.asm b/evm/src/cpu/kernel/asm/basic_macros.asm index 200aeea0..376b661d 100644 --- a/evm/src/cpu/kernel/asm/basic_macros.asm +++ b/evm/src/cpu/kernel/asm/basic_macros.asm @@ -44,6 +44,26 @@ // stack: (pred != 0) * nz + (pred == 0) * z %endmacro +// If pred, yields z; otherwise, yields nz +// Assumes pred is boolean (either 0 or 1). 
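
Read as arithmetic (a sketch, not part of the diff), the macro below is a branchless two-way select: with pred constrained to 0 or 1, exactly one of the two products survives the final ADD. Note the convention mirrors the existing %select, whose result is (pred != 0) * nz + (pred == 0) * z; here a true pred yields z instead.

    // Branchless select assuming pred is exactly 0 or 1, mirroring the
    // dup1/iszero/mul/add sequence of %select_bool below.
    fn select_bool(pred: u64, nz: u64, z: u64) -> u64 {
        let notpred = (pred == 0) as u64; // iszero
        notpred * nz + pred * z
    }

    fn main() {
        assert_eq!(select_bool(1, 10, 20), 20); // pred set: take z
        assert_eq!(select_bool(0, 10, 20), 10); // pred clear: take nz
    }
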
+%macro select_bool
+    // stack: pred, nz, z
+    dup1
+    // stack: pred, pred, nz, z
+    iszero
+    // stack: notpred, pred, nz, z
+    swap3
+    // stack: z, pred, nz, notpred
+    mul
+    // stack: pred * z, nz, notpred
+    swap2
+    // stack: notpred, nz, pred * z
+    mul
+    // stack: notpred * nz, pred * z
+    add
+    // stack: notpred * nz + pred * z
+%endmacro
+
 %macro square
     // stack: x
     dup1
diff --git a/evm/src/cpu/kernel/asm/ecrecover.asm b/evm/src/cpu/kernel/asm/ecrecover.asm
index a05bff92..97d12616 100644
--- a/evm/src/cpu/kernel/asm/ecrecover.asm
+++ b/evm/src/cpu/kernel/asm/ecrecover.asm
@@ -7,11 +7,112 @@ global ecrecover:
     // stack: hash, v, r, s, retdest
     %pop(4)
     // stack: retdest
-    %ecrecover_invalid_input // TODO: Return correct invalid input
+    %ecrecover_invalid_input
 
+// Pseudo-code:
+// let P = lift_x(r, recovery_id);
+// let r_inv = r.inverse();
+// let u1 = s * r_inv;
+// let u2 = -hash * r_inv;
+// return u1*P + u2*GENERATOR;
 ecrecover_valid_input:
     JUMPDEST
     // stack: hash, v, r, s, retdest
+    SWAP1
+    // stack: v, hash, r, s, retdest
+    DUP3
+    // stack: r, v, hash, r, s, retdest
+    %secp_lift_x
+    // stack: x, y, hash, r, s, retdest
+    SWAP3
+    // stack: r, y, hash, x, s, retdest
+    %inverse_secp_scalar
+    // stack: r^(-1), y, hash, x, s, retdest
+    DUP1
+    // stack: r^(-1), r^(-1), y, hash, x, s, retdest
+    SWAP5
+    // stack: s, r^(-1), y, hash, x, r^(-1), retdest
+    %mulmodn_secp_scalar
+    // stack: u1, y, hash, x, r^(-1), retdest
+    PUSH ecrecover_with_first_point
+    // stack: ecrecover_with_first_point, u1, y, hash, x, r^(-1), retdest
+    SWAP1
+    // stack: u1, ecrecover_with_first_point, y, hash, x, r^(-1), retdest
+    SWAP2
+    // stack: y, ecrecover_with_first_point, u1, hash, x, r^(-1), retdest
+    SWAP1
+    // stack: ecrecover_with_first_point, y, u1, hash, x, r^(-1), retdest
+    SWAP3
+    // stack: hash, y, u1, ecrecover_with_first_point, x, r^(-1), retdest
+    SWAP4
+    // stack: x, y, u1, ecrecover_with_first_point, hash, r^(-1), retdest
+    %jump(ec_mul_valid_point_secp)
+
+    SWAP2
+    // stack: hash, y, u1, x, r^(-1), retdest
+    SWAP3
+    // stack: x, y, u1, hash, r^(-1), retdest
+    SWAP4
+    // stack: r^(-1), y, hash, x, u1, retdest
+    SWAP1
+    // stack: y, r^(-1), hash, x, u1, retdest
+    SWAP2
+    // stack: hash, r^(-1), y, x, u1, retdest
+    %secp_scalar
+    // stack: p, hash, r^(-1), y, x, u1, retdest
+    SUB
+    // stack: p - hash, r^(-1), y, x, u1, retdest // Assume hash < p, should be hard (127-bit) to find a hash larger than p.
+    %mulmodn_secp_scalar
+    // stack: u2, y, x, u1, retdest // Assume hash < p, should be hard (127-bit) to find a hash larger than p.
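
For reference, the algebra behind the pseudo-code above is standard ECDSA public-key recovery (background, not new material from this patch). A valid signature satisfies s*k = hash + r*d (mod N), where k is the signing nonce, d the private key, and P = k*G the nonce point whose x-coordinate is r. Solving for the public key Q = d*G:

    Q = d*G
      = r^(-1) * (s*k - hash) * G
      = r^(-1) * (s*P - hash*G)
      = (s * r^(-1)) * P + (-hash * r^(-1)) * G
      = u1*P + u2*G

which is exactly u1*P + u2*GENERATOR, with lift_x reconstructing P from r and the parity encoded in v.
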
+
+ecrecover_with_first_point:
+    JUMPDEST
+    // stack: X, Y, hash, r^(-1), retdest
+    %secp_scalar
+    // stack: p, X, Y, hash, r^(-1), retdest
+    SWAP1
+    // stack: X, p, Y, hash, r^(-1), retdest
+    SWAP4
+    // stack: r^(-1), p, Y, hash, X, retdest
+    SWAP2
+    // stack: Y, p, r^(-1), hash, X, retdest
+    SWAP3
+    // stack: hash, p, r^(-1), Y, X, retdest
+    MOD
+    // stack: hash%p, r^(-1), Y, X, retdest
+    %secp_scalar
+    // stack: p, hash%p, r^(-1), Y, X, retdest
+    SUB
+    // stack: -hash, r^(-1), Y, X, retdest
+    %mulmodn_secp_scalar
+    // stack: u2, Y, X, retdest
+    PUSH 8
+    // stack: final_hashing, u2, Y, X, retdest
+    SWAP3
+    // stack: X, u2, Y, final_hashing, retdest
+    PUSH 7
+    // stack: ec_add_valid_points_secp, X, u2, Y, final_hashing, retdest
+    SWAP1
+    // stack: X, ec_add_valid_points_secp, u2, Y, final_hashing, retdest
+    PUSH 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798 // x-coordinate of generator
+    // stack: Gx, X, ec_add_valid_points_secp, u2, Y, final_hashing, retdest
+    SWAP1
+    // stack: X, Gx, ec_add_valid_points_secp, u2, Y, final_hashing, retdest
+    PUSH 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8 // y-coordinate of generator
+    // stack: Gy, X, Gx, ec_add_valid_points_secp, u2, Y, final_hashing, retdest
+    SWAP1
+    // stack: X, Gy, Gx, ec_add_valid_points_secp, u2, Y, final_hashing, retdest
+    SWAP4
+    // stack: u2, Gy, Gx, ec_add_valid_points_secp, X, Y, final_hashing, retdest
+    SWAP2
+    // stack: Gx, Gy, u2, ec_add_valid_points_secp, X, Y, final_hashing, retdest
+    %jump(ec_mul_valid_point_secp)
+
+// TODO
+final_hashing:
+    JUMPDEST
+    PUSH 0xdeadbeef
+    JUMP
+
 // Check if v, r, and s are in correct form.
 // Returns r < N & r!=0 & s < N & s!=0 & (v==28 || v==27).
@@ -73,14 +174,12 @@
     PUSH 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
 %endmacro
 
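
final_hashing above is still a stub at this point in the series. To finish ecrecover, the recovered public key (X, Y) has to be keccak256-hashed and the low 20 bytes kept as the address. A host-side sketch of that step, essentially the pubkey_to_addr test helper that a later patch in this series adds (keccak_hash is the crate the tests end up using; nothing here is kernel code):

    use ethereum_types::U256;
    use keccak_hash::keccak;

    // address = last 20 bytes of keccak256(X || Y), with both coordinates
    // big-endian and no 0x04 prefix byte.
    fn pubkey_to_addr(x: U256, y: U256) -> Vec<u8> {
        let mut buf = [0u8; 64];
        x.to_big_endian(&mut buf[0..32]);
        y.to_big_endian(&mut buf[32..64]);
        keccak(buf).0[12..].to_vec()
    }
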
-// Return (u256::MAX, u256::MAX) which is used to indicate the input was invalid.
+// Return u256::MAX which is used to indicate the input was invalid.
 %macro ecrecover_invalid_input
     // stack: retdest
     PUSH 0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
     // stack: u256::MAX, retdest
-    PUSH 0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
-    // stack: u256::MAX, u256::MAX, retdest
-    SWAP2
-    // stack: retdest, u256::MAX, u256::MAX
+    SWAP1
+    // stack: retdest, u256::MAX
     JUMP
 %endmacro
diff --git a/evm/src/cpu/kernel/asm/secp256k1/curve_mul.asm b/evm/src/cpu/kernel/asm/secp256k1/curve_mul.asm
index 2ecf074a..7b332955 100644
--- a/evm/src/cpu/kernel/asm/secp256k1/curve_mul.asm
+++ b/evm/src/cpu/kernel/asm/secp256k1/curve_mul.asm
@@ -17,13 +17,13 @@ global ec_mul_secp:
     // stack: x, y, x, y, s, retdest
     %ec_check_secp
     // stack: isValid(x, y), x, y, s, retdest
-    %jumpi(ec_mul_valid_point)
+    %jumpi(ec_mul_valid_point_secp)
     // stack: x, y, s, retdest
     %pop3
     %ec_invalid_input
 
 // Same algorithm as in `exp.asm`
-ec_mul_valid_point:
+global ec_mul_valid_point_secp:
     JUMPDEST
     // stack: x, y, s, retdest
     DUP3
@@ -55,7 +55,7 @@ step_case:
 step_case_contd:
     JUMPDEST
     // stack: x', y', s / 2, recursion_return, x, y, s, retdest
-    %jump(ec_mul_valid_point)
+    %jump(ec_mul_valid_point_secp)
 
 recursion_return:
     JUMPDEST
diff --git a/evm/src/cpu/kernel/asm/secp256k1/inverse_scalar.asm b/evm/src/cpu/kernel/asm/secp256k1/inverse_scalar.asm
new file mode 100644
index 00000000..704191fa
--- /dev/null
+++ b/evm/src/cpu/kernel/asm/secp256k1/inverse_scalar.asm
@@ -0,0 +1,672 @@
+/// Division modulo 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141, the Secp256k1 scalar field order
+/// To replace with more efficient method using non-determinism later.
+
+%macro mulmodn_secp_scalar
+    // stack: x, y
+    %secp_scalar
+    // stack: N, x, y
+    SWAP2
+    // stack: y, x, N
+    MULMOD
+%endmacro
+
+%macro squaremodn_secp_scalar
+    // stack: x
+    DUP1
+    // stack: x, x
+    %mulmodn_secp
+%endmacro
+
+// Computes the inverse modulo N using x^-1 = x^(N-2) mod N and square-and-multiply modular exponentiation.
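
The long macro that follows is this exponentiation unrolled, with the fixed exponent N-2 baked into the schedule of squarings and DUP2-multiplications, consuming the exponent's bits from the most significant end. The same square-and-multiply idea as a loop, in a self-contained Rust sketch (u64 values and a toy prime, purely illustrative; the kernel of course works on 256-bit words):

    // base^exp mod m by square-and-multiply, scanning exp LSB-first.
    fn pow_mod(base: u64, mut exp: u64, m: u64) -> u64 {
        let m128 = m as u128;
        let mut acc: u128 = 1;
        let mut b: u128 = (base % m) as u128;
        while exp > 0 {
            if exp & 1 == 1 {
                acc = acc * b % m128; // multiply step: exponent bit is set
            }
            b = b * b % m128; // square step
            exp >>= 1;
        }
        acc as u64
    }

    // Fermat inversion: for prime m and x not a multiple of m,
    // x^(m-2) = x^(-1) (mod m).
    fn inverse_mod(x: u64, m: u64) -> u64 {
        pow_mod(x, m - 2, m)
    }

    fn main() {
        let m = 65521; // toy prime standing in for the secp256k1 group order
        assert_eq!(12345 * inverse_mod(12345, m) % m, 1);
    }
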
+%macro inverse_secp_scalar + DUP1 + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + 
%mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + 
%squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + 
%mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + SWAP1 + // stack: x, x^-1 + POP + // stack: x^-1 +%endmacro diff --git a/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm b/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm new file mode 100644 index 00000000..2578f6aa --- /dev/null +++ b/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm @@ -0,0 +1,815 @@ +%macro secp_lift_x + // stack: x, v + DUP1 + // stack: x, x, v + %cubemodn_secp + // stack: x^3, x, v + PUSH 7 + // stack: 7, x^3, x, v + %addmodn_secp + // stack: x^3+7, x, v + %sqrt_secp + // stack: y, x, v + DUP1 + // stack: y, y, x, v + PUSH 1 + // stack: 1, y, y, x, v + AND + // stack: 1 & y, y, x, v + PUSH 27 + // stack: 27, 1 & y, y, x, v + DUP5 + // stack: v, 27, 1 & y, y, x, v + SUB + // stack: v - 27, 1 & y, y, x, v + EQ + // stack: correctParity, y, x, v + DUP2 + // stack: y, correctParity, y, x, v + %secp_base + // stack: N, y, correctParity, y, x, v + SUB + // stack: N - y, correctParity, y, x, v + SWAP1 + // stack: correctParity, N - y, y, x, v + %select_bool + // stack: goody, x, v + SWAP2 + // stack: v, x, goody + POP + // stack: x, goody +%endmacro + +%macro cubemodn_secp + // stack: x + DUP1 + // stack: x, x + %squaremodn_secp + // stack: x^2, x + %mulmodn_secp +%endmacro + +%macro addmodn_secp + // stack: x, y + %secp_base + // stack: N, x, y + SWAP2 + // stack: y, x, N + ADDMOD +%endmacro + +// Returns sqrt(x). Computed as x^(q+1)/4, with q the Secp base field order. +/// To replace with more efficient method using non-determinism later. 
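
Why this works (background, not from the patch): the secp256k1 base field order q is congruent to 3 mod 4, so for a quadratic residue x, the candidate y = x^((q+1)/4) satisfies y^2 = x^((q+1)/2) = x * x^((q-1)/2) = x by Euler's criterion. When x is not a residue, the same formula silently yields a y with y^2 = -x, which is why the candidate must be squared and compared against x, the sqrtOk check that a later patch in this series adds to %secp_lift_x. A self-contained Rust sketch with a toy prime, repeating pow_mod from the inversion sketch so it compiles on its own:

    fn pow_mod(base: u64, mut exp: u64, m: u64) -> u64 {
        let (m128, mut acc, mut b) = (m as u128, 1u128, (base % m) as u128);
        while exp > 0 {
            if exp & 1 == 1 { acc = acc * b % m128; }
            b = b * b % m128;
            exp >>= 1;
        }
        acc as u64
    }

    // Square root mod a prime q = 3 (mod 4); q/4 + 1 equals (q+1)/4 while
    // avoiding overflow. None means x was not a quadratic residue.
    fn sqrt_mod(x: u64, q: u64) -> Option<u64> {
        assert_eq!(q % 4, 3);
        let cand = pow_mod(x % q, q / 4 + 1, q);
        let sq = (cand as u128 * cand as u128 % q as u128) as u64;
        if sq == x % q { Some(cand) } else { None }
    }

    fn main() {
        let q = 65519; // toy prime with q = 3 (mod 4)
        let r = sqrt_mod(4, q).expect("4 is a quadratic residue");
        assert!(r == 2 || r == q - 2); // either square root is valid
    }
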
+%macro sqrt_secp + // stack: x + DUP1 + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + 
%mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + 
%squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + %squaremodn_secp + %squaremodn_secp + %squaremodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + %squaremodn_secp + SWAP1 + // stack: x, x^-1 + POP + // stack: x^-1 +%endmacro \ No newline at end of file From 905b0243e70dc5cb6b889c925fb76d86dd750775 Mon Sep 17 
00:00:00 2001 From: wborgeaud Date: Thu, 14 Jul 2022 13:07:58 +0200 Subject: [PATCH 06/15] Minor fixes --- evm/src/cpu/kernel/aggregator.rs | 2 ++ evm/src/cpu/kernel/asm/ecrecover.asm | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/evm/src/cpu/kernel/aggregator.rs b/evm/src/cpu/kernel/aggregator.rs index fbe1c990..aba35828 100644 --- a/evm/src/cpu/kernel/aggregator.rs +++ b/evm/src/cpu/kernel/aggregator.rs @@ -25,6 +25,8 @@ pub(crate) fn combined_kernel() -> Kernel { include_str!("asm/secp256k1/curve_mul.asm"), include_str!("asm/secp256k1/curve_add.asm"), include_str!("asm/secp256k1/moddiv.asm"), + include_str!("asm/secp256k1/lift_x.asm"), + include_str!("asm/secp256k1/inverse_scalar.asm"), include_str!("asm/ecrecover.asm"), include_str!("asm/storage_read.asm"), include_str!("asm/storage_write.asm"), diff --git a/evm/src/cpu/kernel/asm/ecrecover.asm b/evm/src/cpu/kernel/asm/ecrecover.asm index 97d12616..1f36eb61 100644 --- a/evm/src/cpu/kernel/asm/ecrecover.asm +++ b/evm/src/cpu/kernel/asm/ecrecover.asm @@ -5,7 +5,7 @@ global ecrecover: // stack: isValid(v,r,s), hash, v, r, s, retdest %jumpi(ecrecover_valid_input) // stack: hash, v, r, s, retdest - %pop(4) + %pop4 // stack: retdest %ecrecover_invalid_input From ad9e13102692a77ad48a6357a14ea7348c0233ab Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Thu, 14 Jul 2022 13:16:25 +0200 Subject: [PATCH 07/15] Add test --- evm/src/cpu/kernel/asm/ecrecover.asm | 17 ----------------- evm/src/cpu/kernel/tests/ecrecover.rs | 22 ++++++++++++++++++++++ evm/src/cpu/kernel/tests/mod.rs | 1 + 3 files changed, 23 insertions(+), 17 deletions(-) create mode 100644 evm/src/cpu/kernel/tests/ecrecover.rs diff --git a/evm/src/cpu/kernel/asm/ecrecover.asm b/evm/src/cpu/kernel/asm/ecrecover.asm index 1f36eb61..14d398af 100644 --- a/evm/src/cpu/kernel/asm/ecrecover.asm +++ b/evm/src/cpu/kernel/asm/ecrecover.asm @@ -48,23 +48,6 @@ ecrecover_valid_input: // stack: x, y, u1, ecrecover_with_first_point, hash, r^(-1), retdest %jump(ec_mul_valid_point_secp) - SWAP2 - // stack: hash, y, u1, x, r^(-1), retdest - SWAP3 - // stack: x, y, u1, hash, r^(-1), retdest - SWAP4 - // stack: r^(-1), y, hash, x, u1, retdest - SWAP1 - // stack: y, r^(-1), hash, x, u1, retdest - SWAP2 - // stack: hash, r^(-1), y, x, u1, retdest - %secp_scalar - // stack: p, hash, r^(-1), y, x, u1, retdest - SUB - // stack: p - hash, r^(-1), y, x, u1, retdest // Assume hash < p, should be hard (127-bit) to find a hash larger than p. - %mulmodn_secp_scalar - // stack: u2, y, x, u1, retdest // Assume hash < p, should be hard (127-bit) to find a hash larger than p. - ecrecover_with_first_point: JUMPDEST // stack: X, Y, hash, r^(-1), retdest diff --git a/evm/src/cpu/kernel/tests/ecrecover.rs b/evm/src/cpu/kernel/tests/ecrecover.rs new file mode 100644 index 00000000..e185dcd3 --- /dev/null +++ b/evm/src/cpu/kernel/tests/ecrecover.rs @@ -0,0 +1,22 @@ +use anyhow::Result; + +use crate::cpu::kernel::aggregator::combined_kernel; +use crate::cpu::kernel::interpreter::run; +use crate::cpu::kernel::tests::u256ify; + +#[test] +fn test_ec_ops() -> Result<()> { + // Make sure we can parse and assemble the entire kernel. 
+ let kernel = combined_kernel(); + let ecrecover = kernel.global_labels["ecrecover"]; + let hash = "0x0"; + let v = "0x27"; + let r = "0x1"; + let s = "0x1"; + + let initial_stack = u256ify([s, r, v, hash])?; + let stack = run(&kernel.code, ecrecover, initial_stack); + dbg!(stack); + + Ok(()) +} diff --git a/evm/src/cpu/kernel/tests/mod.rs b/evm/src/cpu/kernel/tests/mod.rs index 49f16da6..100ef377 100644 --- a/evm/src/cpu/kernel/tests/mod.rs +++ b/evm/src/cpu/kernel/tests/mod.rs @@ -1,4 +1,5 @@ mod curve_ops; +mod ecrecover; mod exp; use std::str::FromStr; From 33a5934255c1f58544ee5f47a357e211eacc0d69 Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Thu, 14 Jul 2022 14:26:01 +0200 Subject: [PATCH 08/15] Passing tests --- evm/Cargo.toml | 2 + evm/src/cpu/kernel/aggregator.rs | 5 -- evm/src/cpu/kernel/asm/curve_mul.asm | 6 +- evm/src/cpu/kernel/asm/ecrecover.asm | 6 +- .../cpu/kernel/asm/secp256k1/curve_mul.asm | 17 +---- .../kernel/asm/secp256k1/inverse_scalar.asm | 2 +- evm/src/cpu/kernel/tests/ecrecover.rs | 66 +++++++++++++++---- 7 files changed, 65 insertions(+), 39 deletions(-) diff --git a/evm/Cargo.toml b/evm/Cargo.toml index facf300b..714b6389 100644 --- a/evm/Cargo.toml +++ b/evm/Cargo.toml @@ -20,9 +20,11 @@ rand = "0.8.5" rand_chacha = "0.3.1" rlp = "0.5.1" keccak-rust = { git = "https://github.com/npwardberkeley/keccak-rust" } +keccak-hash = "0.9.0" [dev-dependencies] hex-literal = "0.3.4" +hex = "0.4.3" [features] asmtools = ["hex"] diff --git a/evm/src/cpu/kernel/aggregator.rs b/evm/src/cpu/kernel/aggregator.rs index 6517e89d..1e1cd1f4 100644 --- a/evm/src/cpu/kernel/aggregator.rs +++ b/evm/src/cpu/kernel/aggregator.rs @@ -38,12 +38,7 @@ pub(crate) fn combined_kernel() -> Kernel { #[cfg(test)] mod tests { - use std::str::FromStr; - - use anyhow::Result; - use ethereum_types::U256; use log::debug; - use rand::{thread_rng, Rng}; use crate::cpu::kernel::aggregator::combined_kernel; diff --git a/evm/src/cpu/kernel/asm/curve_mul.asm b/evm/src/cpu/kernel/asm/curve_mul.asm index 246946d9..d976d9d5 100644 --- a/evm/src/cpu/kernel/asm/curve_mul.asm +++ b/evm/src/cpu/kernel/asm/curve_mul.asm @@ -14,7 +14,7 @@ global ec_mul: // stack: x, y, x, y, s, retdest %ec_isidentity // stack: (x,y)==(0,0), x, y, s, retdest - %jumpi(ret_zero) + %jumpi(ret_zero_ec_mul) // stack: x, y, s, retdest DUP2 // stack: y, x, y, s, retdest @@ -35,7 +35,7 @@ ec_mul_valid_point: // stack: s, x, y, s, retdest %jumpi(step_case) // stack: x, y, s, retdest - %jump(ret_zero) + %jump(ret_zero_ec_mul) step_case: JUMPDEST @@ -100,7 +100,7 @@ odd_scalar: // stack: x', y', x, y, retdest %jump(ec_add_valid_points) -ret_zero: +global ret_zero_ec_mul: JUMPDEST // stack: x, y, s, retdest %pop3 diff --git a/evm/src/cpu/kernel/asm/ecrecover.asm b/evm/src/cpu/kernel/asm/ecrecover.asm index 14d398af..519dc75e 100644 --- a/evm/src/cpu/kernel/asm/ecrecover.asm +++ b/evm/src/cpu/kernel/asm/ecrecover.asm @@ -69,11 +69,11 @@ ecrecover_with_first_point: // stack: -hash, r^(-1), Y, X, retdest %mulmodn_secp_scalar // stack: u2, Y, X, retdest - PUSH 8 + PUSH final_hashing // stack: final_hashing, u2, Y, X, retdest SWAP3 // stack: X, u2, Y, final_hashing, retdest - PUSH 7 + PUSH ec_add_valid_points_secp // stack: ec_add_valid_points_secp, X, u2, Y, final_hashing, retdest SWAP1 // stack: X, ec_add_valid_points_secp, u2, Y, final_hashing, retdest @@ -115,8 +115,6 @@ final_hashing: // stack: v==28, v==27, hash, v, r, s, retdest OR // stack: (v==28 || v==27), hash, v, r, s, retdest - ISZERO - // stack: (v==28 || v==27), hash, v, r, s, retdest 
DUP5 // stack: s, (v==28 || v==27), hash, v, r, s, retdest %secp_is_out_of_bounds diff --git a/evm/src/cpu/kernel/asm/secp256k1/curve_mul.asm b/evm/src/cpu/kernel/asm/secp256k1/curve_mul.asm index 7b332955..4438857a 100644 --- a/evm/src/cpu/kernel/asm/secp256k1/curve_mul.asm +++ b/evm/src/cpu/kernel/asm/secp256k1/curve_mul.asm @@ -9,7 +9,7 @@ global ec_mul_secp: // stack: x, y, x, y, s, retdest %ec_isidentity // stack: (x,y)==(0,0), x, y, s, retdest - %jumpi(ret_zero) + %jumpi(ret_zero_ec_mul) // stack: x, y, s, retdest DUP2 // stack: y, x, y, s, retdest @@ -30,7 +30,7 @@ global ec_mul_valid_point_secp: // stack: s, x, y, s, retdest %jumpi(step_case) // stack: x, y, s, retdest - %jump(ret_zero) + %jump(ret_zero_ec_mul) step_case: JUMPDEST @@ -94,16 +94,3 @@ odd_scalar: JUMPDEST // stack: x', y', x, y, retdest %jump(ec_add_valid_points_secp) - -ret_zero: - JUMPDEST - // stack: x, y, s, retdest - %pop3 - // stack: retdest - PUSH 0 - // stack: 0, retdest - PUSH 0 - // stack: 0, 0, retdest - SWAP2 - // stack: retdest, 0, 0 - JUMP diff --git a/evm/src/cpu/kernel/asm/secp256k1/inverse_scalar.asm b/evm/src/cpu/kernel/asm/secp256k1/inverse_scalar.asm index 704191fa..ce0af757 100644 --- a/evm/src/cpu/kernel/asm/secp256k1/inverse_scalar.asm +++ b/evm/src/cpu/kernel/asm/secp256k1/inverse_scalar.asm @@ -14,7 +14,7 @@ // stack: x DUP1 // stack: x, x - %mulmodn_secp + %mulmodn_secp_scalar %endmacro // Computes the inverse modulo N using x^-1 = x^(N-2) mod N and square-and-multiply modular exponentiation. diff --git a/evm/src/cpu/kernel/tests/ecrecover.rs b/evm/src/cpu/kernel/tests/ecrecover.rs index e185dcd3..4a3a257b 100644 --- a/evm/src/cpu/kernel/tests/ecrecover.rs +++ b/evm/src/cpu/kernel/tests/ecrecover.rs @@ -1,22 +1,66 @@ -use anyhow::Result; +use anyhow::{ensure, Result}; +use ethereum_types::U256; +use hex_literal::hex; +use keccak_hash::keccak; use crate::cpu::kernel::aggregator::combined_kernel; +use crate::cpu::kernel::assembler::Kernel; use crate::cpu::kernel::interpreter::run; use crate::cpu::kernel::tests::u256ify; -#[test] -fn test_ec_ops() -> Result<()> { - // Make sure we can parse and assemble the entire kernel. 
- let kernel = combined_kernel(); - let ecrecover = kernel.global_labels["ecrecover"]; - let hash = "0x0"; - let v = "0x27"; - let r = "0x1"; - let s = "0x1"; +fn pubkey_to_addr(x: U256, y: U256) -> Vec { + let mut buf = [0; 64]; + x.to_big_endian(&mut buf[0..32]); + y.to_big_endian(&mut buf[32..64]); + let hash = keccak(buf); + hash.0[12..].to_vec() +} +fn test_valid_ecrecover( + hash: &str, + v: &str, + r: &str, + s: &str, + expected: &str, + kernel: &Kernel, +) -> Result<()> { + let ecrecover = kernel.global_labels["ecrecover"]; let initial_stack = u256ify([s, r, v, hash])?; let stack = run(&kernel.code, ecrecover, initial_stack); - dbg!(stack); + let got = pubkey_to_addr(stack[1], stack[0]); + assert_eq!(got, hex::decode(expected).unwrap()); + + Ok(()) +} + +#[test] +fn test_ecrecover() -> Result<()> { + let kernel = combined_kernel(); + + test_valid_ecrecover( + "0x55f77e8909b1f1c9531c4a309bb2d40388e9ed4b87830c8f90363c6b36255fb9", + "0x1b", + "0xd667c5a20fa899b253924099e10ae92998626718585b8171eb98de468bbebc", + "0x58351f48ce34bf134ee611fb5bf255a5733f0029561d345a7d46bfa344b60ac0", + "67f3c0Da351384838d7F7641AB0fCAcF853E1844", + &kernel, + )?; + test_valid_ecrecover( + "0x55f77e8909b1f1c9531c4a309bb2d40388e9ed4b87830c8f90363c6b36255fb9", + "0x1c", + "0xd667c5a20fa899b253924099e10ae92998626718585b8171eb98de468bbebc", + "0x58351f48ce34bf134ee611fb5bf255a5733f0029561d345a7d46bfa344b60ac0", + "aA58436DeABb64982a386B2De1A8015AA28fCCc0", + &kernel, + )?; + // test_valid_ecrecover( + // "0x0", + // "0x1c", + // "0x3a18b21408d275dde53c0ea86f9c1982eca60193db0ce15008fa408d43024847", + // "0x5db9745f44089305b2f2c980276e7025a594828d878e6e36dd2abd34ca6b9e3d", + // "aA58436DeABb64982a386B2De1A8015AA28fCCc0", + // &kernel, + // )?; Ok(()) } From 7ee884b84d86e42ce968b1d200b5556c93f7851a Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Thu, 14 Jul 2022 15:26:07 +0200 Subject: [PATCH 09/15] More tests --- evm/src/cpu/kernel/asm/basic_macros.asm | 5 ++ evm/src/cpu/kernel/asm/ecrecover.asm | 24 +++++--- evm/src/cpu/kernel/asm/secp256k1/lift_x.asm | 66 ++++++++++++--------- evm/src/cpu/kernel/tests/ecrecover.rs | 63 ++++++++++++++++---- 4 files changed, 111 insertions(+), 47 deletions(-) diff --git a/evm/src/cpu/kernel/asm/basic_macros.asm b/evm/src/cpu/kernel/asm/basic_macros.asm index 376b661d..b70ce41f 100644 --- a/evm/src/cpu/kernel/asm/basic_macros.asm +++ b/evm/src/cpu/kernel/asm/basic_macros.asm @@ -23,6 +23,11 @@ %pop2 %endmacro +%macro pop5 + %pop2 + %pop3 +%endmacro + // If pred is zero, yields z; otherwise, yields nz %macro select // stack: pred, nz, z diff --git a/evm/src/cpu/kernel/asm/ecrecover.asm b/evm/src/cpu/kernel/asm/ecrecover.asm index 519dc75e..09dc50e8 100644 --- a/evm/src/cpu/kernel/asm/ecrecover.asm +++ b/evm/src/cpu/kernel/asm/ecrecover.asm @@ -3,9 +3,21 @@ global ecrecover: // stack: hash, v, r, s, retdest %ecrecover_input_check // stack: isValid(v,r,s), hash, v, r, s, retdest + SWAP2 + // stack: v, hash, isValid(v,r,s), r, s, retdest + DUP4 + // stack: r, v, hash, isValid(v,r,s), r, s, retdest + %secp_lift_x + // stack: sqrtOk, x, y, hash, isValid(v,r,s), r, s, retdest + SWAP1 + // stack: x, sqrtOk, y, hash, isValid(v,r,s), r, s, retdest + SWAP4 + // stack: isValid(v,r,s), sqrtOk, y, hash, x, r, s, retdest + AND + // stack: isValid(v,r,s) & sqrtOk, y, hash, x, r, s, retdest %jumpi(ecrecover_valid_input) - // stack: hash, v, r, s, retdest - %pop4 + // stack: y, hash, x, r, s, retdest + %pop5 // stack: retdest %ecrecover_invalid_input @@ -17,12 +29,10 @@ global ecrecover: // return 
u1*P + u2*GENERATOR; ecrecover_valid_input: JUMPDEST - // stack: hash, v, r, s, retdest + // stack: y, hash, x, r, s, retdest SWAP1 - // stack: v, hash, r, s, retdest - DUP3 - // stack: r, v, hash, r, s, retdest - %secp_lift_x + // stack: hash, y, x, r, s, retdest + SWAP2 // stack: x, y, hash, r, s, retdest SWAP3 // stack: r, y, hash, x, s, retdest diff --git a/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm b/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm index 2578f6aa..57469239 100644 --- a/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm +++ b/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm @@ -8,36 +8,48 @@ // stack: 7, x^3, x, v %addmodn_secp // stack: x^3+7, x, v - %sqrt_secp - // stack: y, x, v DUP1 - // stack: y, y, x, v - PUSH 1 - // stack: 1, y, y, x, v - AND - // stack: 1 & y, y, x, v - PUSH 27 - // stack: 27, 1 & y, y, x, v - DUP5 - // stack: v, 27, 1 & y, y, x, v - SUB - // stack: v - 27, 1 & y, y, x, v - EQ - // stack: correctParity, y, x, v - DUP2 - // stack: y, correctParity, y, x, v - %secp_base - // stack: N, y, correctParity, y, x, v - SUB - // stack: N - y, correctParity, y, x, v + // stack: x^3+7, x^3+7, x, v + %sqrt_secp + // stack: y, x^3+7, x, v SWAP1 - // stack: correctParity, N - y, y, x, v - %select_bool - // stack: goody, x, v + // stack: x^3+7, y, x, v + DUP2 + // stack: y, x^3+7, y, x, v + %squaremodn_secp + // stack: y^2, x^3+7, y, x, v + EQ + // stack: sqrtOk, y, x, v + SWAP3 + // stack: v, y, x, sqrtOk + DUP2 + // stack: y, v, y, x, sqrtOk + PUSH 1 + // stack: 1, y, v, y, x, sqrtOk + AND + // stack: 1 & y, v, y, x, sqrtOk + PUSH 27 + // stack: 27, 1 & y, v, y, x, sqrtOk + SWAP1 + // stack: 1 & y, 27, v, y, x, sqrtOk SWAP2 - // stack: v, x, goody - POP - // stack: x, goody + // stack: v, 27, 1 & y, y, x, sqrtOk + SUB + // stack: v - 27, 1 & y, y, x, sqrtOk + EQ + // stack: correctParity, y, x, sqrtOk + DUP2 + // stack: y, correctParity, y, x, sqrtOk + %secp_base + // stack: N, y, correctParity, y, x, sqrtOk + SUB + // stack: N - y, correctParity, y, x, sqrtOk + SWAP1 + // stack: correctParity, N - y, y, x, sqrtOk + %select_bool + // stack: goody, x, sqrtOk + SWAP2 + // stack: sqrtOk, x, goody %endmacro %macro cubemodn_secp diff --git a/evm/src/cpu/kernel/tests/ecrecover.rs b/evm/src/cpu/kernel/tests/ecrecover.rs index 4a3a257b..47115317 100644 --- a/evm/src/cpu/kernel/tests/ecrecover.rs +++ b/evm/src/cpu/kernel/tests/ecrecover.rs @@ -1,6 +1,5 @@ -use anyhow::{ensure, Result}; +use anyhow::Result; use ethereum_types::U256; -use hex_literal::hex; use keccak_hash::keccak; use crate::cpu::kernel::aggregator::combined_kernel; @@ -28,7 +27,16 @@ fn test_valid_ecrecover( let initial_stack = u256ify([s, r, v, hash])?; let stack = run(&kernel.code, ecrecover, initial_stack); let got = pubkey_to_addr(stack[1], stack[0]); - assert_eq!(got, hex::decode(expected).unwrap()); + assert_eq!(got, hex::decode(&expected[2..]).unwrap()); + + Ok(()) +} + +fn test_invalid_ecrecover(hash: &str, v: &str, r: &str, s: &str, kernel: &Kernel) -> Result<()> { + let ecrecover = kernel.global_labels["ecrecover"]; + let initial_stack = u256ify(["0xdeadbeef", s, r, v, hash])?; + let stack = run(&kernel.code, ecrecover, initial_stack); + assert_eq!(stack, vec![U256::MAX]); Ok(()) } @@ -42,7 +50,7 @@ fn test_ecrecover() -> Result<()> { "0x1b", "0xd667c5a20fa899b253924099e10ae92998626718585b8171eb98de468bbebc", "0x58351f48ce34bf134ee611fb5bf255a5733f0029561d345a7d46bfa344b60ac0", - "67f3c0Da351384838d7F7641AB0fCAcF853E1844", + "0x67f3c0Da351384838d7F7641AB0fCAcF853E1844", &kernel, )?; test_valid_ecrecover( @@ 
-50,17 +58,46 @@ fn test_ecrecover() -> Result<()> { "0x1c", "0xd667c5a20fa899b253924099e10ae92998626718585b8171eb98de468bbebc", "0x58351f48ce34bf134ee611fb5bf255a5733f0029561d345a7d46bfa344b60ac0", - "aA58436DeABb64982a386B2De1A8015AA28fCCc0", + "0xaA58436DeABb64982a386B2De1A8015AA28fCCc0", + &kernel, + )?; + test_valid_ecrecover( + "0x0", + "0x1c", + "0x1", + "0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364140", + "0x3344c6f6eeCA588be132142DB0a32C71ABFAAe7B", + &kernel, + )?; + + test_invalid_ecrecover( + "0x0", + "0x42", // v not in {27,28} + "0x1", + "0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364140", + &kernel, + )?; + test_invalid_ecrecover( + "0x0", + "0x42", + "0xd667c5a20fa899b253924099e10ae92998626718585b8171eb98de468bbebc", + "0x0", // s=0 + &kernel, + )?; + test_invalid_ecrecover( + "0x0", + "0x42", + "0x0", // r=0 + "0xd667c5a20fa899b253924099e10ae92998626718585b8171eb98de468bbebc", + &kernel, + )?; + test_invalid_ecrecover( + "0x0", + "0x1c", + "0x3a18b21408d275dde53c0ea86f9c1982eca60193db0ce15008fa408d43024847", // r^3 + 7 isn't a square + "0x5db9745f44089305b2f2c980276e7025a594828d878e6e36dd2abd34ca6b9e3d", &kernel, )?; - // test_valid_ecrecover( - // "0x0", - // "0x1c", - // "0x3a18b21408d275dde53c0ea86f9c1982eca60193db0ce15008fa408d43024847", - // "0x5db9745f44089305b2f2c980276e7025a594828d878e6e36dd2abd34ca6b9e3d", - // "aA58436DeABb64982a386B2De1A8015AA28fCCc0", - // &kernel, - // )?; Ok(()) } From 0ccd5adc7b166a9b4d802533e7c3851ff70b46c1 Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Thu, 14 Jul 2022 19:23:08 +0200 Subject: [PATCH 10/15] Redundant x-coord in lifting --- evm/src/cpu/kernel/asm/basic_macros.asm | 5 --- evm/src/cpu/kernel/asm/ecrecover.asm | 34 ++++++++------- evm/src/cpu/kernel/asm/secp256k1/lift_x.asm | 48 ++++++++++----------- evm/src/cpu/kernel/interpreter.rs | 1 + 4 files changed, 42 insertions(+), 46 deletions(-) diff --git a/evm/src/cpu/kernel/asm/basic_macros.asm b/evm/src/cpu/kernel/asm/basic_macros.asm index b70ce41f..376b661d 100644 --- a/evm/src/cpu/kernel/asm/basic_macros.asm +++ b/evm/src/cpu/kernel/asm/basic_macros.asm @@ -23,11 +23,6 @@ %pop2 %endmacro -%macro pop5 - %pop2 - %pop3 -%endmacro - // If pred is zero, yields z; otherwise, yields nz %macro select // stack: pred, nz, z diff --git a/evm/src/cpu/kernel/asm/ecrecover.asm b/evm/src/cpu/kernel/asm/ecrecover.asm index 09dc50e8..0b9b0d06 100644 --- a/evm/src/cpu/kernel/asm/ecrecover.asm +++ b/evm/src/cpu/kernel/asm/ecrecover.asm @@ -1,26 +1,33 @@ +// ecrecover precompile. global ecrecover: JUMPDEST // stack: hash, v, r, s, retdest + + // Check if inputs are valid. %ecrecover_input_check // stack: isValid(v,r,s), hash, v, r, s, retdest + + // Lift r to an elliptic curve point if possible. SWAP2 // stack: v, hash, isValid(v,r,s), r, s, retdest DUP4 // stack: r, v, hash, isValid(v,r,s), r, s, retdest %secp_lift_x - // stack: sqrtOk, x, y, hash, isValid(v,r,s), r, s, retdest - SWAP1 - // stack: x, sqrtOk, y, hash, isValid(v,r,s), r, s, retdest - SWAP4 - // stack: isValid(v,r,s), sqrtOk, y, hash, x, r, s, retdest + // stack: y, sqrtOk, hash, isValid(v,r,s), r, s, retdest + + // If inputs are invalid or lifting fails, abort. 
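+    // "Invalid" here is hedged from the tests in ecrecover.rs rather than from
+    // %ecrecover_input_check itself: v outside {27, 28}, r = 0 or s = 0 (and
+    // presumably r, s >= the group order N) should fail the check, while an r
+    // with no matching curve point makes %secp_lift_x clear sqrtOk.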
+ SWAP3 + // stack: isValid(v,r,s), sqrtOk, hash, y, r, s, retdest AND - // stack: isValid(v,r,s) & sqrtOk, y, hash, x, r, s, retdest + // stack: isValid(v,r,s) & sqrtOk, hash, y, r, s, retdest %jumpi(ecrecover_valid_input) - // stack: y, hash, x, r, s, retdest - %pop5 + // stack: hash, y, r, s, retdest + %pop4 // stack: retdest %ecrecover_invalid_input +// ecrecover precompile. +// Assumption: Inputs are valid. // Pseudo-code: // let P = lift_x(r, recovery_id); // let r_inv = r.inverse(); @@ -29,13 +36,10 @@ global ecrecover: // return u1*P + u2*GENERATOR; ecrecover_valid_input: JUMPDEST - // stack: y, hash, x, r, s, retdest - SWAP1 - // stack: hash, y, x, r, s, retdest - SWAP2 - // stack: x, y, hash, r, s, retdest - SWAP3 - // stack: r, y, hash, x, s, retdest + // stack: hash, y, r, s, retdest + DUP3 + // stack: r, y, hash, r, s, retdest + STOP %inverse_secp_scalar // stack: r^(-1), y, hash, x, s, retdest DUP1 diff --git a/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm b/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm index 57469239..aba07392 100644 --- a/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm +++ b/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm @@ -1,55 +1,51 @@ %macro secp_lift_x // stack: x, v - DUP1 - // stack: x, x, v %cubemodn_secp - // stack: x^3, x, v + // stack: x^3, v PUSH 7 - // stack: 7, x^3, x, v + // stack: 7, x^3, v %addmodn_secp // stack: x^3+7, x, v DUP1 - // stack: x^3+7, x^3+7, x, v + // stack: x^3+7, x^3+7, v %sqrt_secp // stack: y, x^3+7, x, v SWAP1 - // stack: x^3+7, y, x, v + // stack: x^3+7, y, v DUP2 - // stack: y, x^3+7, y, x, v + // stack: y, x^3+7, y, v %squaremodn_secp - // stack: y^2, x^3+7, y, x, v + // stack: y^2, x^3+7, y, v EQ - // stack: sqrtOk, y, x, v - SWAP3 - // stack: v, y, x, sqrtOk + // stack: sqrtOk, y, v + SWAP2 + // stack: v, y, sqrtOk DUP2 - // stack: y, v, y, x, sqrtOk + // stack: y, v, y, sqrtOk PUSH 1 - // stack: 1, y, v, y, x, sqrtOk + // stack: 1, y, v, y, sqrtOk AND - // stack: 1 & y, v, y, x, sqrtOk + // stack: 1 & y, v, y, sqrtOk PUSH 27 - // stack: 27, 1 & y, v, y, x, sqrtOk + // stack: 27, 1 & y, v, y, sqrtOk SWAP1 - // stack: 1 & y, 27, v, y, x, sqrtOk + // stack: 1 & y, 27, v, y, sqrtOk SWAP2 - // stack: v, 27, 1 & y, y, x, sqrtOk + // stack: v, 27, 1 & y, y, sqrtOk SUB - // stack: v - 27, 1 & y, y, x, sqrtOk + // stack: v - 27, 1 & y, y, sqrtOk EQ - // stack: correctParity, y, x, sqrtOk + // stack: correctParity, y, sqrtOk DUP2 - // stack: y, correctParity, y, x, sqrtOk + // stack: y, correctParity, y, sqrtOk %secp_base - // stack: N, y, correctParity, y, x, sqrtOk + // stack: N, y, correctParity, y, sqrtOk SUB - // stack: N - y, correctParity, y, x, sqrtOk + // stack: N - y, correctParity, y, sqrtOk SWAP1 - // stack: correctParity, N - y, y, x, sqrtOk + // stack: correctParity, N - y, y, sqrtOk %select_bool - // stack: goody, x, sqrtOk - SWAP2 - // stack: sqrtOk, x, goody + // stack: goody, sqrtOk %endmacro %macro cubemodn_secp diff --git a/evm/src/cpu/kernel/interpreter.rs b/evm/src/cpu/kernel/interpreter.rs index 09e493b9..e2ccd9f3 100644 --- a/evm/src/cpu/kernel/interpreter.rs +++ b/evm/src/cpu/kernel/interpreter.rs @@ -138,6 +138,7 @@ impl<'a> Interpreter<'a> { } fn run_stop(&mut self) { + dbg!(&self.stack); self.running = false; } From f4390410a3b0b81e682fbd351597d0c0cd807723 Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Thu, 14 Jul 2022 19:39:07 +0200 Subject: [PATCH 11/15] Comments --- evm/src/cpu/kernel/asm/ecrecover.asm | 17 +++++++++++++++-- evm/src/cpu/kernel/asm/secp256k1/lift_x.asm | 2 ++ 
evm/src/cpu/kernel/interpreter.rs | 1 - 3 files changed, 17 insertions(+), 3 deletions(-) diff --git a/evm/src/cpu/kernel/asm/ecrecover.asm b/evm/src/cpu/kernel/asm/ecrecover.asm index 0b9b0d06..0d1776dd 100644 --- a/evm/src/cpu/kernel/asm/ecrecover.asm +++ b/evm/src/cpu/kernel/asm/ecrecover.asm @@ -37,9 +37,12 @@ global ecrecover: ecrecover_valid_input: JUMPDEST // stack: hash, y, r, s, retdest + + // Compute u1 = s * r^(-1) + SWAP1 + // stack: y, hash, r, s, retdest DUP3 - // stack: r, y, hash, r, s, retdest - STOP + // stack: r, y, hash, x, s, retdest (r=x) %inverse_secp_scalar // stack: r^(-1), y, hash, x, s, retdest DUP1 @@ -48,6 +51,9 @@ ecrecover_valid_input: // stack: s, r^(-1), y, hash, x, r^(-1), retdest %mulmodn_secp_scalar // stack: u1, y, hash, x, r^(-1), retdest + + + // Compute (X,Y) = u1 * (x,y) PUSH ecrecover_with_first_point // stack: ecrecover_with_first_point, u1, y, hash, x, r^(-1), retdest SWAP1 @@ -62,6 +68,8 @@ ecrecover_valid_input: // stack: x, y, u1, ecrecover_with_first_point, hash, r^(-1), retdest %jump(ec_mul_valid_point_secp) +// ecrecover precompile. +// Assumption: (X,Y) = u1 * P. Result is (X,Y) + u2*GENERATOR ecrecover_with_first_point: JUMPDEST // stack: X, Y, hash, r^(-1), retdest @@ -75,6 +83,8 @@ ecrecover_with_first_point: // stack: Y, p, r^(-1), hash, X, retdest SWAP3 // stack: hash, p, r^(-1), Y, X, retdest + + // Compute u2 = -hash * r^(-1) MOD // stack: hash%p, r^(-1), Y, X, retdest %secp_scalar @@ -83,6 +93,9 @@ ecrecover_with_first_point: // stack: -hash, r^(-1), Y, X, retdest %mulmodn_secp_scalar // stack: u2, Y, X, retdest + + // Compute u2 * GENERATOR and chain the call to `ec_mul` with a call to `ec_add` to compute PUBKEY = (X,Y) + u2 * GENERATOR, + // and a call to `final_hashing` to get the final result `SHA3(PUBKEY)[-20:]`. PUSH final_hashing // stack: final_hashing, u2, Y, X, retdest SWAP3 diff --git a/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm b/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm index aba07392..a03ba5eb 100644 --- a/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm +++ b/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm @@ -1,3 +1,5 @@ +// Returns y such that (x,y) is on Secp256k1 and y&1 = v - 27, +// as well as a flag indicating whether such a y exists. 
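+//
+// A hedged reference model of the macro below (p is the Secp256k1 base field
+// order; pow_mod is a hypothetical modular-exponentiation helper):
+//
+//   let c = (x*x*x + 7) % p;
+//   let y = pow_mod(c, (p + 1) / 4, p); // candidate root; valid since p = 3 mod 4
+//   let sqrt_ok = y*y % p == c;         // c has a square root iff this holds
+//   let y = if y & 1 == v - 27 { y } else { p - y }; // select the parity v - 27
+//   return (y, sqrt_ok);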
%macro secp_lift_x // stack: x, v %cubemodn_secp diff --git a/evm/src/cpu/kernel/interpreter.rs b/evm/src/cpu/kernel/interpreter.rs index e2ccd9f3..09e493b9 100644 --- a/evm/src/cpu/kernel/interpreter.rs +++ b/evm/src/cpu/kernel/interpreter.rs @@ -138,7 +138,6 @@ impl<'a> Interpreter<'a> { } fn run_stop(&mut self) { - dbg!(&self.stack); self.running = false; } From 62c094615db79149df773132984986a26caf17c3 Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Thu, 14 Jul 2022 19:46:02 +0200 Subject: [PATCH 12/15] Add `_base` suffix --- .../cpu/kernel/asm/secp256k1/curve_add.asm | 20 +- evm/src/cpu/kernel/asm/secp256k1/lift_x.asm | 1016 ++++++++-------- evm/src/cpu/kernel/asm/secp256k1/moddiv.asm | 1020 ++++++++--------- 3 files changed, 1027 insertions(+), 1029 deletions(-) diff --git a/evm/src/cpu/kernel/asm/secp256k1/curve_add.asm b/evm/src/cpu/kernel/asm/secp256k1/curve_add.asm index 50ddd1bc..f150e20e 100644 --- a/evm/src/cpu/kernel/asm/secp256k1/curve_add.asm +++ b/evm/src/cpu/kernel/asm/secp256k1/curve_add.asm @@ -71,15 +71,15 @@ global ec_add_valid_points_secp: // stack: y1, x0, y0, x1, y1, retdest DUP3 // stack: y0, y1, x0, y0, x1, y1, retdest - %submod_secp + %submod_secp_base // stack: y0 - y1, x0, y0, x1, y1, retdest DUP4 // stack: x1, y0 - y1, x0, y0, x1, y1, retdest DUP3 // stack: x0, x1, y0 - y1, x0, y0, x1, y1, retdest - %submod_secp + %submod_secp_base // stack: x0 - x1, y0 - y1, x0, y0, x1, y1, retdest - %moddiv_secp + %moddiv_secp_base // stack: lambda, x0, y0, x1, y1, retdest %jump(ec_add_valid_points_with_lambda) @@ -138,9 +138,9 @@ ec_add_valid_points_with_lambda: // stack: lambda, lambda, N, x1, x0, lambda, x0, y0, x1, y1, retdest MULMOD // stack: lambda^2, x1, x0, lambda, x0, y0, x1, y1, retdest - %submod_secp + %submod_secp_base // stack: lambda^2 - x1, x0, lambda, x0, y0, x1, y1, retdest - %submod_secp + %submod_secp_base // stack: x2, lambda, x0, y0, x1, y1, retdest // Compute y2 = lambda*(x1 - x2) - y1 @@ -150,7 +150,7 @@ ec_add_valid_points_with_lambda: // stack: x2, N, x2, lambda, x0, y0, x1, y1, retdest DUP7 // stack: x1, x2, N, x2, lambda, x0, y0, x1, y1, retdest - %submod_secp + %submod_secp_base // stack: x1 - x2, N, x2, lambda, x0, y0, x1, y1, retdest DUP4 // stack: lambda, x1 - x2, N, x2, lambda, x0, y0, x1, y1, retdest @@ -160,7 +160,7 @@ ec_add_valid_points_with_lambda: // stack: y1, lambda * (x1 - x2), x2, lambda, x0, y0, x1, y1, retdest SWAP1 // stack: lambda * (x1 - x2), y1, x2, lambda, x0, y0, x1, y1, retdest - %submod_secp + %submod_secp_base // stack: y2, x2, lambda, x0, y0, x1, y1, retdest // Return x2,y2 @@ -228,7 +228,7 @@ ec_add_equal_points: // stack: 3/2 * x0^2, x0, y0, x1, y1, retdest DUP3 // stack: y0, 3/2 * x0^2, x0, y0, x1, y1, retdest - %moddiv_secp + %moddiv_secp_base // stack: lambda, x0, y0, x1, y1, retdest %jump(ec_add_valid_points_with_lambda) @@ -249,9 +249,7 @@ global ec_double_secp: PUSH 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f %endmacro -// Assumption: x, y < N and 2N < 2^256. -// Note: Doesn't hold for Secp256k1 base field. -%macro submod_secp +%macro submod_secp_base // stack: x, y SWAP1 // stack: y, x diff --git a/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm b/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm index a03ba5eb..42bdda7d 100644 --- a/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm +++ b/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm @@ -2,21 +2,21 @@ // as well as a flag indicating whether such a y exists. 
%macro secp_lift_x // stack: x, v - %cubemodn_secp + %cubemodn_secp_base // stack: x^3, v PUSH 7 // stack: 7, x^3, v - %addmodn_secp + %addmodn_secp_base // stack: x^3+7, x, v DUP1 // stack: x^3+7, x^3+7, v - %sqrt_secp + %sqrt_secp_base // stack: y, x^3+7, x, v SWAP1 // stack: x^3+7, y, v DUP2 // stack: y, x^3+7, y, v - %squaremodn_secp + %squaremodn_secp_base // stack: y^2, x^3+7, y, v EQ // stack: sqrtOk, y, v @@ -50,16 +50,16 @@ // stack: goody, sqrtOk %endmacro -%macro cubemodn_secp +%macro cubemodn_secp_base // stack: x DUP1 // stack: x, x - %squaremodn_secp + %squaremodn_secp_base // stack: x^2, x - %mulmodn_secp + %mulmodn_secp_base %endmacro -%macro addmodn_secp +%macro addmodn_secp_base // stack: x, y %secp_base // stack: N, x, y @@ -70,754 +70,754 @@ // Returns sqrt(x). Computed as x^(q+1)/4, with q the Secp base field order. /// To replace with more efficient method using non-determinism later. -%macro sqrt_secp +%macro sqrt_secp_base // stack: x DUP1 - %squaremodn_secp + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + 
%squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + 
%mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - 
%squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - 
%mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + 
%squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 
- %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp - %squaremodn_secp - %squaremodn_secp - %squaremodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base + %squaremodn_secp_base + %squaremodn_secp_base + %squaremodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base + %squaremodn_secp_base SWAP1 // stack: x, x^-1 POP diff --git a/evm/src/cpu/kernel/asm/secp256k1/moddiv.asm b/evm/src/cpu/kernel/asm/secp256k1/moddiv.asm index dce8c03f..941fa33a 100644 --- a/evm/src/cpu/kernel/asm/secp256k1/moddiv.asm +++ b/evm/src/cpu/kernel/asm/secp256k1/moddiv.asm @@ -2,14 +2,14 @@ /// To replace with more efficient method using non-determinism later. // Returns y * (x^-1) where the inverse is taken modulo N -%macro moddiv_secp +%macro moddiv_secp_base // stack: x, y - %inverse_secp + %inverse_secp_base // stack: x^-1, y - %mulmodn_secp + %mulmodn_secp_base %endmacro -%macro mulmodn_secp +%macro mulmodn_secp_base // stack: x, y %secp_base // stack: N, x, y @@ -18,767 +18,767 @@ MULMOD %endmacro -%macro squaremodn_secp +%macro squaremodn_secp_base // stack: x DUP1 // stack: x, x - %mulmodn_secp + %mulmodn_secp_base %endmacro // Computes the inverse modulo N using x^-1 = x^(N-2) mod N and square-and-multiply modular exponentiation. 
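// The unrolled chain below is left-to-right square-and-multiply over the bits
// of N-2, most significant first: each %squaremodn_secp_base is one squaring,
// and each "DUP2 %mulmodn_secp_base" multiplies the accumulator by x for a set
// bit. A hedged reference model (bits_of is a hypothetical MSB-first iterator):
//
//   let mut acc = x;                       // consumes the leading 1-bit
//   for bit in bits_of(n - 2).skip(1) {
//       acc = acc * acc % n;               // %squaremodn_secp_base
//       if bit { acc = acc * x % n; }      // DUP2; %mulmodn_secp_base
//   }
//   // acc == x^(n-2) == x^-1 (mod n), by Fermat's little theorem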
-%macro inverse_secp +%macro inverse_secp_base DUP1 - %squaremodn_secp + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + 
%mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - 
%squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - 
%mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + 
%squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + 
%mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp - %squaremodn_secp - %squaremodn_secp - %squaremodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base + %squaremodn_secp_base + %squaremodn_secp_base + %squaremodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp - 
%squaremodn_secp - %squaremodn_secp + %mulmodn_secp_base + %squaremodn_secp_base + %squaremodn_secp_base DUP2 - %mulmodn_secp + %mulmodn_secp_base SWAP1 // stack: x, x^-1 POP From 48f9b7fdf3055e669a15363beeecb8c3c6992b5b Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Fri, 15 Jul 2022 09:56:52 +0200 Subject: [PATCH 13/15] PR feedback --- evm/src/cpu/kernel/asm/ecrecover.asm | 12 ++++++ .../cpu/kernel/asm/secp256k1/curve_add.asm | 32 +--------------- .../cpu/kernel/asm/secp256k1/curve_mul.asm | 24 ------------ evm/src/cpu/kernel/asm/secp256k1/lift_x.asm | 38 ++++++++----------- evm/src/cpu/kernel/tests/curve_ops.rs | 27 +------------ 5 files changed, 31 insertions(+), 102 deletions(-) diff --git a/evm/src/cpu/kernel/asm/ecrecover.asm b/evm/src/cpu/kernel/asm/ecrecover.asm index 0d1776dd..d0994054 100644 --- a/evm/src/cpu/kernel/asm/ecrecover.asm +++ b/evm/src/cpu/kernel/asm/ecrecover.asm @@ -12,6 +12,18 @@ global ecrecover: // stack: v, hash, isValid(v,r,s), r, s, retdest DUP4 // stack: r, v, hash, isValid(v,r,s), r, s, retdest + + // Compute v-27 which gives the parity of the y-coordinate of the lifted point. + SWAP1 + // stack: v, r, hash, isValid(v,r,s), r, s, retdest + PUSH 27 + // stack: 27, v, r, hash, isValid(v,r,s), r, s, retdest + SWAP1 + // stack: v, 27, r, hash, isValid(v,r,s), r, s, retdest + SUB + // stack: v - 27, r, hash, isValid(v,r,s), r, s, retdest + SWAP1 + // stack: r, v - 27, hash, isValid(v,r,s), r, s, retdest %secp_lift_x // stack: y, sqrtOk, hash, isValid(v,r,s), r, s, retdest diff --git a/evm/src/cpu/kernel/asm/secp256k1/curve_add.asm b/evm/src/cpu/kernel/asm/secp256k1/curve_add.asm index f150e20e..7f9c1fff 100644 --- a/evm/src/cpu/kernel/asm/secp256k1/curve_add.asm +++ b/evm/src/cpu/kernel/asm/secp256k1/curve_add.asm @@ -1,34 +1,5 @@ // #define N 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 // Secp256k1 scalar field order -// Secp256k1 elliptic curve addition. -// Uses the standard affine addition formula. -global ec_add_secp: - JUMPDEST - // stack: x0, y0, x1, y1, retdest - - // Check if points are valid Secp256k1 points. - DUP2 - // stack: y0, x0, y0, x1, y1, retdest - DUP2 - // stack: x0, y0, x0, y0, x1, y1, retdest - %ec_check_secp - // stack: isValid(x0, y0), x0, y0, x1, y1, retdest - DUP5 - // stack: x1, isValid(x0, y0), x0, y0, x1, y1, retdest - DUP5 - // stack: x1, y1, isValid(x0, y0), x0, y0, x1, y1, retdest - %ec_check_secp - // stack: isValid(x1, y1), isValid(x0, y0), x0, y0, x1, y1, retdest - AND - // stack: isValid(x1, y1) & isValid(x0, y0), x0, y0, x1, y1, retdest - %jumpi(ec_add_valid_points_secp) - // stack: x0, y0, x1, y1, retdest - - // Otherwise return - %pop4 - // stack: retdest - %ec_invalid_input - // Secp256k1 elliptic curve addition. // Assumption: (x0,y0) and (x1,y1) are valid points. global ec_add_valid_points_secp: @@ -232,7 +203,7 @@ ec_add_equal_points: // stack: lambda, x0, y0, x1, y1, retdest %jump(ec_add_valid_points_with_lambda) -// BN254 elliptic curve doubling. +// Secp256k1 elliptic curve doubling. // Assumption: (x0,y0) is a valid point. // Standard doubling formula. global ec_double_secp: @@ -249,6 +220,7 @@ global ec_double_secp: PUSH 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f %endmacro +// Modular subtraction. Subtraction x-y underflows iff x Result<()> { // Make sure we can parse and assemble the entire kernel. 
        let kernel = combined_kernel();
-        let ec_add = kernel.global_labels["ec_add_secp"];
+        let ec_add = kernel.global_labels["ec_add_valid_points_secp"];
         let ec_double = kernel.global_labels["ec_double_secp"];
-        let ec_mul = kernel.global_labels["ec_mul_secp"];
+        let ec_mul = kernel.global_labels["ec_mul_valid_point_secp"];
         let identity = ("0x0", "0x0");
-        let invalid = ("0x0", "0x3"); // Not on curve
         let point0 = (
             "0xc82ccceebd739e646631b7270ed8c33e96c4940b19db91eaf67da6ec92d109b",
             "0xe0d241d2de832656c3eed78271bb06b5602d6473742c7c48a38b9f0350a76164",
         );
@@ -212,23 +210,6 @@ mod secp {
         let stack = run(&kernel.code, ec_add, initial_stack);
         assert_eq!(stack, u256ify([identity.1, identity.0])?);

-        // Addition with invalid point(s) #1
-        let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0, invalid.1, invalid.0])?;
-        let stack = run(&kernel.code, ec_add, initial_stack);
-        assert_eq!(stack, vec![U256::MAX, U256::MAX]);
-        // Addition with invalid point(s) #2
-        let initial_stack = u256ify(["0xdeadbeef", invalid.1, invalid.0, point0.1, point0.0])?;
-        let stack = run(&kernel.code, ec_add, initial_stack);
-        assert_eq!(stack, vec![U256::MAX, U256::MAX]);
-        // Addition with invalid point(s) #3
-        let initial_stack = u256ify(["0xdeadbeef", invalid.1, invalid.0, identity.1, identity.0])?;
-        let stack = run(&kernel.code, ec_add, initial_stack);
-        assert_eq!(stack, vec![U256::MAX, U256::MAX]);
-        // Addition with invalid point(s) #4
-        let initial_stack = u256ify(["0xdeadbeef", invalid.1, invalid.0, invalid.1, invalid.0])?;
-        let stack = run(&kernel.code, ec_add, initial_stack);
-        assert_eq!(stack, vec![U256::MAX, U256::MAX]);
-
         // Scalar multiplication #1
         let initial_stack = u256ify(["0xdeadbeef", s, point0.1, point0.0])?;
         let stack = run(&kernel.code, ec_mul, initial_stack);
@@ -245,10 +226,6 @@ mod secp {
         let initial_stack = u256ify(["0xdeadbeef", s, identity.1, identity.0])?;
         let stack = run(&kernel.code, ec_mul, initial_stack);
         assert_eq!(stack, u256ify([identity.1, identity.0])?);
-        // Scalar multiplication #5
-        let initial_stack = u256ify(["0xdeadbeef", s, invalid.1, invalid.0])?;
-        let stack = run(&kernel.code, ec_mul, initial_stack);
-        assert_eq!(stack, vec![U256::MAX, U256::MAX]);

         // Multiple calls
         let ec_mul_hex = format!("0x{:x}", ec_mul);

From ba9aa14f515e14612c78de9069e3887159a59cf7 Mon Sep 17 00:00:00 2001
From: wborgeaud
Date: Mon, 18 Jul 2022 14:00:20 +0200
Subject: [PATCH 14/15] PR feedback

---
 evm/src/cpu/kernel/asm/secp256k1/lift_x.asm | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm b/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm
index 34133e60..cd392b61 100644
--- a/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm
+++ b/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm
@@ -60,8 +60,9 @@
     ADDMOD
 %endmacro

-// Returns sqrt(x). Computed as x^(q+1)/4, with q the Secp base field order.
-/// To replace with more efficient method using non-determinism later.
+// Returns a square root of x if one exists, otherwise an undefined value.
+// Computed as x^((q+1)/4), with q the Secp base field order.
+// To replace with more efficient method using non-determinism later.
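// (Editorial worked check, not part of the patch: the exponent works because
// the secp256k1 base field order q satisfies q = 3 mod 4, so (q+1)/4 is an
// integer. For y = x^((q+1)/4) we get y^2 = x^((q+1)/2) = x * x^((q-1)/2),
// and by Euler's criterion x^((q-1)/2) = 1 whenever x is a nonzero square,
// hence y^2 = x exactly when a square root exists.)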
%macro sqrt_secp_base // stack: x DUP1 From a22dbd18ed186c23a66de72f815bf159416d6c66 Mon Sep 17 00:00:00 2001 From: wborgeaud Date: Mon, 18 Jul 2022 14:04:40 +0200 Subject: [PATCH 15/15] Merge conflicts --- evm/src/cpu/kernel/tests/curve_ops.rs | 62 +++++++++++++-------------- evm/src/cpu/kernel/tests/ecrecover.rs | 4 +- evm/src/cpu/kernel/tests/exp.rs | 12 +++--- 3 files changed, 39 insertions(+), 39 deletions(-) diff --git a/evm/src/cpu/kernel/tests/curve_ops.rs b/evm/src/cpu/kernel/tests/curve_ops.rs index b04fb77d..7d7f042a 100644 --- a/evm/src/cpu/kernel/tests/curve_ops.rs +++ b/evm/src/cpu/kernel/tests/curve_ops.rs @@ -43,76 +43,76 @@ mod bn { // Standard addition #1 let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0, point1.1, point1.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); + let stack = run(&kernel.code, ec_add, initial_stack).stack; assert_eq!(stack, u256ify([point2.1, point2.0])?); // Standard addition #2 let initial_stack = u256ify(["0xdeadbeef", point1.1, point1.0, point0.1, point0.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); + let stack = run(&kernel.code, ec_add, initial_stack).stack; assert_eq!(stack, u256ify([point2.1, point2.0])?); // Standard doubling #1 let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0, point0.1, point0.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); + let stack = run(&kernel.code, ec_add, initial_stack).stack; assert_eq!(stack, u256ify([point3.1, point3.0])?); // Standard doubling #2 let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0])?; - let stack = run(&kernel.code, ec_double, initial_stack); + let stack = run(&kernel.code, ec_double, initial_stack).stack; assert_eq!(stack, u256ify([point3.1, point3.0])?); // Standard doubling #3 let initial_stack = u256ify(["0xdeadbeef", "0x2", point0.1, point0.0])?; - let stack = run(&kernel.code, ec_mul, initial_stack); + let stack = run(&kernel.code, ec_mul, initial_stack).stack; assert_eq!(stack, u256ify([point3.1, point3.0])?); // Addition with identity #1 let initial_stack = u256ify(["0xdeadbeef", identity.1, identity.0, point1.1, point1.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); + let stack = run(&kernel.code, ec_add, initial_stack).stack; assert_eq!(stack, u256ify([point1.1, point1.0])?); // Addition with identity #2 let initial_stack = u256ify(["0xdeadbeef", point1.1, point1.0, identity.1, identity.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); + let stack = run(&kernel.code, ec_add, initial_stack).stack; assert_eq!(stack, u256ify([point1.1, point1.0])?); // Addition with identity #3 let initial_stack = u256ify(["0xdeadbeef", identity.1, identity.0, identity.1, identity.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); + let stack = run(&kernel.code, ec_add, initial_stack).stack; assert_eq!(stack, u256ify([identity.1, identity.0])?); // Addition with invalid point(s) #1 let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0, invalid.1, invalid.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); + let stack = run(&kernel.code, ec_add, initial_stack).stack; assert_eq!(stack, vec![U256::MAX, U256::MAX]); // Addition with invalid point(s) #2 let initial_stack = u256ify(["0xdeadbeef", invalid.1, invalid.0, point0.1, point0.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); + let stack = run(&kernel.code, ec_add, initial_stack).stack; assert_eq!(stack, vec![U256::MAX, U256::MAX]); // Addition with invalid point(s) #3 let initial_stack = 
u256ify(["0xdeadbeef", invalid.1, invalid.0, identity.1, identity.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); + let stack = run(&kernel.code, ec_add, initial_stack).stack; assert_eq!(stack, vec![U256::MAX, U256::MAX]); // Addition with invalid point(s) #4 let initial_stack = u256ify(["0xdeadbeef", invalid.1, invalid.0, invalid.1, invalid.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); + let stack = run(&kernel.code, ec_add, initial_stack).stack; assert_eq!(stack, vec![U256::MAX, U256::MAX]); // Scalar multiplication #1 let initial_stack = u256ify(["0xdeadbeef", s, point0.1, point0.0])?; - let stack = run(&kernel.code, ec_mul, initial_stack); + let stack = run(&kernel.code, ec_mul, initial_stack).stack; assert_eq!(stack, u256ify([point4.1, point4.0])?); // Scalar multiplication #2 let initial_stack = u256ify(["0xdeadbeef", "0x0", point0.1, point0.0])?; - let stack = run(&kernel.code, ec_mul, initial_stack); + let stack = run(&kernel.code, ec_mul, initial_stack).stack; assert_eq!(stack, u256ify([identity.1, identity.0])?); // Scalar multiplication #3 let initial_stack = u256ify(["0xdeadbeef", "0x1", point0.1, point0.0])?; - let stack = run(&kernel.code, ec_mul, initial_stack); + let stack = run(&kernel.code, ec_mul, initial_stack).stack; assert_eq!(stack, u256ify([point0.1, point0.0])?); // Scalar multiplication #4 let initial_stack = u256ify(["0xdeadbeef", s, identity.1, identity.0])?; - let stack = run(&kernel.code, ec_mul, initial_stack); + let stack = run(&kernel.code, ec_mul, initial_stack).stack; assert_eq!(stack, u256ify([identity.1, identity.0])?); // Scalar multiplication #5 let initial_stack = u256ify(["0xdeadbeef", s, invalid.1, invalid.0])?; - let stack = run(&kernel.code, ec_mul, initial_stack); + let stack = run(&kernel.code, ec_mul, initial_stack).stack; assert_eq!(stack, vec![U256::MAX, U256::MAX]); // Multiple calls @@ -126,7 +126,7 @@ mod bn { point0.1, point0.0, ])?; - let stack = run(&kernel.code, ec_add, initial_stack); + let stack = run(&kernel.code, ec_add, initial_stack).stack; assert_eq!(stack, u256ify([point4.1, point4.0])?); Ok(()) @@ -176,55 +176,55 @@ mod secp { // Standard addition #1 let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0, point1.1, point1.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); + let stack = run(&kernel.code, ec_add, initial_stack).stack; assert_eq!(stack, u256ify([point2.1, point2.0])?); // Standard addition #2 let initial_stack = u256ify(["0xdeadbeef", point1.1, point1.0, point0.1, point0.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); + let stack = run(&kernel.code, ec_add, initial_stack).stack; assert_eq!(stack, u256ify([point2.1, point2.0])?); // Standard doubling #1 let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0, point0.1, point0.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); + let stack = run(&kernel.code, ec_add, initial_stack).stack; assert_eq!(stack, u256ify([point3.1, point3.0])?); // Standard doubling #2 let initial_stack = u256ify(["0xdeadbeef", point0.1, point0.0])?; - let stack = run(&kernel.code, ec_double, initial_stack); + let stack = run(&kernel.code, ec_double, initial_stack).stack; assert_eq!(stack, u256ify([point3.1, point3.0])?); // Standard doubling #3 let initial_stack = u256ify(["0xdeadbeef", "0x2", point0.1, point0.0])?; - let stack = run(&kernel.code, ec_mul, initial_stack); + let stack = run(&kernel.code, ec_mul, initial_stack).stack; assert_eq!(stack, u256ify([point3.1, point3.0])?); // Addition with identity #1 
let initial_stack = u256ify(["0xdeadbeef", identity.1, identity.0, point1.1, point1.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); + let stack = run(&kernel.code, ec_add, initial_stack).stack; assert_eq!(stack, u256ify([point1.1, point1.0])?); // Addition with identity #2 let initial_stack = u256ify(["0xdeadbeef", point1.1, point1.0, identity.1, identity.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); + let stack = run(&kernel.code, ec_add, initial_stack).stack; assert_eq!(stack, u256ify([point1.1, point1.0])?); // Addition with identity #3 let initial_stack = u256ify(["0xdeadbeef", identity.1, identity.0, identity.1, identity.0])?; - let stack = run(&kernel.code, ec_add, initial_stack); + let stack = run(&kernel.code, ec_add, initial_stack).stack; assert_eq!(stack, u256ify([identity.1, identity.0])?); // Scalar multiplication #1 let initial_stack = u256ify(["0xdeadbeef", s, point0.1, point0.0])?; - let stack = run(&kernel.code, ec_mul, initial_stack); + let stack = run(&kernel.code, ec_mul, initial_stack).stack; assert_eq!(stack, u256ify([point4.1, point4.0])?); // Scalar multiplication #2 let initial_stack = u256ify(["0xdeadbeef", "0x0", point0.1, point0.0])?; - let stack = run(&kernel.code, ec_mul, initial_stack); + let stack = run(&kernel.code, ec_mul, initial_stack).stack; assert_eq!(stack, u256ify([identity.1, identity.0])?); // Scalar multiplication #3 let initial_stack = u256ify(["0xdeadbeef", "0x1", point0.1, point0.0])?; - let stack = run(&kernel.code, ec_mul, initial_stack); + let stack = run(&kernel.code, ec_mul, initial_stack).stack; assert_eq!(stack, u256ify([point0.1, point0.0])?); // Scalar multiplication #4 let initial_stack = u256ify(["0xdeadbeef", s, identity.1, identity.0])?; - let stack = run(&kernel.code, ec_mul, initial_stack); + let stack = run(&kernel.code, ec_mul, initial_stack).stack; assert_eq!(stack, u256ify([identity.1, identity.0])?); // Multiple calls @@ -238,7 +238,7 @@ mod secp { point0.1, point0.0, ])?; - let stack = run(&kernel.code, ec_add, initial_stack); + let stack = run(&kernel.code, ec_add, initial_stack).stack; assert_eq!(stack, u256ify([point4.1, point4.0])?); Ok(()) diff --git a/evm/src/cpu/kernel/tests/ecrecover.rs b/evm/src/cpu/kernel/tests/ecrecover.rs index 47115317..5077d042 100644 --- a/evm/src/cpu/kernel/tests/ecrecover.rs +++ b/evm/src/cpu/kernel/tests/ecrecover.rs @@ -25,7 +25,7 @@ fn test_valid_ecrecover( ) -> Result<()> { let ecrecover = kernel.global_labels["ecrecover"]; let initial_stack = u256ify([s, r, v, hash])?; - let stack = run(&kernel.code, ecrecover, initial_stack); + let stack = run(&kernel.code, ecrecover, initial_stack).stack; let got = pubkey_to_addr(stack[1], stack[0]); assert_eq!(got, hex::decode(&expected[2..]).unwrap()); @@ -35,7 +35,7 @@ fn test_valid_ecrecover( fn test_invalid_ecrecover(hash: &str, v: &str, r: &str, s: &str, kernel: &Kernel) -> Result<()> { let ecrecover = kernel.global_labels["ecrecover"]; let initial_stack = u256ify(["0xdeadbeef", s, r, v, hash])?; - let stack = run(&kernel.code, ecrecover, initial_stack); + let stack = run(&kernel.code, ecrecover, initial_stack).stack; assert_eq!(stack, vec![U256::MAX]); Ok(()) diff --git a/evm/src/cpu/kernel/tests/exp.rs b/evm/src/cpu/kernel/tests/exp.rs index 24639349..b12b943e 100644 --- a/evm/src/cpu/kernel/tests/exp.rs +++ b/evm/src/cpu/kernel/tests/exp.rs @@ -18,26 +18,26 @@ fn test_exp() -> Result<()> { // Random input let initial_stack = vec![U256::from_str("0xdeadbeef")?, b, a]; - let stack_with_kernel = run(&kernel.code, 
exp, initial_stack); + let stack_with_kernel = run(&kernel.code, exp, initial_stack).stack; let initial_stack = vec![b, a]; let code = [0xa, 0x63, 0xde, 0xad, 0xbe, 0xef, 0x56]; // EXP, PUSH4 deadbeef, JUMP - let stack_with_opcode = run(&code, 0, initial_stack); + let stack_with_opcode = run(&code, 0, initial_stack).stack; assert_eq!(stack_with_kernel, stack_with_opcode); // 0 base let initial_stack = vec![U256::from_str("0xdeadbeef")?, b, U256::zero()]; - let stack_with_kernel = run(&kernel.code, exp, initial_stack); + let stack_with_kernel = run(&kernel.code, exp, initial_stack).stack; let initial_stack = vec![b, U256::zero()]; let code = [0xa, 0x63, 0xde, 0xad, 0xbe, 0xef, 0x56]; // EXP, PUSH4 deadbeef, JUMP - let stack_with_opcode = run(&code, 0, initial_stack); + let stack_with_opcode = run(&code, 0, initial_stack).stack; assert_eq!(stack_with_kernel, stack_with_opcode); // 0 exponent let initial_stack = vec![U256::from_str("0xdeadbeef")?, U256::zero(), a]; - let stack_with_kernel = run(&kernel.code, exp, initial_stack); + let stack_with_kernel = run(&kernel.code, exp, initial_stack).stack; let initial_stack = vec![U256::zero(), a]; let code = [0xa, 0x63, 0xde, 0xad, 0xbe, 0xef, 0x56]; // EXP, PUSH4 deadbeef, JUMP - let stack_with_opcode = run(&code, 0, initial_stack); + let stack_with_opcode = run(&code, 0, initial_stack).stack; assert_eq!(stack_with_kernel, stack_with_opcode); Ok(())
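As a closing sanity check on the number theory this series relies on (Fermat
inversion via the addition chain in moddiv.asm, the square-root exponent in
lift_x.asm, and the v - 27 parity pick in ecrecover.asm), here is a small
self-contained Rust sketch over a toy prime. Illustrative only: the toy modulus
p = 23 and the helper name pow_mod are editorial choices, not code from these
patches.

    // Square-and-multiply exponentiation, the same pattern the kernel's
    // addition chains implement with %squaremodn_secp_base / %mulmodn_secp_base.
    fn pow_mod(mut b: u128, mut e: u128, p: u128) -> u128 {
        let mut acc = 1u128;
        b %= p;
        while e > 0 {
            if e & 1 == 1 {
                acc = acc * b % p;
            }
            b = b * b % p;
            e >>= 1;
        }
        acc
    }

    fn main() {
        let p = 23u128; // toy prime with p % 4 == 3, like the secp256k1 base field
        let x = 5u128;

        // Fermat inversion, as in moddiv.asm: x^(p-2) = x^-1 (mod p).
        let inv = pow_mod(x, p - 2, p);
        assert_eq!(x * inv % p, 1);

        // Square root, as in %sqrt_secp_base: for p = 3 (mod 4), a^((p+1)/4) is a root of a.
        let a = x * x % p;
        let y = pow_mod(a, (p + 1) / 4, p);
        assert!(y == x || y == p - x);

        // Parity pick, as in ecrecover: v - 27 is the expected parity of y.
        let parity = 1u128; // e.g. v = 28
        let y_pick = if y % 2 == parity { y } else { p - y };
        assert_eq!(y_pick % 2, parity);
    }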