diff --git a/evm/src/cpu/kernel/asm/basic_macros.asm b/evm/src/cpu/kernel/asm/basic_macros.asm index 200aeea0..376b661d 100644 --- a/evm/src/cpu/kernel/asm/basic_macros.asm +++ b/evm/src/cpu/kernel/asm/basic_macros.asm @@ -44,6 +44,26 @@ // stack: (pred != 0) * nz + (pred == 0) * z %endmacro +// If pred, yields z; otherwise, yields nz +// Assumes pred is boolean (either 0 or 1). +%macro select_bool + // stack: pred, nz, z + dup1 + // stack: pred, pred, nz, z + iszero + // stack: notpred, pred, nz, z + swap3 + // stack: z, pred, nz, notpred + mul + // stack: pred * z, nz, notpred + swap2 + // stack: notpred, nz, pred * z + mul + // stack: notpred * nz, pred * z + add + // stack: notpred * nz + pred * z +%endmacro + %macro square // stack: x dup1 diff --git a/evm/src/cpu/kernel/asm/ecrecover.asm b/evm/src/cpu/kernel/asm/ecrecover.asm index a05bff92..97d12616 100644 --- a/evm/src/cpu/kernel/asm/ecrecover.asm +++ b/evm/src/cpu/kernel/asm/ecrecover.asm @@ -7,11 +7,112 @@ global ecrecover: // stack: hash, v, r, s, retdest %pop(4) // stack: retdest - %ecrecover_invalid_input // TODO: Return correct invalid input + %ecrecover_invalid_input +// Pseudo-code: +// let P = lift_x(r, recovery_id); +// let r_inv = r.inverse(); +// let u1 = s * r_inv; +// let u2 = -hash * r_inv; +// return u1*P + u2*GENERATOR; ecrecover_valid_input: JUMPDEST // stack: hash, v, r, s, retdest + SWAP1 + // stack: v, hash, r, s, retdest + DUP3 + // stack: r, v, hash, r, s, retdest + %secp_lift_x + // stack: x, y, hash, r, s, retdest + SWAP3 + // stack: r, y, hash, x, s, retdest + %inverse_secp_scalar + // stack: r^(-1), y, hash, x, s, retdest + DUP1 + // stack: r^(-1), r^(-1), y, hash, x, s, retdest + SWAP5 + // stack: s, r^(-1), y, hash, x, r^(-1), retdest + %mulmodn_secp_scalar + // stack: u1, y, hash, x, r^(-1), retdest + PUSH ecrecover_with_first_point + // stack: ecrecover_with_first_point, u1, y, hash, x, r^(-1), retdest + SWAP1 + // stack: u1, ecrecover_with_first_point, y, hash, x, r^(-1), retdest + SWAP2 + // stack: y, ecrecover_with_first_point, u1, hash, x, r^(-1), retdest + SWAP1 + // stack: ecrecover_with_first_point, y, u1, hash, x, r^(-1), retdest + SWAP3 + // stack: hash, y, u1, ecrecover_with_first_point, x, r^(-1), retdest + SWAP4 + // stack: x, y, u1, ecrecover_with_first_point, hash, r^(-1), retdest + %jump(ec_mul_valid_point_secp) + + SWAP2 + // stack: hash, y, u1, x, r^(-1), retdest + SWAP3 + // stack: x, y, u1, hash, r^(-1), retdest + SWAP4 + // stack: r^(-1), y, hash, x, u1, retdest + SWAP1 + // stack: y, r^(-1), hash, x, u1, retdest + SWAP2 + // stack: hash, r^(-1), y, x, u1, retdest + %secp_scalar + // stack: p, hash, r^(-1), y, x, u1, retdest + SUB + // stack: p - hash, r^(-1), y, x, u1, retdest // Assume hash < p, should be hard (127-bit) to find a hash larger than p. + %mulmodn_secp_scalar + // stack: u2, y, x, u1, retdest // Assume hash < p, should be hard (127-bit) to find a hash larger than p. 
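For reference, a minimal Python sketch of the branchless select that %select_bool implements, assuming pred is 0 or 1 (the helper name is purely illustrative):

def select_bool(pred, nz, z):
    # Mirrors notpred * nz + pred * z from the macro; assumes pred is 0 or 1.
    notpred = 1 - pred                 # ISZERO on a boolean
    return notpred * nz + pred * z

assert select_bool(1, 10, 20) == 20    # pred set: yields z
assert select_bool(0, 10, 20) == 10    # pred clear: yields nz
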
+ +ecrecover_with_first_point: + JUMPDEST + // stack: X, Y, hash, r^(-1), retdest + %secp_scalar + // stack: p, X, Y, hash, r^(-1), retdest + SWAP1 + // stack: X, p, Y, hash, r^(-1), retdest + SWAP4 + // stack: r^(-1), p, Y, hash, X, retdest + SWAP2 + // stack: Y, p, r^(-1), hash, X, retdest + SWAP3 + // stack: hash, p, r^(-1), Y, X, retdest + MOD + // stack: hash%p, r^(-1), Y, X, retdest + %secp_scalar + // stack: p, hash%p, r^(-1), Y, X, retdest + SUB + // stack: -hash, r^(-1), Y, X, retdest + %mulmodn_secp_scalar + // stack: u2, Y, X, retdest + PUSH 8 + // stack: final_hashing, u2, Y, X, retdest + SWAP3 + // stack: X, u2, Y, final_hashing, retdest + PUSH 7 + // stack: ec_add_valid_points_secp, X, u2, Y, final_hashing, retdest + SWAP1 + // stack: X, ec_add_valid_points_secp, u2, Y, final_hashing, retdest + PUSH 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798 // x-coordinate of generator + // stack: Gx, X, ec_add_valid_points_secp, u2, Y, final_hashing, retdest + SWAP1 + // stack: X, Gx, ec_add_valid_points_secp, u2, Y, final_hashing, retdest + PUSH 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8 // y-coordinate of generator + // stack: Gy, X, Gx, ec_add_valid_points_secp, u2, Y, final_hashing, retdest + SWAP1 + // stack: X, Gy, Gx, ec_add_valid_points_secp, u2, Y, final_hashing, retdest + SWAP4 + // stack: u2, Gy, Gx, ec_add_valid_points_secp, X, Y, final_hashing, retdest + SWAP2 + // stack: Gx, Gy, u2, ec_add_valid_points_secp, X, Y, final_hashing, retdest + %jump(ec_mul_valid_point_secp) + +// TODO +final_hashing: + JUMPDEST + PUSH 0xdeadbeef + JUMP // Check if v, r, and s are in correct form. // Returns r < N & r!=0 & s < N & s!=0 & (v==28 || v==27). @@ -73,14 +174,12 @@ ecrecover_valid_input: PUSH 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141 %endmacro -// Return (u256::MAX, u256::MAX) which is used to indicate the input was invalid. +// Return u256::MAX which is used to indicate the input was invalid. 
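For reference, the pseudo-code at the top of ecrecover_valid_input can be exercised end-to-end with a minimal Python sketch. This is plain affine arithmetic with Fermat inverses via pow, not the kernel's code path; P and N are the standard secp256k1 field orders (assumed, only the generator coordinates appear in the patch), and the helper names, key, nonce, and hash below are made-up toy values:

# Standard secp256k1 parameters (assumed).
P = 2**256 - 2**32 - 977
N = 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
G = (0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798,
     0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8)

def ec_add(A, B):
    # Affine point addition; None plays the role of the point at infinity.
    if A is None:
        return B
    if B is None:
        return A
    (x1, y1), (x2, y2) = A, B
    if x1 == x2 and (y1 + y2) % P == 0:
        return None
    if A == B:
        lam = 3 * x1 * x1 * pow(2 * y1, P - 2, P) % P
    else:
        lam = (y2 - y1) * pow(x2 - x1, P - 2, P) % P
    x3 = (lam * lam - x1 - x2) % P
    return (x3, (lam * (x1 - x3) - y1) % P)

def ec_mul(k, A):
    # Double-and-add scalar multiplication.
    out = None
    while k:
        if k & 1:
            out = ec_add(out, A)
        A = ec_add(A, A)
        k >>= 1
    return out

def lift_x(r, v):
    # y^2 = r^3 + 7; pick the root whose parity matches v (27 = even, 28 = odd).
    y = pow((r * r * r + 7) % P, (P + 1) // 4, P)
    return (r, y) if (y & 1) == (v - 27) else (r, P - y)

def ecrecover(h, v, r, s):
    # let P = lift_x(r, v); u1 = s/r; u2 = -h/r; return u1*P + u2*G
    r_inv = pow(r, N - 2, N)
    u1 = s * r_inv % N
    u2 = -h * r_inv % N
    return ec_add(ec_mul(u1, lift_x(r, v)), ec_mul(u2, G))

# Round-trip check against a signature built from toy values.
d, k, h = 0xC0FFEE, 0xBEEF, 0x1234     # private key, nonce, message hash (made up)
R = ec_mul(k, G)
r = R[0] % N
s = (h + r * d) * pow(k, N - 2, N) % N
v = 27 + (R[1] & 1)
assert ecrecover(h, v, r, s) == ec_mul(d, G)
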
 %macro ecrecover_invalid_input
     // stack: retdest
     PUSH 0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
     // stack: u256::MAX, retdest
-    PUSH 0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
-    // stack: u256::MAX, u256::MAX, retdest
-    SWAP2
-    // stack: retdest, u256::MAX, u256::MAX
+    SWAP1
+    // stack: retdest, u256::MAX
     JUMP
 %endmacro
diff --git a/evm/src/cpu/kernel/asm/secp256k1/curve_mul.asm b/evm/src/cpu/kernel/asm/secp256k1/curve_mul.asm
index 2ecf074a..7b332955 100644
--- a/evm/src/cpu/kernel/asm/secp256k1/curve_mul.asm
+++ b/evm/src/cpu/kernel/asm/secp256k1/curve_mul.asm
@@ -17,13 +17,13 @@ global ec_mul_secp:
     // stack: x, y, x, y, s, retdest
     %ec_check_secp
     // stack: isValid(x, y), x, y, s, retdest
-    %jumpi(ec_mul_valid_point)
+    %jumpi(ec_mul_valid_point_secp)
     // stack: x, y, s, retdest
     %pop3
     %ec_invalid_input
 
 // Same algorithm as in `exp.asm`
-ec_mul_valid_point:
+global ec_mul_valid_point_secp:
     JUMPDEST
     // stack: x, y, s, retdest
     DUP3
@@ -55,7 +55,7 @@ step_case:
 step_case_contd:
     JUMPDEST
     // stack: x', y', s / 2, recursion_return, x, y, s, retdest
-    %jump(ec_mul_valid_point)
+    %jump(ec_mul_valid_point_secp)
 
 recursion_return:
     JUMPDEST
diff --git a/evm/src/cpu/kernel/asm/secp256k1/inverse_scalar.asm b/evm/src/cpu/kernel/asm/secp256k1/inverse_scalar.asm
new file mode 100644
index 00000000..704191fa
--- /dev/null
+++ b/evm/src/cpu/kernel/asm/secp256k1/inverse_scalar.asm
@@ -0,0 +1,672 @@
+/// Division modulo 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141, the Secp256k1 scalar field order
+/// To replace with more efficient method using non-determinism later.
+
+%macro mulmodn_secp_scalar
+    // stack: x, y
+    %secp_scalar
+    // stack: N, x, y
+    SWAP2
+    // stack: y, x, N
+    MULMOD
+%endmacro
+
+%macro squaremodn_secp_scalar
+    // stack: x
+    DUP1
+    // stack: x, x
+    %mulmodn_secp_scalar
+%endmacro
+
+// Computes the inverse modulo N using x^(-1) = x^(N-2) mod N (Fermat's little theorem) and square-and-multiply modular exponentiation.
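For reference, a minimal Python sketch of the exponentiation performed by the unrolled %inverse_secp_scalar macro that follows: start from x, square once per remaining bit of N-2, and multiply by x whenever that bit is set, which is exactly pow(x, N-2, N). The function name is illustrative only:

N = 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141  # secp256k1 scalar order

def inverse_secp_scalar(x):
    # Fermat: x^(N-2) = x^(-1) mod N, evaluated MSB-first as in the unrolled macro.
    acc = x                               # the leading 1 bit of N-2
    for bit in bin(N - 2)[3:]:            # remaining bits, most significant first
        acc = acc * acc % N               # %squaremodn_secp_scalar
        if bit == '1':
            acc = acc * x % N             # DUP2; %mulmodn_secp_scalar
    return acc

x = 0x123456789abcdef
assert inverse_secp_scalar(x) == pow(x, N - 2, N)
assert inverse_secp_scalar(x) * x % N == 1
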
+%macro inverse_secp_scalar + DUP1 + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + 
%mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + 
%squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + 
%mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + %squaremodn_secp_scalar + DUP2 + %mulmodn_secp_scalar + SWAP1 + // stack: x, x^-1 + POP + // stack: x^-1 +%endmacro diff --git a/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm b/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm new file mode 100644 index 00000000..2578f6aa --- /dev/null +++ b/evm/src/cpu/kernel/asm/secp256k1/lift_x.asm @@ -0,0 +1,815 @@ +%macro secp_lift_x + // stack: x, v + DUP1 + // stack: x, x, v + %cubemodn_secp + // stack: x^3, x, v + PUSH 7 + // stack: 7, x^3, x, v + %addmodn_secp + // stack: x^3+7, x, v + %sqrt_secp + // stack: y, x, v + DUP1 + // stack: y, y, x, v + PUSH 1 + // stack: 1, y, y, x, v + AND + // stack: 1 & y, y, x, v + PUSH 27 + // stack: 27, 1 & y, y, x, v + DUP5 + // stack: v, 27, 1 & y, y, x, v + SUB + // stack: v - 27, 1 & y, y, x, v + EQ + // stack: correctParity, y, x, v + DUP2 + // stack: y, correctParity, y, x, v + %secp_base + // stack: N, y, correctParity, y, x, v + SUB + // stack: N - y, correctParity, y, x, v + SWAP1 + // stack: correctParity, N - y, y, x, v + %select_bool + // stack: goody, x, v + SWAP2 + // stack: v, x, goody + POP + // stack: x, goody +%endmacro + +%macro cubemodn_secp + // stack: x + DUP1 + // stack: x, x + %squaremodn_secp + // stack: x^2, x + %mulmodn_secp +%endmacro + +%macro addmodn_secp + // stack: x, y + %secp_base + // stack: N, x, y + SWAP2 + // stack: y, x, N + ADDMOD +%endmacro + +// Returns sqrt(x). Computed as x^(q+1)/4, with q the Secp base field order. +/// To replace with more efficient method using non-determinism later. 
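For reference, a minimal Python sketch of what %secp_lift_x computes, assuming the standard secp256k1 base field order (called q in the comment above, P below): take the (P+1)/4 power of x^3+7 to obtain a square root, then negate it if its parity disagrees with v. The helper name is illustrative; the check uses the generator coordinates from the patch:

P = 2**256 - 2**32 - 977   # secp256k1 base field order (assumed); P % 4 == 3

def secp_lift_x(x, v):
    # y^2 = x^3 + 7; choose the root whose parity matches v (27 = even, 28 = odd).
    y = pow((x * x * x + 7) % P, (P + 1) // 4, P)
    return y if (y & 1) == (v - 27) else P - y

Gx = 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798
Gy = 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8
assert secp_lift_x(Gx, 27 + (Gy & 1)) == Gy
assert secp_lift_x(Gx, 27 + ((P - Gy) & 1)) == P - Gy
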
+%macro sqrt_secp + // stack: x + DUP1 + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + 
%mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + 
%squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + %squaremodn_secp + %squaremodn_secp + %squaremodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + DUP2 + %mulmodn_secp + %squaremodn_secp + %squaremodn_secp + SWAP1 + // stack: x, x^-1 + POP + // stack: x^-1 +%endmacro \ No newline at end of file
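For reference, a minimal Python sketch checking that the unrolled %sqrt_secp ladder above performs the same computation as pow(c, (P+1)//4, P), and that the result squares back to c when c is a quadratic residue (this works because P ≡ 3 mod 4). The function name is illustrative:

P = 2**256 - 2**32 - 977   # secp256k1 base field order (assumed)

def sqrt_secp(c):
    # Square-and-multiply over the fixed exponent (P+1)//4, MSB-first, as in the unrolled macro.
    acc = c
    for bit in bin((P + 1) // 4)[3:]:
        acc = acc * acc % P               # %squaremodn_secp
        if bit == '1':
            acc = acc * c % P             # DUP2; %mulmodn_secp
    return acc

# x^3 + 7 at the generator's x-coordinate, a known square.
c = (0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798 ** 3 + 7) % P
y = sqrt_secp(c)
assert y == pow(c, (P + 1) // 4, P)
assert y * y % P == c
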