mirror of
https://github.com/logos-storage/plonky2-verifier.git
synced 2026-01-02 13:53:07 +00:00
generating all the verifier challenges seems to work
This commit is contained in:
parent
e280b14301
commit
1a54e5827c
110
Challenge/FRI.hs
Normal file
110
Challenge/FRI.hs
Normal file
@ -0,0 +1,110 @@
|
||||
|
||||
-- | The FRI protocol's challenges
|
||||
|
||||
{-# LANGUAGE StrictData, RecordWildCards, NondecreasingIndentation #-}
|
||||
module Challenge.FRI where
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
import Control.Monad
|
||||
|
||||
import Data.Bits
|
||||
|
||||
import Goldilocks
|
||||
import GoldilocksExt
|
||||
import Hash
|
||||
import Digest
|
||||
import Types
|
||||
import Challenge.Monad
|
||||
|
||||
import Debug.Trace
|
||||
-- | Debug helper: emit a labelled value on stderr (via 'Debug.Trace.trace')
-- and return the third argument unchanged.
--
-- Fix: added the missing top-level type signature.
debug :: Show b => String -> b -> a -> a
debug x y z = trace ("\n - " ++ x ++ " -> " ++ show y) z
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
-- | The challenges generated during the FRI part of the protocol.
data FriChallenges = MkFriChallenges
  { fri_alpha         :: FExt   -- ^ scaling factor used to combine polynomials
  , fri_betas         :: [FExt] -- ^ the betas used in the FRI commit phase reductions
  , fri_pow_response  :: F      -- ^ the proof-of-work "response"
  , fri_query_indices :: [Int]  -- ^ the indices at which the oracle is queried in FRI
  }
  deriving (Eq,Show)
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
-- | The polynomial openings consumed by FRI, grouped into batches.
newtype FriOpenings = MkFriOpenings
  { batches :: [FriOpeningBatch]
  }
  deriving (Eq,Show)
|
||||
|
||||
-- | A single batch of opened (extension field) values.
newtype FriOpeningBatch = MkFriOpeningBatch
  { values :: [FExt]
  }
  deriving (Eq,Show)
|
||||
|
||||
-- | Feed the opened values into the duplex (the transcript), batch by batch.
absorbFriOpenings :: FriOpenings -> Duplex ()
absorbFriOpenings openings = forM_ (batches openings) $ \batch -> absorb (values batch)
|
||||
|
||||
-- | Reorder and concatenate an 'OpeningSet' into the two FRI opening
-- batches: the values opened at zeta, and the values opened at the
-- "next" point.
toFriOpenings :: OpeningSet -> FriOpenings
toFriOpenings (MkOpeningSet{..}) = MkFriOpenings [ thisBatch, nextBatch ]
  where
    -- everything opened at zeta, in transcript order
    thisBatch = MkFriOpeningBatch $ concat
      [ opening_constants
      , opening_plonk_sigmas
      , opening_wires
      , opening_plonk_zs
      , opening_partial_products
      , opening_quotient_polys
      , opening_lookup_zs
      ]
    -- everything opened at the shifted point
    nextBatch = MkFriOpeningBatch $ concat
      [ opening_plonk_zs_next
      , opening_lookup_zs_next
      ]
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
-- | Derive the FRI challenges by replaying the prover's Fiat-Shamir
-- transcript.
--
-- NOTE: the exact order of absorbs and squeezes below must mirror the
-- prover's transcript, otherwise the reconstructed challenges are wrong.
friChallenges :: CommonCircuitData -> VerifierOnlyCircuitData -> Proof -> Duplex FriChallenges
friChallenges common_data verifier_data proof = do

  -- commit the openings to the transcript
  absorbFriOpenings (toFriOpenings openings)

  -- scaling factor used to combine polynomials
  alpha <- squeeze :: Duplex FExt

  -- recover the random betas used in the FRI commit phase reductions;
  -- each commit-phase Merkle cap is absorbed before its beta is squeezed
  betas <- forM (fri_commit_phase_merkle_caps fri_proof) $ \cap -> absorb cap >> squeeze

  absorb (coeffs (fri_final_poly fri_proof))

  -- proof of work ("grinding"): Plonky2 absorbs the candidate prover
  -- witness, squeezes a Fiat-Shamir "response", and later checks the
  -- leading zeros of that response
  absorb (fri_pow_witness fri_proof)
  pow_response <- squeeze

  -- the indices at which the FRI oracle is queried, reduced modulo the
  -- LDE domain size
  query_indices <- map reduceIndex <$> squeezeN num_fri_queries

  return $ MkFriChallenges
    { fri_alpha         = alpha
    , fri_betas         = betas
    , fri_pow_response  = pow_response
    , fri_query_indices = query_indices
    }

  where
    config          = circuit_config common_data
    fri_config      = config_fri_config config
    openings        = Types.openings proof
    fri_proof       = Types.opening_proof proof
    degree_bits     = fri_degree_bits (circuit_fri_params common_data)

    -- size of the low-degree extension domain
    lde_size        = shiftL 1 (degree_bits + fri_rate_bits fri_config) :: Integer
    num_fri_queries = fri_num_query_rounds fri_config

    -- map a squeezed field element to a query index in the LDE domain
    reduceIndex :: F -> Int
    reduceIndex felt = fromInteger (mod (asInteger felt) lde_size)
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
|
||||
73
Challenge/Monad.hs
Normal file
73
Challenge/Monad.hs
Normal file
@ -0,0 +1,73 @@
|
||||
|
||||
-- | Monadic interface to do Fiat-Shamir challenges
|
||||
|
||||
{-# LANGUAGE StrictData, GeneralizedNewtypeDeriving #-}
|
||||
module Challenge.Monad where
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
import Data.Array
|
||||
|
||||
import Control.Monad
|
||||
import Control.Monad.Identity
|
||||
import qualified Control.Monad.State.Strict as S
|
||||
import Control.Monad.IO.Class
|
||||
|
||||
import Goldilocks
|
||||
import Digest
|
||||
import Challenge.Pure ( DuplexState, Squeeze, Absorb )
|
||||
import qualified Challenge.Pure as Pure
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
-- * Monadic interface
|
||||
|
||||
-- | A monad transformer threading the duplex (sponge) state through a
-- computation; instances come from the underlying 'S.StateT' via
-- GeneralizedNewtypeDeriving.
newtype DuplexT m a = DuplexT (S.StateT DuplexState m a)
  deriving (Functor,Applicative,Monad)

-- | The pure duplex monad.
type Duplex a = DuplexT Identity a
|
||||
|
||||
-- | Run a duplex computation, initializing the sponge from the given state
-- and discarding the final state.
runDuplexT :: Monad m => DuplexT m a -> State -> m a
runDuplexT (DuplexT act) initial = S.evalStateT act (Pure.duplexInitialState initial)
|
||||
|
||||
-- | Run a pure duplex computation from the given initial state.
runDuplex :: Duplex a -> State -> a
runDuplex act = runIdentity . runDuplexT act
|
||||
|
||||
-- | Absorb a value into the duplex state.
absorb :: (Monad m, Absorb a) => a -> DuplexT m ()
absorb = DuplexT . S.modify . Pure.absorb
|
||||
|
||||
-- | Squeeze a single value out of the duplex state.
squeeze :: (Monad m, Squeeze a) => DuplexT m a
squeeze = DuplexT (S.state Pure.squeeze)
|
||||
|
||||
-- | Squeeze @n@ values out of the duplex state.
squeezeN :: (Monad m, Squeeze a) => Int -> DuplexT m [a]
squeezeN = DuplexT . S.state . Pure.squeezeN
|
||||
|
||||
-- | For debugging only: read back the current duplex state.
inspectDuplexState :: Monad m => DuplexT m DuplexState
inspectDuplexState = DuplexT S.get
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
-- | Duplex computations running over IO.
type DuplexIO a = DuplexT IO a

instance MonadIO (DuplexT IO) where
  liftIO = DuplexT . liftIO
|
||||
|
||||
-- | Print a value from within a duplex IO computation (debugging aid).
duplexPrint :: Show a => a -> DuplexIO ()
duplexPrint = DuplexT . liftIO . print
|
||||
|
||||
-- | Dump the current duplex state to stdout (debugging aid).
printDuplexState :: DuplexIO ()
printDuplexState = inspectDuplexState >>= duplexPrint
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
-- | Smoke test: interleave absorbs and squeezes of growing sizes and print
-- the squeezed values.
--
-- NOTE(review): the argument @m@ is currently ignored — the loop bound is
-- hard-coded to 19; confirm whether @[0..m]@ was intended.
duplexTest :: Int -> IO ()
duplexTest m = runDuplexT action zeroState
  where
    action :: DuplexIO ()
    action =
      forM_ [0..19] $ \k -> do
        absorb (map intToF [1..k])
        ys <- squeezeN k :: DuplexIO [F]
        duplexPrint ys
||||
|
||||
--------------------------------------------------------------------------------
|
||||
@ -2,16 +2,19 @@
|
||||
-- | Fiat-Shamir challenges
|
||||
|
||||
{-# LANGUAGE StrictData, GeneralizedNewtypeDeriving #-}
|
||||
module Challenge where
|
||||
module Challenge.Pure
|
||||
( DuplexState
|
||||
, duplexInitialState
|
||||
, Absorb(..)
|
||||
, Squeeze(..)
|
||||
, squeezeN
|
||||
)
|
||||
where
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
import Data.Array
|
||||
|
||||
import Control.Monad
|
||||
import qualified Control.Monad.State.Strict as S
|
||||
import Control.Monad.IO.Class
|
||||
|
||||
import Goldilocks
|
||||
import GoldilocksExt
|
||||
import Poseidon
|
||||
@ -104,45 +107,3 @@ instance Squeeze GoldilocksExt where
|
||||
in (MkExt x y, state2)
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
-- * Monadic interface
|
||||
|
||||
newtype DuplexT m a
|
||||
= DuplexT (S.StateT DuplexState m a)
|
||||
deriving (Functor,Applicative,Monad)
|
||||
|
||||
type DuplexIO a = DuplexT IO a
|
||||
|
||||
instance MonadIO (DuplexT IO) where
|
||||
liftIO action = DuplexT (liftIO action)
|
||||
|
||||
runDuplexT :: Monad m => DuplexT m a -> State -> m a
|
||||
runDuplexT (DuplexT action) ini = S.evalStateT action (duplexInitialState ini)
|
||||
|
||||
doPrint :: Show a => a -> DuplexIO ()
|
||||
doPrint x = DuplexT (liftIO $ print x)
|
||||
|
||||
doDebug :: DuplexIO ()
|
||||
doDebug = DuplexT $ do
|
||||
state <- S.get
|
||||
liftIO (print state)
|
||||
return ()
|
||||
|
||||
doAbsorb :: Absorb a => a -> DuplexIO ()
|
||||
doAbsorb x = DuplexT $ S.modify (absorb x)
|
||||
|
||||
doSqueeze :: Squeeze a => DuplexIO a
|
||||
doSqueeze = DuplexT $ S.state squeeze
|
||||
|
||||
doSqueezeN :: Squeeze a => Int -> DuplexIO [a]
|
||||
doSqueezeN n = DuplexT $ S.state (squeezeN n)
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
duplexTest :: Int -> IO ()
|
||||
duplexTest m = flip runDuplexT zeroState $ do
|
||||
forM_ [0..19] $ \k -> do
|
||||
doAbsorb (map intToF [1..k])
|
||||
ys <- doSqueezeN k :: DuplexIO [F]
|
||||
doPrint ys
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
105
Challenge/Verifier.hs
Normal file
105
Challenge/Verifier.hs
Normal file
@ -0,0 +1,105 @@
|
||||
|
||||
-- | The verifier's challenges
|
||||
|
||||
{-# LANGUAGE StrictData #-}
|
||||
module Challenge.Verifier where
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
import Goldilocks
|
||||
import GoldilocksExt
|
||||
import Hash
|
||||
import Digest
|
||||
import Types
|
||||
|
||||
import Challenge.Monad
|
||||
import Challenge.FRI
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
-- | The lookup-argument challenges (called "deltas" in Plonky2).
data LookupDelta = MkLookupDelta
  { lookup_A     :: F  -- ^ combines the lookup input and output in the argument
  , lookup_B     :: F  -- ^ combines the lookup input and output in the LUT consistency check
  , lookup_alpha :: F  -- ^ the random value in the log-derivative lookup argument
  , lookup_delta :: F  -- ^ the evaluation point of the lookup polynomial (for the consistency check?)
  }
  deriving (Eq,Show)
|
||||
|
||||
-- | Build a 'LookupDelta' from exactly four field elements; any other
-- list length is a program error.
mkLookupDelta :: [F] -> LookupDelta
mkLookupDelta fs = case fs of
  [a,b,c,d] -> MkLookupDelta a b c d
  _         -> error "mkLookupDelta: expecting 4 field elements"
|
||||
|
||||
-- | Partition a flat list of challenges into groups of four lookup deltas.
--
-- Plonky2 does this rather strangely: it first reuses the plonk betas and
-- gammas, then extends them with the required number of new challenges,
-- and finally splits the whole list into groups of four.
mkLookupDeltaList :: [F] -> [LookupDelta]
mkLookupDeltaList [] = []
mkLookupDeltaList fs =
  let (grp, rest) = splitAt 4 fs
  in  mkLookupDelta grp : mkLookupDeltaList rest
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
-- | All of the verifier's challenges.
data ProofChallenges = MkProofChallenges
  { plonk_betas    :: [F]           -- ^ random values used in Plonk's permutation argument
  , plonk_gammas   :: [F]           -- ^ random values used in Plonk's permutation argument
  , plonk_alphas   :: [F]           -- ^ random values used to combine the PLONK constraints
  , plonk_deltas   :: [LookupDelta] -- ^ the lookup challenges
  , plonk_zeta     :: FExt          -- ^ the point at which the PLONK polynomials are opened
  , fri_challenges :: FriChallenges -- ^ the challenges of the FRI subprotocol
  }
  deriving (Eq,Show)
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
-- | Replay the Fiat-Shamir transcript and derive all verifier challenges.
--
-- NOTE: the order of absorbs and squeezes must exactly mirror the prover's
-- transcript, otherwise the derived challenges will not match.
proofChallenges :: CommonCircuitData -> VerifierOnlyCircuitData -> ProofWithPublicInputs -> ProofChallenges
proofChallenges common_data verifier_data proof_data = runDuplex action zeroState where

  MkVerifierOnlyCircuitData constants_sigmas_cap circuit_digest = verifier_data
  MkProofWithPublicInputs proof public_io = proof_data

  config             = circuit_config common_data
  num_challenges     = config_num_challenges config
  has_lookup         = circuit_num_lookup_polys common_data > 0

  -- hash of the public inputs, absorbed instead of the raw inputs
  public_inputs_hash = sponge public_io

  action :: Duplex ProofChallenges
  action = do

    -- initialize the transcript
    absorb circuit_digest
    absorb public_inputs_hash
    absorb (wires_cap proof)

    -- plonk permutation-argument challenges
    betas  <- squeezeN num_challenges :: Duplex [F]
    gammas <- squeezeN num_challenges :: Duplex [F]

    -- lookup challenges: reuse the betas and gammas, extend with fresh
    -- challenges, then partition into groups of four
    deltas <-
      if has_lookup
        then do
          extra <- squeezeN (2*num_challenges)
          return (mkLookupDeltaList (betas ++ gammas ++ extra))
        else return []

    absorb (plonk_zs_partial_products_cap proof)
    alphas <- squeezeN num_challenges :: Duplex [F]

    absorb (quotient_polys_cap proof)
    zeta <- squeeze :: Duplex FExt

    -- finally the challenges of the FRI subprotocol
    friChs <- friChallenges common_data verifier_data proof

    return $ MkProofChallenges
      { plonk_betas    = betas
      , plonk_gammas   = gammas
      , plonk_alphas   = alphas
      , plonk_deltas   = deltas
      , plonk_zeta     = zeta
      , fri_challenges = friChs
      }
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
24
testmain.hs
24
testmain.hs
@ -9,6 +9,7 @@ import Types
|
||||
import Hash
|
||||
import Digest
|
||||
import Goldilocks
|
||||
import Challenge.Verifier
|
||||
|
||||
import qualified Data.ByteString.Char8 as B
|
||||
import qualified Data.ByteString.Lazy.Char8 as L
|
||||
@ -16,25 +17,18 @@ import qualified Data.ByteString.Lazy.Char8 as L
|
||||
--------------------------------------------------------------------------------
|
||||
|
||||
main = do
|
||||
let publicIO = MkPublicInputs [0, 1, 3736710860384812976]
|
||||
|
||||
--let prefix = "fibonacci"
|
||||
--let prefix = "recursion_outer"
|
||||
-- let prefix = "fibonacci"
|
||||
-- let prefix = "recursion_outer"
|
||||
let prefix = "lookup"
|
||||
|
||||
text_common <- L.readFile ("json/" ++ prefix ++ "_common.json")
|
||||
text_proof <- L.readFile ("json/" ++ prefix ++ "_proof.json" )
|
||||
text_vkey <- L.readFile ("json/" ++ prefix ++ "_vkey.json" )
|
||||
text_proof <- L.readFile ("json/" ++ prefix ++ "_proof.json" )
|
||||
|
||||
-- let Just vkey = decode text_vkey :: Maybe VerifierOnlyCircuitData
|
||||
-- print vkey
|
||||
-- putStrLn ""
|
||||
-- L.putStr (encode vkey)
|
||||
let Just common_data = decode text_common :: Maybe CommonCircuitData
|
||||
let Just verifier_data = decode text_vkey :: Maybe VerifierOnlyCircuitData
|
||||
let Just proof_data = decode text_proof :: Maybe ProofWithPublicInputs
|
||||
|
||||
-- let ei = eitherDecode text_common :: Either String CommonCircuitData
|
||||
-- print ei
|
||||
-- putStrLn ""
|
||||
let challenges = proofChallenges common_data verifier_data proof_data
|
||||
|
||||
let ei = eitherDecode text_proof :: Either String ProofWithPublicInputs
|
||||
print ei
|
||||
putStrLn ""
|
||||
print challenges
|
||||
Loading…
x
Reference in New Issue
Block a user