# Please note this is still a work in progress

from dataclasses import dataclass, field
from hashlib import sha256, blake2b
from typing import TypeAlias, List, Optional

Id: TypeAlias = bytes


@dataclass
class Epoch:
    # identifier of the epoch, counting incrementally from 0
    epoch: int


@dataclass
class TimeConfig:
    # How many slots in an epoch; all epochs have the same number of slots
    slots_per_epoch: int

    # How long a slot lasts, in seconds
    slot_duration: int

    # Start of the first epoch, as a unix timestamp (second precision)
    chain_start_time: int


@dataclass
class Config:
    k: int
    active_slot_coeff: float  # 'f', the rate of occupied slots
    time: TimeConfig

    @property
    def s(self) -> int:
        # length (in slots) of the window used by the fork choice rule to
        # compare chain density after a deep fork, set here to 3k/f
        return int(3 * self.k / self.active_slot_coeff)
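

# Illustrative example (values are arbitrary, not normative): with k = 10 and
# an active slot coefficient f = 0.05, the density window is
#   s = int(3 * 10 / 0.05) = 600 slots.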


# An absolute, unique identifier of a slot, counting incrementally from 0
@dataclass
class Slot:
    absolute_slot: int

    @staticmethod
    def from_unix_timestamp_s(config: TimeConfig, timestamp_s: int) -> "Slot":
        absolute_slot = (timestamp_s - config.chain_start_time) // config.slot_duration
        return Slot(absolute_slot)

    def epoch(self, config: TimeConfig) -> Epoch:
        return Epoch(self.absolute_slot // config.slots_per_epoch)
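

# Illustrative example (values are arbitrary, not normative): with a 2-second
# slot duration and a chain starting at unix time 0, timestamp 100 falls in
# absolute slot 50; with 10 slots per epoch, that slot belongs to epoch 5.
#
#   time_config = TimeConfig(slots_per_epoch=10, slot_duration=2, chain_start_time=0)
#   Slot.from_unix_timestamp_s(time_config, 100)  # -> Slot(absolute_slot=50)
#   Slot(50).epoch(time_config)                   # -> Epoch(epoch=5)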


@dataclass
class Coin:
    pk: int
    value: int

    def commitment(self) -> Id:
        # TODO: mocked until CL is understood
        pk_bytes = int.to_bytes(self.pk, length=32, byteorder="little")
        value_bytes = int.to_bytes(self.value, length=32, byteorder="little")

        h = sha256()
        h.update(pk_bytes)
        h.update(value_bytes)
        return h.digest()

    def nullifier(self) -> Id:
        # TODO: mocked until CL is understood
        pk_bytes = int.to_bytes(self.pk, length=32, byteorder="little")
        value_bytes = int.to_bytes(self.value, length=32, byteorder="little")

        h = sha256()
        h.update(pk_bytes)
        h.update(value_bytes)
        h.update(b"\x00")  # extra 0 byte to differentiate from commitment
        return h.digest()


@dataclass
class MockLeaderProof:
    commitment: Id
    nullifier: Id

    @staticmethod
    def from_coin(coin: Coin) -> "MockLeaderProof":
        return MockLeaderProof(commitment=coin.commitment(), nullifier=coin.nullifier())

    def verify(self, slot: Slot) -> bool:
        # TODO: verification not implemented
        return True


@dataclass
class BlockHeader:
    slot: Slot
    parent: Id
    content_size: int
    content_id: Id
    leader_proof: MockLeaderProof

    # **Attention**:
    # The ID of a block header is defined as the 32-byte blake2b hash of its fields
    # as serialized in the format specified by the 'HEADER' rule in 'messages.abnf'.
    #
    # The following code is to be considered as a reference implementation, mostly to be used for testing.
    def id(self) -> Id:
        h = blake2b(digest_size=32)

        # version byte
        h.update(b"\x01")

        # content size
        h.update(int.to_bytes(self.content_size, length=4, byteorder="big"))

        # content id
        assert len(self.content_id) == 32
        h.update(self.content_id)

        # slot
        h.update(int.to_bytes(self.slot.absolute_slot, length=8, byteorder="big"))

        # parent
        assert len(self.parent) == 32
        h.update(self.parent)

        # leader proof
        assert len(self.leader_proof.commitment) == 32
        h.update(self.leader_proof.commitment)
        assert len(self.leader_proof.nullifier) == 32
        h.update(self.leader_proof.nullifier)

        return h.digest()
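

# Serialization summary (informative, derived from the reference code above):
# a header is hashed as
#   0x01 || u32_be(content_size) || content_id (32B) || u64_be(slot)
#        || parent (32B) || commitment (32B) || nullifier (32B)
# i.e. 141 bytes in total, and the header id is the blake2b-256 digest of those
# bytes. The 'HEADER' rule in 'messages.abnf' remains the normative definition.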


@dataclass
class Chain:
    blocks: List[BlockHeader]

    def tip(self) -> BlockHeader:
        return self.blocks[-1]

    def length(self) -> int:
        return len(self.blocks)

    def contains_block(self, block: BlockHeader) -> bool:
        return block in self.blocks

    def block_position(self, block: BlockHeader) -> int:
        assert self.contains_block(block)
        for i, b in enumerate(self.blocks):
            if b == block:
                return i


@dataclass
class LedgerState:
    """
    A snapshot of the ledger state up to some block
    """

    block: Id = None
    nonce: Id = None
    total_stake: int = None
    commitments: set[Id] = field(default_factory=set)  # set of coin commitments
    nullifiers: set[Id] = field(default_factory=set)  # set of nullifiers of spent coins

    def copy(self):
        return LedgerState(
            block=self.block,
            nonce=self.nonce,
            total_stake=self.total_stake,
            commitments=self.commitments.copy(),
            nullifiers=self.nullifiers.copy(),
        )

    def verify_committed(self, commitment: Id) -> bool:
        return commitment in self.commitments

    def verify_unspent(self, nullifier: Id) -> bool:
        return nullifier not in self.nullifiers

    def apply(self, block: BlockHeader):
        assert block.parent == self.block
        self.block = block.id()
        self.nullifiers.add(block.leader_proof.nullifier)


class Follower:
    def __init__(self, genesis_state: LedgerState, config: Config):
        self.config = config
        self.forks = []
        self.local_chain = Chain([])
        self.epoch = EpochState(
            stake_distribution_snapshot=genesis_state,
            nonce_snapshot=genesis_state,
        )
        self.genesis_state = genesis_state
        self.ledger_state = genesis_state.copy()

    def validate_header(self, block: BlockHeader) -> bool:
        # TODO: this is not the full block validation spec, only slot leader is verified
        return self.verify_slot_leader(block.slot, block.leader_proof)

    def verify_slot_leader(self, slot: Slot, proof: MockLeaderProof) -> bool:
        return (
            proof.verify(slot)  # verify slot leader proof
            and self.epoch.verify_commitment_is_old_enough_to_lead(proof.commitment)
            and self.ledger_state.verify_unspent(proof.nullifier)
        )

    # Try appending this block to an existing chain and return whether
    # the operation was successful
    def try_extend_chains(self, block: BlockHeader) -> bool:
        if self.tip_id() == block.parent:
            self.local_chain.blocks.append(block)
            return True

        for chain in self.forks:
            if chain.tip().id() == block.parent:
                chain.blocks.append(block)
                return True

        return False

    def try_create_fork(self, block: BlockHeader) -> Optional[Chain]:
        if self.genesis_state.block == block.parent:
            # this block is forking off the genesis state
            return Chain(blocks=[block])

        chains = self.forks + [self.local_chain]
        for chain in chains:
            for i, b in enumerate(chain.blocks):
                if b.id() == block.parent:
                    # the parent sits inside an existing chain: the new fork
                    # shares the prefix up to and including the parent
                    return Chain(blocks=chain.blocks[: i + 1] + [block])

        return None

    def on_block(self, block: BlockHeader):
        if not self.validate_header(block):
            return

        # check if the new block extends an existing chain
        succeeded_in_extending_a_chain = self.try_extend_chains(block)
        if not succeeded_in_extending_a_chain:
            # we failed to extend one of the existing chains,
            # therefore we might need to create a new fork
            new_chain = self.try_create_fork(block)
            if new_chain is not None:
                self.forks.append(new_chain)
            else:
                # otherwise, we're missing the parent block;
                # in that case, just ignore the block
                return

        # We may need to switch forks; let's run the fork choice rule to check.
        new_chain = self.fork_choice()

        if new_chain == self.local_chain:
            # we have not re-org'd, therefore we can simply update our ledger state
            # if this block extends our local chain
            if self.local_chain.tip() == block:
                self.ledger_state.apply(block)
        else:
            # we have re-org'd, therefore we must roll back our ledger state and
            # re-apply blocks from the new chain
            ledger_state = self.genesis_state.copy()
            for block in new_chain.blocks:
                ledger_state.apply(block)

            self.ledger_state = ledger_state
            self.local_chain = new_chain

    # Evaluate the fork choice rule and return the chain that should be
    # considered the canonical (local) chain
    def fork_choice(self) -> Chain:
        return maxvalid_bg(
            self.local_chain, self.forks, k=self.config.k, s=self.config.s
        )

    def tip_id(self) -> Id:
        if self.local_chain.length() > 0:
            return self.local_chain.tip().id()
        else:
            return self.ledger_state.block


@dataclass
class EpochState:
    # for details of the snapshot schedule please see:
    # https://github.com/IntersectMBO/ouroboros-consensus/blob/fe245ac1d8dbfb563ede2fdb6585055e12ce9738/docs/website/contents/for-developers/Glossary.md#epoch-structure

    # The stake distribution snapshot is taken at the beginning of the previous epoch
    stake_distribution_snapshot: LedgerState

    # The nonce snapshot is taken 7k/f slots into the previous epoch
    nonce_snapshot: LedgerState

    def verify_commitment_is_old_enough_to_lead(self, commitment: Id) -> bool:
        return self.stake_distribution_snapshot.verify_committed(commitment)

    def total_stake(self) -> int:
        """Returns the total stake that will be used to relativize leadership proofs during this epoch"""
        return self.stake_distribution_snapshot.total_stake

    def nonce(self) -> bytes:
        return self.nonce_snapshot.nonce


def phi(f: float, alpha: float) -> float:
    """
    params:
      f: 'active slot coefficient' - the rate of occupied slots
      alpha: relative stake held by the validator

    returns: the probability that this validator should win the slot lottery
    """
    return 1 - (1 - f) ** alpha
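

# Worked example (illustrative only): with f = 0.05, a validator holding half
# of the total stake (alpha = 0.5) wins any given slot with probability
#   phi(0.05, 0.5) = 1 - (1 - 0.05) ** 0.5 ≈ 0.0253
# i.e. roughly a 2.5% chance per slot.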


class MOCK_LEADER_VRF:
    """NOT SECURE: A mock VRF function where the sk and pk are assumed to be the same"""

    ORDER = 2**256

    @classmethod
    def vrf(cls, sk: int, nonce: bytes, slot: int) -> int:
        h = sha256()
        h.update(int.to_bytes(sk, length=32, byteorder="big"))
        h.update(nonce)
        h.update(int.to_bytes(slot, length=16, byteorder="big"))  # 16-byte slot encoding
        return int.from_bytes(h.digest(), byteorder="big")

    @classmethod
    def verify(cls, r, pk, nonce, slot):
        raise NotImplementedError()


@dataclass
class Leader:
    config: Config
    coin: Coin

    def try_prove_slot_leader(
        self, epoch: EpochState, slot: Slot
    ) -> MockLeaderProof | None:
        if self._is_slot_leader(epoch, slot):
            return MockLeaderProof.from_coin(self.coin)

    def propose_block(self, slot: Slot, parent: BlockHeader) -> BlockHeader:
        # TODO: block content is not specified yet; the content fields and the
        # leader proof below are placeholders so that the header can be built
        return BlockHeader(
            slot=slot,
            parent=parent.id(),
            content_size=0,
            content_id=bytes(32),
            leader_proof=MockLeaderProof.from_coin(self.coin),
        )

    def _is_slot_leader(self, epoch: EpochState, slot: Slot):
        relative_stake = self.coin.value / epoch.total_stake()

        r = MOCK_LEADER_VRF.vrf(self.coin.pk, epoch.nonce(), slot.absolute_slot)

        return r < MOCK_LEADER_VRF.ORDER * phi(
            self.config.active_slot_coeff, relative_stake
        )


def common_prefix_len(a: Chain, b: Chain) -> int:
    for i, (x, y) in enumerate(zip(a.blocks, b.blocks)):
        if x.id() != y.id():
            return i
    return min(len(a.blocks), len(b.blocks))


def chain_density(chain: Chain, slot: Slot) -> int:
    return len(
        [
            block
            for block in chain.blocks
            if block.slot.absolute_slot < slot.absolute_slot
        ]
    )


# Implementation of the fork choice rule as defined in the Ouroboros Genesis paper.
# k defines the forking depth of a chain we accept without further analysis.
# s defines the length of time (in slots) after the fork within which we inspect
# chain density.
def maxvalid_bg(local_chain: Chain, forks: List[Chain], k: int, s: int) -> Chain:
    cmax = local_chain
    for chain in forks:
        lowest_common_ancestor = common_prefix_len(cmax, chain)
        m = cmax.length() - lowest_common_ancestor
        if m <= k:
            # Classic longest chain rule with parameter k
            if cmax.length() < chain.length():
                cmax = chain
        else:
            # The chain is forking too much; we need to pay a bit more attention.
            # In particular, select the chain that is the densest after the fork.
            forking_slot = Slot(
                cmax.blocks[lowest_common_ancestor].slot.absolute_slot + s
            )
            cmax_density = chain_density(cmax, forking_slot)
            candidate_density = chain_density(chain, forking_slot)
            if cmax_density < candidate_density:
                cmax = chain

    return cmax


if __name__ == "__main__":
    pass
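    # Illustrative usage sketch (not part of the spec): all parameter values
    # below are arbitrary assumptions chosen only to exercise the classes above.
    time_config = TimeConfig(slots_per_epoch=1000, slot_duration=1, chain_start_time=0)
    config = Config(k=10, active_slot_coeff=0.05, time=time_config)

    coin = Coin(pk=1, value=100)
    genesis = LedgerState(
        block=bytes(32),
        nonce=bytes(32),
        total_stake=coin.value,
        commitments={coin.commitment()},
        nullifiers=set(),
    )

    follower = Follower(genesis, config)
    # with an empty local chain, the canonical tip is still the genesis block
    assert follower.tip_id() == genesis.block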