# beacon_chain
# Copyright (c) 2018-2024 Status Research & Development GmbH
# Licensed and distributed under either of
#   * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
#   * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.

{.push raises: [].}

# Uncategorized helper functions from the spec

import
  std/[algorithm, macros, tables],
  stew/results,
  ssz_serialization/[proofs, types],
  chronicles,
  ./[beacon_time, crypto],
  kzg4844/kzg_ex,
  eth/p2p/discoveryv5/[node],
  ./helpers,
  ./datatypes/[eip7594, deneb]

type
  CellBytes = array[eip7594.CELLS_PER_EXT_BLOB, Cell]
  ProofBytes = array[eip7594.CELLS_PER_EXT_BLOB, KzgProof]

proc sortedColumnIndices*(columnsPerSubnet: ColumnIndex,
    subnetIds: HashSet[uint64]): seq[ColumnIndex] =
  ## Expands every subnet id in `subnetIds` into the column indices it is
  ## responsible for (`subnet_id + k * DATA_COLUMN_SIDECAR_SUBNET_COUNT`
  ## for `k in 0 ..< columnsPerSubnet`) and returns them sorted ascending.
  var res: seq[ColumnIndex] = @[]
  for i in 0 ..< columnsPerSubnet:
    for subnetId in subnetIds:
      let index = DATA_COLUMN_SIDECAR_SUBNET_COUNT * i + subnetId
      res.add(ColumnIndex(index))
  res.sort()
  res

proc sortedColumnIndexList*(columnsPerSubnet: ColumnIndex,
    subnetIds: HashSet[uint64]): List[ColumnIndex, NUMBER_OF_COLUMNS] =
  ## Same result as `sortedColumnIndices`, packaged as an SSZ `List`.
  ## Delegates to `sortedColumnIndices` instead of duplicating its
  ## expansion-and-sort loop.
  var res: List[ColumnIndex, NUMBER_OF_COLUMNS]
  for col in sortedColumnIndices(columnsPerSubnet, subnetIds):
    # `add` only fails once the list is full; with spec-consistent inputs
    # (`columnsPerSubnet * subnetIds.len <= NUMBER_OF_COLUMNS`) that cannot
    # happen, matching the original's `discard`.
    discard res.add(col)
  res

proc get_custody_column_subnet*(node_id: NodeId,
    custody_subnet_count: uint64): Result[HashSet[uint64], cstring] =
  ## Fetches the custody-column subnet ids for the current node, walking
  ## node ids upwards from `node_id` (wrapping at 2^256) until
  ## `custody_subnet_count` distinct subnets have been collected.
  ##
  ## Spec: `assert custody_subnet_count <= DATA_COLUMN_SIDECAR_SUBNET_COUNT`
  if not (custody_subnet_count <= DATA_COLUMN_SIDECAR_SUBNET_COUNT):
    return err("Eip7594: Custody subnet count exceeds the DATA_COLUMN_SIDECAR_SUBNET_COUNT")

  var
    subnet_ids: HashSet[uint64]
    current_id = node_id

  while subnet_ids.len < int(custody_subnet_count):
    # Derive a candidate subnet id from the low 8 bytes of the current
    # node id (little-endian), reduced modulo the subnet count.
    var subnet_id_bytes: array[8, byte]
    subnet_id_bytes[0..7] = current_id.toBytesLE().toOpenArray(0, 7)

    let subnet_id =
      bytes_to_uint64(subnet_id_bytes) mod DATA_COLUMN_SIDECAR_SUBNET_COUNT

    if subnet_id notin subnet_ids:
      subnet_ids.incl(subnet_id)

    if current_id == UInt256.high.NodeId:
      # Overflow prevention
      current_id = NodeId(StUint[256].zero)
    current_id += NodeId(StUint[256].one)

  # The spec's `assert len(subnet_ids) == len(set(subnet_ids))` holds by
  # construction here: a `HashSet` cannot contain duplicates. The previous
  # code compared `subnet_ids.len == subnet_ids.len` — a tautology that
  # could never fail — so the dead check is dropped.
  ok(subnet_ids)

# https://github.com/ethereum/consensus-specs/blob/5f48840f4d768bf0e0a8156a3ed06ec333589007/specs/_features/eip7594/das-core.md#get_custody_columns
proc get_custody_columns*(node_id: NodeId,
    custody_subnet_count: uint64): Result[seq[ColumnIndex], cstring] =
  ## Sorted column indices this node must custody.
  # Propagate a failed subnet computation with `?` instead of `.get`,
  # which would raise a Defect on an `err` result.
  let subnet_ids =
    ? get_custody_column_subnet(node_id, custody_subnet_count)

  # columns_per_subnet = NUMBER_OF_COLUMNS // DATA_COLUMN_SIDECAR_SUBNET_COUNT
  let columns_per_subnet =
    NUMBER_OF_COLUMNS div DATA_COLUMN_SIDECAR_SUBNET_COUNT

  ok(sortedColumnIndices(ColumnIndex(columns_per_subnet), subnet_ids))

proc get_custody_column_list*(node_id: NodeId,
    custody_subnet_count: uint64):
    Result[List[ColumnIndex, NUMBER_OF_COLUMNS], cstring] =
  ## SSZ-`List` flavour of `get_custody_columns`.
  # Same error-propagation fix as `get_custody_columns`: `?` instead of
  # a Defect-raising `.get`.
  let subnet_ids =
    ? get_custody_column_subnet(node_id, custody_subnet_count)

  # columns_per_subnet = NUMBER_OF_COLUMNS // DATA_COLUMN_SIDECAR_SUBNET_COUNT
  let columns_per_subnet =
    NUMBER_OF_COLUMNS div DATA_COLUMN_SIDECAR_SUBNET_COUNT

  ok(sortedColumnIndexList(ColumnIndex(columns_per_subnet), subnet_ids))

# https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/_features/eip7594/das-core.md#compute_extended_matrix
proc compute_extended_matrix* (blobs: seq[KzgBlob]):
    Result[seq[MatrixEntry], cstring] =
  # This helper demonstrates the relationship between blobs and the
  # `MatrixEntries`.
  var extended_matrix: seq[MatrixEntry]
  for blbIdx, blob in blobs.pairs:
    let cellsAndProofs =
computeCellsAndKzgProofs(blob) if not cellsAndProofs.isOk: return err("Computing Extended Matrix: Issue computing cells and proofs") for i in 0..= columnsNeeded: return true false # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/_features/eip7594/das-core.md#compute_extended_matrix proc get_extended_sample_count*(samples_per_slot: int, allowed_failures: int): int = # `get_extended_sample_count` computes the number of samples we # should query from peers, given the SAMPLES_PER_SLOT and # the number of allowed failures # Retrieving the column count let columnsCount = NUMBER_OF_COLUMNS.int # If 50% of the columns are missing, we are able to reconstruct the data # If 50% + 1 columns are missing, we are NO MORE able to reconstruct the data let worstCaseConditionCount = (columnsCount div 2) + 1 # Compute the false positive threshold let falsePositiveThreshold = hypergeom_cdf(0, columnsCount, worstCaseConditionCount, samples_per_slot) var sampleCount: int # Finally, compute the extended sample count for i in samples_per_slot .. columnsCount + 1: if hypergeom_cdf(allowed_failures, columnsCount, worstCaseConditionCount, i) <= falsePositiveThreshold: sampleCount = i break sampleCount = i return sampleCount