rework reconstruction publishing strategy

Agnish Ghosh 2024-09-12 11:59:59 +05:30
parent dc4a1df2d7
commit ee33ec0683
4 changed files with 25 additions and 32 deletions

View File

@@ -161,12 +161,6 @@ proc queryRandom*(
debug "Could not decode the csc count ENR bitfield of peer",
peer = n.record.toURI(), exception = e.name, msg = e.msg
continue
-    if wantedCscnets == cscnetsNode:
-      score += 1
-    else:
-      debug "Wanted csc count and decode csc from enr does not match!",
-        wantedCsc = wantedCscnets, cscNets = cscnetsNode
let attnetsBytes = n.record.get(enrAttestationSubnetsField, seq[byte])
if attnetsBytes.isOk():

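For context, a minimal sketch of how the peer-scoring loop reads after this hunk. Only the names wantedCscnets, cscnetsNode, score, and the debug call come from the diff; the loop frame, decodeCscCount, and the attnets scoring step are assumptions for illustration, not the actual nimbus-eth2 source:

    # Sketch only: a peer whose csc count ENR field fails to decode is
    # still skipped, but a successfully decoded count no longer
    # contributes to the peer's score.
    for n in discoveredNodes:                      # assumed loop frame
      var score = 0
      var cscnetsNode: uint8
      try:
        cscnetsNode = decodeCscCount(n.record)     # hypothetical helper
      except CatchableError as e:
        debug "Could not decode the csc count ENR bitfield of peer",
          peer = n.record.toURI(), exception = e.name, msg = e.msg
        continue
      # scoring now proceeds on the remaining ENR fields alone
      let attnetsBytes = n.record.get(enrAttestationSubnetsField, seq[byte])
      if attnetsBytes.isOk():
        score += 1                                 # assumed scoring step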
View File

@@ -1533,7 +1533,7 @@ proc tryReconstructingDataColumns* (self: BeaconNode,
# then reconstruction is not possible, and if all the data columns
# are already stored then we do not need to reconstruct at all
if storedColumns.len < NUMBER_OF_COLUMNS div 2 or storedColumns.len == NUMBER_OF_COLUMNS:
-    ok(data_column_sidecars)
+    ok(finalisedDataColumns)
else:
# Recover blobs from saved data column sidecars
let recovered_cps = recover_cells_and_proofs(data_column_sidecars, storedColumns.len, signed_block)
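
The guard above combines two distinct early exits; spelled out as a sketch, assuming NUMBER_OF_COLUMNS = 128 as in the PeerDAS spec:

    # Sketch of the guard's two cases (NUMBER_OF_COLUMNS assumed 128):
    # cell recovery needs at least half of the columns (64), and a node
    # that already stores all 128 has nothing left to reconstruct.
    if storedColumns.len < NUMBER_OF_COLUMNS div 2:  # fewer than 64
      return ok(finalisedDataColumns)                # cannot recover
    if storedColumns.len == NUMBER_OF_COLUMNS:       # all 128 present
      return ok(finalisedDataColumns)                # nothing to do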
@@ -1563,6 +1563,8 @@ proc reconstructAndSendDataColumns*(node: BeaconNode) {.async.} =
if not data_column_sidecars.isOk():
return
notice "Data Column Reconstructed and Saved Successfully"
+  if node.config.subscribeAllSubnets:
+    notice "Attempting to publish reconstructed columns"
let dc = data_column_sidecars.get
var
worker_count = len(dc)

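This conditional is the core of the reworked strategy: every node still reconstructs and stores columns, but only one running with subscribeAllSubnets (a supernode holding every column) goes on to republish them. A condensed sketch of the publish path; the broadcast call is a hypothetical stand-in for the elided worker loop:

    # Sketch: gate republishing on full-subnet subscription.
    if node.config.subscribeAllSubnets:
      notice "Attempting to publish reconstructed columns"
      let dc = data_column_sidecars.get
      for sidecar in dc:
        await node.broadcastDataColumnSidecar(sidecar)  # hypothetical call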
View File

@@ -275,9 +275,6 @@ proc get_data_column_sidecars*(signed_block: deneb.TrustedSignedBeaconBlock |
discard column_cells.add(cellsAndProofs[i].cells)
discard column_proofs.add(cellsAndProofs[i].proofs)
debugEcho "column cells len"
debugEcho column_cells.len
var sidecar = DataColumnSidecar(
index: ColumnIndex(column_index),
column: column_cells,

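The dropped lines used debugEcho, which writes to stdout unconditionally regardless of log level. Were the trace still wanted, the level-gated chronicles logging used elsewhere in the codebase would fit better; a one-line sketch, assuming chronicles is imported in this module:

    # Level-gated alternative to the removed debugEcho pair:
    debug "Accumulated column cells", column_index, cells = column_cells.len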
View File

@@ -35,7 +35,7 @@ block:
const MAX_TOP_BYTE = 114
-proc createSampleKzgBlobs(n: int): Result[seq[KzgBlob], cstring] =
+proc createSampleKzgBlobs(n: int): seq[KzgBlob] =
var blob: array[BYTES_PER_BLOB, byte]
var blobs: seq[KzgBlob]
for i in 0..<n:
@@ -45,7 +45,7 @@ proc createSampleKzgBlobs(n: int): Result[seq[KzgBlob], cstring] =
blob[i] = MAX_TOP_BYTE
blobs.add(KzgBlob(bytes: blob))
-  ok(blobs)
+  blobs
proc chunks[T](lst: seq[T], n: int): seq[seq[T]] =
## Helper that splits a list into N sized chunks.
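
Since the fill loop cannot fail, wrapping its output in Result[seq[KzgBlob], cstring] only forced a .get at every call site, which this commit removes. The post-change helper, reconstructed from the visible hunk lines with the elided body left as a comment:

    proc createSampleKzgBlobs(n: int): seq[KzgBlob] =
      var blob: array[BYTES_PER_BLOB, byte]
      var blobs: seq[KzgBlob]
      for i in 0..<n:
        # ... lines elided in the hunk populate `blob` here ...
        blob[i] = MAX_TOP_BYTE
        blobs.add(KzgBlob(bytes: blob))
      blobs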
@@ -59,7 +59,7 @@ suite "EIP-7594 Unit Tests":
let
blob_count = 2
input_blobs = createSampleKzgBlobs(blob_count)
-    extended_matrix = compute_extended_matrix(input_blobs.get)
+    extended_matrix = compute_extended_matrix(input_blobs)
doAssert extended_matrix.get.len == kzg_abi.CELLS_PER_EXT_BLOB * blob_count
let
chunkSize = kzg_abi.CELLS_PER_EXT_BLOB
@@ -79,7 +79,7 @@ suite "EIP-7594 Unit Tests":
let
blob_count = 2
blobs = createSampleKzgBlobs(blob_count)
-    extended_matrix = compute_extended_matrix(blobs.get)
+    extended_matrix = compute_extended_matrix(blobs)
# Construct a matrix with some entries missing
var partial_matrix: seq[MatrixEntry]
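
At the call sites the change is mechanical: the .get on the sample blobs disappears, while compute_extended_matrix itself still appears to return a Result, since the assertion keeps its own .get. A post-change usage sketch:

    let
      blob_count = 2
      blobs = createSampleKzgBlobs(blob_count)          # plain seq now
      extended_matrix = compute_extended_matrix(blobs)  # no .get on input
    doAssert extended_matrix.get.len ==
      kzg_abi.CELLS_PER_EXT_BLOB * blob_count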