rework reconstruction publishing strategy
This commit is contained in:
parent dc4a1df2d7
commit ee33ec0683
@@ -161,12 +161,6 @@ proc queryRandom*(
         debug "Could not decode the csc count ENR bitfield of peer",
           peer = n.record.toURI(), exception = e.name, msg = e.msg
         continue
-      if wantedCscnets == cscnetsNode:
-        score += 1
-
-      else:
-        debug "Wanted csc count and decode csc from enr does not match!",
-          wantedCsc = wantedCscnets, cscNets = cscnetsNode
 
       let attnetsBytes = n.record.get(enrAttestationSubnetsField, seq[byte])
       if attnetsBytes.isOk():
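Note: the deleted branch awarded a discovery-scoring point when a peer's ENR-advertised custody subnet count (csc) matched the wanted value; after this hunk only the subnet bitfields feed the score. A minimal sketch of the attnets scoring that remains, where SSZ.decode, AttnetBits, ATTESTATION_SUBNET_COUNT and wantedAttnets are assumed from the surrounding nimbus-eth2 code rather than shown in this hunk:

# Sketch only: every name not in the hunk above is an assumption.
let attnetsBytes = n.record.get(enrAttestationSubnetsField, seq[byte])
if attnetsBytes.isOk():
  let attnetsNode = SSZ.decode(attnetsBytes.get(), AttnetBits)
  for i in 0..<ATTESTATION_SUBNET_COUNT:
    if wantedAttnets[i] and attnetsNode[i]:
      score += 1  # one point per overlapping attestation subnet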
@@ -1533,7 +1533,7 @@ proc tryReconstructingDataColumns* (self: BeaconNode,
   # then reconstruction is not possible, and if all the data columns
   # are already stored then we do not need to reconstruct at all
   if storedColumns.len < NUMBER_OF_COLUMNS div 2 or storedColumns.len == NUMBER_OF_COLUMNS:
-    ok(data_column_sidecars)
+    ok(finalisedDataColumns)
   else:
     # Recover blobs from saved data column sidecars
     let recovered_cps = recover_cells_and_proofs(data_column_sidecars, storedColumns.len, signed_block)
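Note: the guard above encodes the erasure-coding bound from EIP-7594: with NUMBER_OF_COLUMNS = 128, any 64 columns suffice to recover the rest, and a complete set needs no work. The same guard restated as a runnable predicate, assuming the 128 value:

const NUMBER_OF_COLUMNS = 128  # EIP-7594 value, assumed here

func reconstructionNeeded(stored: int): bool =
  # Enough columns to erasure-decode, but not already complete.
  stored >= NUMBER_OF_COLUMNS div 2 and stored < NUMBER_OF_COLUMNS

doAssert not reconstructionNeeded(63)   # below the recovery threshold
doAssert reconstructionNeeded(64)       # half the columns suffice
doAssert not reconstructionNeeded(128)  # nothing left to reconstruct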
@@ -1563,24 +1563,26 @@ proc reconstructAndSendDataColumns*(node: BeaconNode) {.async.} =
     if not data_column_sidecars.isOk():
       return
     notice "Data Column Reconstructed and Saved Successfully"
-    let dc = data_column_sidecars.get
-    var
-      worker_count = len(dc)
-      das_workers = newSeq[Future[SendResult]](worker_count)
-    for i in 0..<dc.lenu64:
-      let subnet_id = compute_subnet_for_data_column_sidecar(i)
-      das_workers[i] =
-        node.network.broadcastDataColumnSidecar(subnet_id, dc[i])
-    let allres = await allFinished(das_workers)
-    for i in 0..<allres.len:
-      let res = allres[i]
-      doAssert res.finished()
-      if res.failed():
-        notice "Reconstructed data columns not sent",
-          data_column = shortLog(dc[i]), error = res.error[]
-      else:
-        notice "Reconstructed data columns sent",
-          data_column = shortLog(dc[i])
+    if node.config.subscribeAllSubnets:
+      notice "Attempting to publish reconstructed columns"
+      let dc = data_column_sidecars.get
+      var
+        worker_count = len(dc)
+        das_workers = newSeq[Future[SendResult]](worker_count)
+      for i in 0..<dc.lenu64:
+        let subnet_id = compute_subnet_for_data_column_sidecar(i)
+        das_workers[i] =
+          node.network.broadcastDataColumnSidecar(subnet_id, dc[i])
+      let allres = await allFinished(das_workers)
+      for i in 0..<allres.len:
+        let res = allres[i]
+        doAssert res.finished()
+        if res.failed():
+          notice "Reconstructed data columns not sent",
+            data_column = shortLog(dc[i]), error = res.error[]
+        else:
+          notice "Reconstructed data columns sent",
+            data_column = shortLog(dc[i])
   else:
     return
 
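Note: the rework gates publishing behind node.config.subscribeAllSubnets and keeps the scatter/gather shape: every broadcast future is started first, then allFinished waits for all of them, so one failed send cannot short-circuit the per-column reporting loop. A self-contained chronos sketch of that pattern, with sendOne standing in for broadcastDataColumnSidecar:

import chronos

proc sendOne(i: int): Future[void] {.async.} =
  # Stand-in for node.network.broadcastDataColumnSidecar(subnet_id, dc[i]).
  await sleepAsync(10.milliseconds)

proc fanOut(count: int) {.async.} =
  var workers = newSeq[Future[void]](count)
  for i in 0..<count:
    workers[i] = sendOne(i)
  # allFinished completes once every worker has, failed or not,
  # so each result can be inspected individually.
  let results = await allFinished(workers)
  for res in results:
    doAssert res.finished()
    if res.failed():
      echo "send failed: ", res.error.msg
    else:
      echo "send ok"

waitFor fanOut(4)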
@@ -275,9 +275,6 @@ proc get_data_column_sidecars*(signed_block: deneb.TrustedSignedBeaconBlock |
       discard column_cells.add(cellsAndProofs[i].cells)
       discard column_proofs.add(cellsAndProofs[i].proofs)
 
-    debugEcho "column cells len"
-    debugEcho column_cells.len
-
     var sidecar = DataColumnSidecar(
       index: ColumnIndex(column_index),
       column: column_cells,
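Note: the dropped debugEcho lines were leftover print-debugging. If the cell count ever needs to stay observable, a structured chronicles log line matches the style used elsewhere in this diff (a sketch; the message and field name are illustrative):

import chronicles

let cellCount = 128  # stand-in for column_cells.len at this point
debug "Assembled data column cells", cells = cellCount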
@@ -35,7 +35,7 @@ block:
 
 const MAX_TOP_BYTE = 114
 
-proc createSampleKzgBlobs(n: int): Result[seq[KzgBlob], cstring] =
+proc createSampleKzgBlobs(n: int): seq[KzgBlob] =
   var blob: array[BYTES_PER_BLOB, byte]
   var blobs: seq[KzgBlob]
   for i in 0..<n:
@@ -45,7 +45,7 @@ proc createSampleKzgBlobs(n: int): Result[seq[KzgBlob], cstring] =
         blob[i] = MAX_TOP_BYTE
     blobs.add(KzgBlob(bytes: blob))
 
-  ok(blobs)
+  blobs
 
 proc chunks[T](lst: seq[T], n: int): seq[seq[T]] =
   ## Helper that splits a list into N sized chunks.
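Note: createSampleKzgBlobs has no failure path, so returning the seq directly removes the Result wrapper here and the .get calls at each call site (see the test hunks below). A generic before/after illustration, assuming nim-results is the Result implementation in use:

import results

# Old shape: an infallible helper still forces callers through Result.
proc wrappedMake(n: int): Result[seq[int], cstring] =
  ok(newSeq[int](n))

# New shape: callers use the value directly.
proc plainMake(n: int): seq[int] =
  newSeq[int](n)

doAssert wrappedMake(3).get() == plainMake(3)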
@@ -59,7 +59,7 @@ suite "EIP-7594 Unit Tests":
     let
       blob_count = 2
       input_blobs = createSampleKzgBlobs(blob_count)
-      extended_matrix = compute_extended_matrix(input_blobs.get)
+      extended_matrix = compute_extended_matrix(input_blobs)
     doAssert extended_matrix.get.len == kzg_abi.CELLS_PER_EXT_BLOB * blob_count
     let
       chunkSize = kzg_abi.CELLS_PER_EXT_BLOB
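Note: extended_matrix keeps its .get because compute_extended_matrix itself still returns a Result; only the input blobs lost theirs. The doAssert pins the flat matrix size at one row of cells per blob; worked numbers, assuming the c-kzg-4844 value CELLS_PER_EXT_BLOB = 128:

const CELLS_PER_EXT_BLOB = 128  # kzg_abi value, assumed here
let blob_count = 2
# blob_count rows of CELLS_PER_EXT_BLOB cells, flattened:
doAssert CELLS_PER_EXT_BLOB * blob_count == 256
# chunking by CELLS_PER_EXT_BLOB (the chunkSize above) recovers the rows:
doAssert (CELLS_PER_EXT_BLOB * blob_count) div CELLS_PER_EXT_BLOB == blob_count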
@@ -79,7 +79,7 @@ suite "EIP-7594 Unit Tests":
     let
       blob_count = 2
       blobs = createSampleKzgBlobs(blob_count)
-      extended_matrix = compute_extended_matrix(blobs.get)
+      extended_matrix = compute_extended_matrix(blobs)
 
     # Construct a matrix with some entries missing
     var partial_matrix: seq[MatrixEntry]