add more specifics to reconstruction and block addition conditions
parent 18fe511cc2
commit dfcc89138f

@@ -232,7 +232,7 @@ proc storeBackfillBlock(
       columnsOk = r.isOk()

   if not columnsOk:
-    if dataColumnsOpt.isSome:
+    if dataColumnsOpt.isSome and dataColumnsOpt.get.len >= (NUMBER_OF_COLUMNS div 2):
       let
         data_columns = dataColumnsOpt.get
         recovered_cps =

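Note: the tightened condition above only attempts column recovery during backfill once at least half of the columns are available, which is the minimum the erasure coding needs to recover the rest. A minimal sketch of that precondition, assuming the spec value NUMBER_OF_COLUMNS = 128; the helper name is illustrative and not part of this patch:

const NUMBER_OF_COLUMNS = 128  # assumed spec value, for illustration only

# Hypothetical helper: recovering the missing columns is only possible
# once at least half of the extended columns have been collected.
proc canAttemptRecovery(columnCount: int): bool =
  columnCount >= (NUMBER_OF_COLUMNS div 2)

assert canAttemptRecovery(64)
assert not canAttemptRecovery(63)
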
@@ -398,11 +398,15 @@ proc processDataColumnSidecar*(
     let columnless = o.unsafeGet()
     withBlck(columnless):
       when consensusFork >= ConsensusFork.Deneb:
-        if self.dataColumnQuarantine[].hasEnoughDataColumns(forkyBlck):
+        if self.dataColumnQuarantine[].hasMissingDataColumns(forkyBlck):
+          self.blockProcessor[].enqueueBlock(
+            MsgSource.gossip, columnless,
+            Opt.none(BlobSidecars),
+            Opt.some(self.dataColumnQuarantine[].popDataColumns(block_root, forkyBlck)))
+        elif self.dataColumnQuarantine[].hasEnoughDataColumns(forkyBlck):
           let
             columns = self.dataColumnQuarantine[].gatherDataColumns(forkyBlck)
-          if columns.len >= (NUMBER_OF_COLUMNS div 2) or
-              self.dataColumnQuarantine[].supernode:
+          if columns.len >= (NUMBER_OF_COLUMNS div 2):
             let
               reconstructed_columns =
                 self.processReconstructionFromGossip(forkyBlck, columns)

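Note on the reordered branch: the hasMissingDataColumns case is handled first, enqueueing the columnless block together with the columns popped from quarantine; reconstruction from gossip becomes an elif on hasEnoughDataColumns, and the former supernode shortcut is removed, so reconstruction is always gated on having gathered at least NUMBER_OF_COLUMNS div 2 columns. A rough decision-table sketch of that flow, using plain booleans and counts as stand-ins for the real quarantine state (illustrative only):

type ColumnAction = enum
  EnqueueWithPoppedColumns, ReconstructFromGossip, Wait

proc decide(hasMissing, hasEnough: bool, gathered, totalColumns: int): ColumnAction =
  # Mirrors the branch order above: enqueue first, reconstruct second,
  # otherwise keep waiting for more columns.
  if hasMissing:
    EnqueueWithPoppedColumns
  elif hasEnough and gathered >= (totalColumns div 2):
    ReconstructFromGossip
  else:
    Wait

assert decide(true, false, 0, 128) == EnqueueWithPoppedColumns
assert decide(false, true, 70, 128) == ReconstructFromGossip
assert decide(false, true, 40, 128) == Wait
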
@@ -191,8 +191,7 @@ proc recover_cells_and_proofs*(
     return err ("DataColumns do not have the same length")

   var
-    recovered_cps: seq[CellsAndProofs]
-  recovered_cps.setLen(blobCount)
+    recovered_cps = newSeq[CellsAndProofs](blobCount)

   for blobIdx in 0..<blobCount:
     var

@@ -217,7 +216,7 @@ proc recover_cells_and_proofs*(
     if not recovered_cells_and_proofs.isOk:
       return err("Issue with computing cells and proofs!")

-    recovered_cps.add recovered_cells_and_proofs.get
+    recovered_cps[bIdx] = recovered_cells_and_proofs.get

   ok(recovered_cps)

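Note: recovered_cps is now allocated once with newSeq[CellsAndProofs](blobCount) and each result is written at its blob index. The previous combination of setLen(blobCount) followed by add kept the pre-sized, default-initialized slots and appended the real results after them. A minimal sketch of the difference, with ints standing in for CellsAndProofs:

let blobCount = 3

# Old pattern: setLen plus add grows the seq past blobCount and leaves
# default-initialized entries at the front.
var appended: seq[int]
appended.setLen(blobCount)
for blobIdx in 0 ..< blobCount:
  appended.add(blobIdx + 1)
assert appended == @[0, 0, 0, 1, 2, 3]

# New pattern: pre-allocate once and assign by index.
var indexed = newSeq[int](blobCount)
for blobIdx in 0 ..< blobCount:
  indexed[blobIdx] = blobIdx + 1
assert indexed == @[1, 2, 3]
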
@@ -285,16 +284,28 @@ proc get_data_column_sidecars*(signed_beacon_block: deneb.TrustedSignedBeaconBlock
   var
     sidecars =
       newSeqOfCap[DataColumnSidecar](kzg_abi.CELLS_PER_EXT_BLOB)
+    # Flattened the cells and proofs from the `CellsAndProofs` type to
+    # make it simpler to handle overall
+    flattened_cells =
+      newSeq[CellBytes](cellsAndProofs.len)
+    flattened_proofs =
+      newSeq[ProofBytes](cellsAndProofs.len)

+  for i in 0..<cellsAndProofs.len:
+    flattened_cells[i] = cellsAndProofs[i].cells
+    flattened_proofs[i] = cellsAndProofs[i].proofs
+
   for column_index in 0..<NUMBER_OF_COLUMNS:
     var
       column_cells: seq[KzgCell]
       column_proofs: seq[KzgProof]
-    for i in 0..<cellsAndProofs.len:
-      column_cells.add(cellsAndProofs[i].cells)
-      column_proofs.add(cellsAndProofs[i].proofs)
+    for row_index in 0..<cellsAndProofs.len:
+      column_cells.add(flattened_cells[row_index][column_index])
+      column_proofs.add(flattened_proofs[row_index][column_index])

     column_proofs.setLen(blck.body.blob_kzg_commitments.len)
+    column_cells.setLen(blck.body.blob_kzg_commitments.len)

     var sidecar = DataColumnSidecar(
       index: ColumnIndex(column_index),

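Note: the sidecar builder now flattens each blob's cells and proofs into row-major seqs up front and then assembles each column by taking entry column_index from every row, i.e. a row-to-column transposition; the old loop appended each blob's entire cells and proofs arrays to the column rather than the single entry at column_index. A minimal, self-contained sketch of that gather, with strings standing in for cells:

let rows = @[
  @["a0", "a1", "a2"],   # cells of blob 0 (one row)
  @["b0", "b1", "b2"],   # cells of blob 1
]

var columns: seq[seq[string]]
for columnIndex in 0 ..< rows[0].len:
  var column: seq[string]
  for rowIndex in 0 ..< rows.len:
    # column j is built from entry j of every row
    column.add(rows[rowIndex][columnIndex])
  columns.add(column)

assert columns == @[@["a0", "b0"], @["a1", "b1"], @["a2", "b2"]]
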
@@ -361,6 +372,7 @@ proc get_data_column_sidecars*(signed_beacon_block: deneb.SignedBeaconBlock |
       column_proofs.add(flattened_proofs[row_index][column_index])

     column_proofs.setLen(blck.body.blob_kzg_commitments.len)
+    column_cells.setLen(blck.body.blob_kzg_commitments.len)

     var sidecar = DataColumnSidecar(
       index: ColumnIndex(column_index),