add more extensive da checks

parent 028925851c
commit c2419a5132
@@ -420,19 +420,28 @@ proc initFullNode(
           localSubnetCount))
       accumulatedColumns = dataColumnQuarantine[].accumulateDataColumns(forkyBlck)

-      for ac in accumulatedColumns:
-        if ac notin localCustodyColumns:
-          # We don't have all the data columns for this block, so we have
-          # to put it in columnless quarantine.
-          if not quarantine[].addColumnless(dag.finalizedHead.slot, forkyBlck):
-            return err(VerifierError.UnviableFork)
-          else:
-            return err(VerifierError.MissingParent)
-        else:
-          let data_columns = dataColumnQuarantine[].popDataColumns(forkyBlck.root, forkyBlck)
-          return await blockProcessor[].addBlock(MsgSource.gossip, signedBlock,
-            Opt.none(BlobSidecars), Opt.some(data_columns),
-            maybeFinalized = maybeFinalized)
+      if accumulatedColumns.len == 0:
+        # We don't have all the data columns for this block, so we have
+        # to put it in columnless quarantine.
+        if not quarantine[].addColumnless(dag.finalizedHead.slot, forkyBlck):
+          return err(VerifierError.UnviableFork)
+        else:
+          return err(VerifierError.MissingParent)
+      elif supernode == true and accumulatedColumns.len >= localCustodyColumns.len div 2:
+        let data_columns = dataColumnQuarantine[].popDataColumns(forkyBlck.root, forkyBlck)
+        return await blockProcessor[].addBlock(MsgSource.gossip, signedBlock,
+          Opt.none(BlobSidecars), Opt.some(data_columns),
+          maybeFinalized = maybeFinalized)
+      elif supernode == false and accumulatedColumns.len <= localCustodyColumns.len div 2:
+        let data_columns = dataColumnQuarantine[].popDataColumns(forkyBlck.root, forkyBlck)
+        return await blockProcessor[].addBlock(MsgSource.gossip, signedBlock,
+          Opt.none(BlobSidecars), Opt.some(data_columns),
+          maybeFinalized = maybeFinalized)
+      else:
+        return await blockProcessor[].addBlock(MsgSource.gossip, signedBlock,
+          Opt.none(BlobSidecars), Opt.none(DataColumnSidecars),
+          maybeFinalized = maybeFinalized)
     else:
       return await blockProcessor[].addBlock(MsgSource.gossip, signedBlock,
         Opt.none(BlobSidecars), Opt.none(DataColumnSidecars),
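The new branching is, in essence, a three-way decision on how many of the locally custodied columns have accumulated for the block. Below is a minimal sketch of that decision as a pure function; `DaDecision` and `daCheck` are hypothetical names, plain integers stand in for the column collections, and only the `len div 2` threshold arithmetic mirrors the committed code.

```nim
# Hypothetical sketch of the DA-check branching above; not code from the
# repository. Only the `div 2` threshold logic mirrors the diff.
type
  DaDecision = enum
    dadQuarantineColumnless  ## no columns accumulated; quarantine the block
    dadProcessWithColumns    ## threshold met; pop columns and process
    dadProcessWithoutColumns ## threshold not met; process without columns

func daCheck(accumulated, custody: int, supernode: bool): DaDecision =
  if accumulated == 0:
    dadQuarantineColumnless
  elif supernode and accumulated >= custody div 2:
    dadProcessWithColumns
  elif not supernode and accumulated <= custody div 2:
    dadProcessWithColumns
  else:
    dadProcessWithoutColumns

when isMainModule:
  # E.g. a node custodying 128 columns as a supernode needs at least 64.
  doAssert daCheck(64, 128, supernode = true) == dadProcessWithColumns
  doAssert daCheck(63, 128, supernode = true) == dadProcessWithoutColumns
  # With nothing accumulated, the block goes to columnless quarantine.
  doAssert daCheck(0, 128, supernode = true) == dadQuarantineColumnless
```

Note the asymmetry as committed: the supernode branch requires at least half of the custody columns, while the non-supernode branch fires when at most half are present.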
@@ -360,11 +360,10 @@ func groupDataColumns*[T](req: SyncRequest[T],
       # The following data column sidecars, where they exist, MUST be sent in consecutive (slot, index) order.
       # https://github.com/ethereum/consensus-specs/blob/v1.5.0-alpha.3/specs/_features/eip7594/p2p-interface.md
       let header = forkyBlck.toSignedBeaconBlockHeader()
-      for column_idx in data_columns
-        if column_cursor >= data_columns.len:
-          return err("DataColumnSidecar: response too short")
+      for column_idx in 0..<data_columns.len:
         let data_column_sidecar = data_columns[column_cursor]
-        if data_column_sidecar.index != ColumnIndex column_idx
+        if data_column_sidecar.index != ColumnIndex column_idx:
+          return err("DataColumnSidecar: invalid index")
         if data_column_sidecar.signed_block_header != header:
           return err("DataColumnSidecar: unexpected signed_block_header")
         grouped[block_idx].add(data_column_sidecar)
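The two per-column checks in the fixed loop, consecutive indices and a matching signed block header, can be exercised in isolation. Below is a minimal sketch under simplified, hypothetical types: a string stands in for the SignedBeaconBlockHeader, and unlike the real `groupDataColumns` there is no separate `column_cursor` spanning blocks.

```nim
# Hypothetical, simplified model of the per-column validation; not the
# repository's types. A string replaces the signed block header.
type
  ColumnIndex = uint64
  ColumnSidecar = object
    index: ColumnIndex
    header: string

func checkColumns(columns: seq[ColumnSidecar],
                  expected: string): tuple[ok: bool, err: string] =
  for column_idx in 0 ..< columns.len:
    let sidecar = columns[column_idx]
    # Sidecars must arrive in consecutive (slot, index) order.
    if sidecar.index != ColumnIndex(column_idx):
      return (false, "DataColumnSidecar: invalid index")
    # Every sidecar must reference the block's signed header.
    if sidecar.header != expected:
      return (false, "DataColumnSidecar: unexpected signed_block_header")
  (true, "")

when isMainModule:
  let inOrder = @[ColumnSidecar(index: 0, header: "h"),
                  ColumnSidecar(index: 1, header: "h")]
  doAssert checkColumns(inOrder, "h").ok
  # An index gap or out-of-order column is rejected.
  doAssert not checkColumns(@[ColumnSidecar(index: 1, header: "h")], "h").ok
```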