# fluffy
# Copyright (c) 2022-2024 Status Research & Development GmbH
# Licensed and distributed under either of
#   * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
#   * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
# at your option. This file may not be copied, modified, or distributed except according to those terms.

{.push raises: [].}

import
  results,
  chronos,
  chronicles,
  eth/p2p/discoveryv5/[protocol, enr],
  beacon_chain/spec/forks,
  beacon_chain/gossip_processing/light_client_processor,
  ../wire/[portal_protocol, portal_stream, portal_protocol_config],
  "."/[beacon_content, beacon_db, beacon_validation, beacon_chain_historical_summaries]

export beacon_content, beacon_db

logScope:
  topics = "portal_beacon"

type BeaconNetwork* = ref object
  portalProtocol*: PortalProtocol
  beaconDb*: BeaconDb
  processor*: ref LightClientProcessor
  contentQueue*: AsyncQueue[(Opt[NodeId], ContentKeysList, seq[seq[byte]])]
  forkDigests*: ForkDigests
  trustedBlockRoot*: Opt[Eth2Digest]
  processContentLoop: Future[void]
  statusLogLoop: Future[void]

func toContentIdHandler(contentKey: ContentKeyByteList): results.Opt[ContentId] =
  ok(toContentId(contentKey))

proc validateHistoricalSummaries(
    n: BeaconNetwork, summariesWithProof: HistoricalSummariesWithProof
): Result[void, string] =
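  ## Verify the proof of a `HistoricalSummariesWithProof` object against the
  ## state root of the latest finality update stored in the local database.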
  let
    finalityUpdate = getLastFinalityUpdate(n.beaconDb).valueOr:
      return err("Require finality update for verification")

    # TODO: compare slots first
    stateRoot = withForkyFinalityUpdate(finalityUpdate):
      when lcDataFork > LightClientDataFork.None:
        forkyFinalityUpdate.finalized_header.beacon.state_root
      else:
        # Note: this should always be the case as historical_summaries was
        # introduced in Capella.
        return err("Require Altair or > for verification")

  if summariesWithProof.verifyProof(stateRoot):
    ok()
  else:
    err("Failed verifying historical_summaries proof")

proc getContent(
    n: BeaconNetwork, contentKey: ContentKey
): Future[results.Opt[seq[byte]]] {.async: (raises: [CancelledError]).} =
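  ## Get the encoded content for the given content key, first from the local
  ## database and otherwise through a content lookup on the Portal network.
  ## Returns Opt.none when the content cannot be found.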
  let
    contentKeyEncoded = encode(contentKey)
    contentId = toContentId(contentKeyEncoded)
    localContent = n.portalProtocol.dbGet(contentKeyEncoded, contentId)

  if localContent.isSome():
    return localContent

  let contentRes = await n.portalProtocol.contentLookup(contentKeyEncoded, contentId)

  if contentRes.isNone():
    warn "Failed fetching content from the beacon chain network",
      contentKey = contentKeyEncoded
    Opt.none(seq[byte])
  else:
    Opt.some(contentRes.value().content)

proc getLightClientBootstrap*(
    n: BeaconNetwork, trustedRoot: Digest
): Future[results.Opt[ForkedLightClientBootstrap]] {.async: (raises: [CancelledError]).} =
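  ## Get and decode the light client bootstrap for the given trusted block
  ## root. Returns Opt.none when the content cannot be found or fails to decode.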
  let
    contentKey = bootstrapContentKey(trustedRoot)
    contentResult = await n.getContent(contentKey)

  if contentResult.isNone():
    return Opt.none(ForkedLightClientBootstrap)

  let
    bootstrap = contentResult.value()
    decodingResult = decodeLightClientBootstrapForked(n.forkDigests, bootstrap)

  if decodingResult.isErr():
    return Opt.none(ForkedLightClientBootstrap)
  else:
    # TODO: Not doing validation for now, as it should probably be done by the
    # layer above.
    return Opt.some(decodingResult.value())

proc getLightClientUpdatesByRange*(
    n: BeaconNetwork, startPeriod: SyncCommitteePeriod, count: uint64
): Future[results.Opt[ForkedLightClientUpdateList]] {.
    async: (raises: [CancelledError])
.} =
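  ## Get and decode the light client updates for `count` sync committee
  ## periods, starting at `startPeriod`.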
  let
    contentKey = updateContentKey(distinctBase(startPeriod), count)
    contentResult = await n.getContent(contentKey)

  if contentResult.isNone():
    return Opt.none(ForkedLightClientUpdateList)

  let
    updates = contentResult.value()
    decodingResult = decodeLightClientUpdatesByRange(n.forkDigests, updates)

  if decodingResult.isErr():
    Opt.none(ForkedLightClientUpdateList)
  else:
    # TODO: Not doing validation for now, as it should probably be done by the
    # layer above.
    Opt.some(decodingResult.value())

proc getLightClientFinalityUpdate*(
    n: BeaconNetwork, finalizedSlot: uint64
): Future[results.Opt[ForkedLightClientFinalityUpdate]] {.
    async: (raises: [CancelledError])
.} =
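  ## Get and decode a light client finality update from the network, keyed by
  ## the given finalized slot.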
  let
    contentKey = finalityUpdateContentKey(finalizedSlot)
    contentResult = await n.getContent(contentKey)

  if contentResult.isNone():
    return Opt.none(ForkedLightClientFinalityUpdate)

  let
    finalityUpdate = contentResult.value()
    decodingResult =
      decodeLightClientFinalityUpdateForked(n.forkDigests, finalityUpdate)

  if decodingResult.isErr():
    return Opt.none(ForkedLightClientFinalityUpdate)
  else:
    return Opt.some(decodingResult.value())

proc getLightClientOptimisticUpdate*(
    n: BeaconNetwork, optimisticSlot: uint64
): Future[results.Opt[ForkedLightClientOptimisticUpdate]] {.
    async: (raises: [CancelledError])
.} =
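  ## Get and decode a light client optimistic update from the network, keyed
  ## by the given slot.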
  let
    contentKey = optimisticUpdateContentKey(optimisticSlot)
    contentResult = await n.getContent(contentKey)

  if contentResult.isNone():
    return Opt.none(ForkedLightClientOptimisticUpdate)

  let
    optimisticUpdate = contentResult.value()
    decodingResult =
      decodeLightClientOptimisticUpdateForked(n.forkDigests, optimisticUpdate)

  if decodingResult.isErr():
    Opt.none(ForkedLightClientOptimisticUpdate)
  else:
    Opt.some(decodingResult.value())

proc getHistoricalSummaries*(
    n: BeaconNetwork, epoch: uint64
): Future[results.Opt[HistoricalSummaries]] {.async: (raises: [CancelledError]).} =
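  ## Get the historical summaries for the given epoch and verify the
  ## accompanying proof against the state root of the latest finality update.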
  # Note: content taken from the local db does not need to have its proof
  # verified again.
  let
    contentKey = historicalSummariesContentKey(epoch)
    content = ?await n.getContent(contentKey)

    summariesWithProof = decodeSsz(n.forkDigests, content, HistoricalSummariesWithProof).valueOr:
      return Opt.none(HistoricalSummaries)

  if n.validateHistoricalSummaries(summariesWithProof).isOk():
    Opt.some(summariesWithProof.historical_summaries)
  else:
    Opt.none(HistoricalSummaries)

proc new*(
    T: type BeaconNetwork,
    portalNetwork: PortalNetwork,
    baseProtocol: protocol.Protocol,
    beaconDb: BeaconDb,
    streamManager: StreamManager,
    forkDigests: ForkDigests,
    trustedBlockRoot: Opt[Eth2Digest],
    bootstrapRecords: openArray[Record] = [],
    portalConfig: PortalProtocolConfig = defaultPortalProtocolConfig,
): T =
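  ## Create a BeaconNetwork instance: set up the content queue and stream and
  ## instantiate the PortalProtocol for the beacon subnetwork. When no trusted
  ## block root is provided, the latest block root from the database is used.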
  let
    contentQueue = newAsyncQueue[(Opt[NodeId], ContentKeysList, seq[seq[byte]])](50)

    stream = streamManager.registerNewStream(contentQueue)

    portalProtocol = PortalProtocol.new(
      baseProtocol,
      getProtocolId(portalNetwork, PortalSubnetwork.beacon),
      toContentIdHandler,
      createGetHandler(beaconDb),
      createStoreHandler(beaconDb),
      createRadiusHandler(beaconDb),
      stream,
      bootstrapRecords,
      config = portalConfig,
    )

  let beaconBlockRoot =
    # TODO: Need to have some form of weak subjectivity check here.
    if trustedBlockRoot.isNone():
      beaconDb.getLatestBlockRoot()
    else:
      trustedBlockRoot

  BeaconNetwork(
    portalProtocol: portalProtocol,
    beaconDb: beaconDb,
    contentQueue: contentQueue,
    forkDigests: forkDigests,
    trustedBlockRoot: beaconBlockRoot,
  )

proc lightClientVerifier(
    processor: ref LightClientProcessor, obj: SomeForkedLightClientObject
): Future[Result[void, VerifierError]] {.async: (raises: [CancelledError], raw: true).} =
  let resfut = Future[Result[void, VerifierError]].Raising([CancelledError]).init(
    "lightClientVerifier"
  )
  processor[].addObject(MsgSource.gossip, obj, resfut)
  resfut

proc updateVerifier*(
    processor: ref LightClientProcessor, obj: ForkedLightClientUpdate
): auto =
  processor.lightClientVerifier(obj)

proc validateContent(
    n: BeaconNetwork, content: seq[byte], contentKey: ContentKeyByteList
): Future[Result[void, string]] {.async: (raises: [CancelledError]).} =
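  ## Validate a single piece of offered content according to its content type:
  ## bootstraps against the latest finalized header or the trusted block root,
  ## updates through the light client processor, and historical summaries
  ## against their proof.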
  let key = contentKey.decode().valueOr:
    return err("Error decoding content key")

  case key.contentType
  of unused:
    raiseAssert "Should not be used and fail at decoding"
  of lightClientBootstrap:
    let bootstrap = decodeLightClientBootstrapForked(n.forkDigests, content).valueOr:
      return err("Error decoding bootstrap: " & error)

    withForkyBootstrap(bootstrap):
      when lcDataFork > LightClientDataFork.None:
        # Try to get the last finality update from the db. If the node is LC
        # synced, this data should be there. Then a check is done to see
        # whether the headers are the same.
        # Note that this will only work for newly created LC bootstraps. If
        # backfill of bootstraps is to be supported, they need to be provided
        # with a proof against historical summaries.
        # See also:
        # https://github.com/ethereum/portal-network-specs/issues/296
        let finalityUpdate = n.beaconDb.getLastFinalityUpdate()
        if finalityUpdate.isOk():
          withForkyFinalityUpdate(finalityUpdate.value):
            when lcDataFork > LightClientDataFork.None:
              if forkyFinalityUpdate.finalized_header.beacon !=
                  forkyBootstrap.header.beacon:
                return err("Bootstrap header does not match recent finalized header")

              if forkyBootstrap.isValidBootstrap(n.beaconDb.cfg):
                ok()
              else:
                err("Error validating LC bootstrap")
            else:
              err("No LC data before Altair")
        elif n.trustedBlockRoot.isSome():
          # If not yet synced, try the trusted block root
          let blockRoot = hash_tree_root(forkyBootstrap.header.beacon)
          if blockRoot != n.trustedBlockRoot.get():
            return err("Bootstrap header does not match trusted block root")

          if forkyBootstrap.isValidBootstrap(n.beaconDb.cfg):
            ok()
          else:
            err("Error validating LC bootstrap")
        else:
          err("Cannot validate LC bootstrap")
      else:
        err("No LC data before Altair")
  of lightClientUpdate:
    let updates = decodeLightClientUpdatesByRange(n.forkDigests, content).valueOr:
      return err("Error decoding content: " & error)

    # Only new updates can be verified as they get applied by the LC processor,
    # so verification only works by being part of the sync process.
    # This means that no backfill is possible; for that we need updates that
    # are provided with a proof against historical_summaries, see also:
    # https://github.com/ethereum/portal-network-specs/issues/305
    # It is however a little more tricky: even updates that have not been
    # applied yet may fail here if the list of updates does not start with the
    # next update that is currently required for the sync.
    for update in updates:
      let res = await n.processor.updateVerifier(update)
      if res.isErr():
        return err("Error verifying LC updates: " & $res.error)

    ok()
  of lightClientFinalityUpdate:
    let update = decodeLightClientFinalityUpdateForked(n.forkDigests, content).valueOr:
      return err("Error decoding content: " & error)

    let res = n.processor[].processLightClientFinalityUpdate(MsgSource.gossip, update)
    if res.isErr():
      err("Error processing update: " & $res.error[1])
    else:
      ok()
  of lightClientOptimisticUpdate:
    let update = decodeLightClientOptimisticUpdateForked(n.forkDigests, content).valueOr:
      return err("Error decoding content: " & error)

    let res = n.processor[].processLightClientOptimisticUpdate(MsgSource.gossip, update)
    if res.isErr():
      err("Error processing update: " & $res.error[1])
    else:
      ok()
  of beacon_content.ContentType.historicalSummaries:
    let summariesWithProof =
      ?decodeSsz(n.forkDigests, content, HistoricalSummariesWithProof)

    n.validateHistoricalSummaries(summariesWithProof)

proc validateContent(
    n: BeaconNetwork, contentKeys: ContentKeysList, contentItems: seq[seq[byte]]
): Future[bool] {.async: (raises: [CancelledError]).} =
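  ## Validate and store a batch of offered content items. Returns false as
  ## soon as one item fails validation.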
  # The content passed here can have fewer items than contentKeys, but not more.
  for i, contentItem in contentItems:
    let
      contentKey = contentKeys[i]
      validation = await n.validateContent(contentItem, contentKey)
    if validation.isOk():
      let contentIdOpt = n.portalProtocol.toContentId(contentKey)
      if contentIdOpt.isNone():
        error "Received offered content with invalid content key", contentKey
        return false

      let contentId = contentIdOpt.get()
      n.portalProtocol.storeContent(contentKey, contentId, contentItem)

      info "Received offered content validated successfully", contentKey
    else:
      error "Received offered content failed validation",
        contentKey, error = validation.error
      return false

  return true

proc processContentLoop(n: BeaconNetwork) {.async: (raises: []).} =
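  ## Take offered content from the content queue, validate and store it, and
  ## gossip it further into the network.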
  try:
    while true:
      let (srcNodeId, contentKeys, contentItems) = await n.contentQueue.popFirst()

      # When there is one invalid content item, all other content items are
      # dropped and not gossiped around.
      # TODO: Differentiate between failures due to invalid data and failures
      # due to missing network data for validation.
      if await n.validateContent(contentKeys, contentItems):
        asyncSpawn n.portalProtocol.randomGossipDiscardPeers(
          srcNodeId, contentKeys, contentItems
        )
  except CancelledError:
    trace "processContentLoop canceled"

proc statusLogLoop(n: BeaconNetwork) {.async: (raises: []).} =
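  ## Periodically log the number of nodes in the beacon network routing table.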
  try:
    while true:
      info "Beacon network status",
        routingTableNodes = n.portalProtocol.routingTable.len()

      await sleepAsync(60.seconds)
  except CancelledError:
    trace "statusLogLoop canceled"

proc start*(n: BeaconNetwork) =
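  ## Start the Portal protocol and launch the content processing and status
  ## logging loops.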
  info "Starting Portal beacon chain network"

  n.portalProtocol.start()
  n.processContentLoop = processContentLoop(n)
  n.statusLogLoop = statusLogLoop(n)

proc stop*(n: BeaconNetwork) {.async: (raises: []).} =
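  ## Stop the Portal protocol, cancel the running loops and close the beacon
  ## database.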
  info "Stopping Portal beacon chain network"

  var futures: seq[Future[void]]
  futures.add(n.portalProtocol.stop())

  if not n.processContentLoop.isNil():
    futures.add(n.processContentLoop.cancelAndWait())

  if not n.statusLogLoop.isNil():
    futures.add(n.statusLogLoop.cancelAndWait())

  await noCancel(allFutures(futures))

  n.beaconDb.close()

  n.processContentLoop = nil
  n.statusLogLoop = nil