Mirror of https://github.com/logos-storage/logos-storage-network-crawler.git (synced 2026-01-05 06:53:11 +00:00)
Adds requestCheckDelay config option

Commit 693b421a61, parent c1b0c30cc0.
@@ -67,7 +67,7 @@ method start*(c: ChainMetrics): Future[?!void] {.async.} =
     return await c.step()

   if c.state.config.marketplaceEnable:
-    await c.state.whileRunning(onStep, 10.minutes)
+    await c.state.whileRunning(onStep, c.state.config.requestCheckDelay.minutes)

   return success()

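The interval passed to whileRunning above is no longer the literal 10.minutes; it is the configured integer converted with .minutes into a Duration. Below is a minimal sketch of that pattern, assuming chronos-style Durations and a simple run-forever loop; the names checkRequests and runPeriodically are invented for illustration and are not the crawler's code.

import chronos

proc checkRequests() {.async.} =
  # stand-in for the crawler's onStep callback
  echo "checking storage request states"

proc runPeriodically(delayMinutes: int) {.async.} =
  # int config value (e.g. requestCheckDelay) -> chronos Duration
  let interval = delayMinutes.minutes
  while true:                 # the real crawler loops only while running
    await checkRequests()
    await sleepAsync(interval)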
@@ -10,7 +10,7 @@ let doc =
 Codex Network Crawler. Generates network metrics.

 Usage:
-  codexcrawler [--logLevel=<l>] [--publicIp=<a>] [--metricsAddress=<ip>] [--metricsPort=<p>] [--dataDir=<dir>] [--discoveryPort=<p>] [--bootNodes=<n>] [--dhtEnable=<e>] [--stepDelay=<ms>] [--revisitDelay=<m>] [--checkDelay=<m>] [--expiryDelay=<m>] [--marketplaceEnable=<e>] [--ethProvider=<a>] [--marketplaceAddress=<a>]
+  codexcrawler [--logLevel=<l>] [--publicIp=<a>] [--metricsAddress=<ip>] [--metricsPort=<p>] [--dataDir=<dir>] [--discoveryPort=<p>] [--bootNodes=<n>] [--dhtEnable=<e>] [--stepDelay=<ms>] [--revisitDelay=<m>] [--checkDelay=<m>] [--expiryDelay=<m>] [--marketplaceEnable=<e>] [--ethProvider=<a>] [--marketplaceAddress=<a>] [--requestCheckDelay=<m>]

 Options:
   --logLevel=<l>            Sets log level [default: INFO]
@@ -20,14 +20,17 @@ Options:
   --dataDir=<dir>           Directory for storing data [default: crawler_data]
   --discoveryPort=<p>       Port used for DHT [default: 8090]
   --bootNodes=<n>           Semi-colon-separated list of Codex bootstrap SPRs [default: testnet_sprs]
+
   --dhtEnable=<e>           Set to "1" to enable DHT crawler [default: 0]
   --stepDelay=<ms>          Delay in milliseconds per node visit [default: 1000]
   --revisitDelay=<m>        Delay in minutes after which a node can be revisited [default: 60]
   --checkDelay=<m>          Delay with which the 'revisitDelay' is checked for all known nodes [default: 10]
   --expiryDelay=<m>         Delay in minutes after which unresponsive nodes are discarded [default: 1440] (24h)
+
   --marketplaceEnable=<e>   Set to "1" to enable marketplace metrics [default: 1]
   --ethProvider=<a>         Address including http(s) or ws of the eth provider
   --marketplaceAddress=<a>  Eth address of Codex contracts deployment
+  --requestCheckDelay=<m>   Delay in minutes after which storage contract status is (re)checked [default: 10]
 """

 import strutils
@@ -51,6 +54,7 @@ type Config* = ref object
   marketplaceEnable*: bool
   ethProvider*: string
   marketplaceAddress*: string
+  requestCheckDelay*: int

 proc `$`*(config: Config): string =
   "Crawler:" & " logLevel=" & config.logLevel & " publicIp=" & config.publicIp &
@@ -61,7 +65,7 @@ proc `$`*(config: Config): string =
     " expiryDelayMins=" & $config.expiryDelayMins & " checkDelayMins=" &
     $config.checkDelayMins & " marketplaceEnable=" & $config.marketplaceEnable &
     " ethProvider=" & config.ethProvider & " marketplaceAddress=" &
-    config.marketplaceAddress
+    config.marketplaceAddress & " requestCheckDelay=" & $config.requestCheckDelay

 proc getDefaultTestnetBootNodes(): seq[string] =
   @[
@@ -124,4 +128,5 @@ proc parseConfig*(): Config =
     marketplaceEnable: getEnable(get("--marketplaceEnable")),
     ethProvider: get("--ethProvider"),
     marketplaceAddress: get("--marketplaceAddress"),
+    requestCheckDelay: parseInt(get("--requestCheckDelay"))
   )
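For reference, here is a minimal, self-contained sketch of how such a flag can be parsed from a docopt-style doc string. It assumes the Nim docopt package and does not use the module's own get helper, so treat it as an illustration rather than the repository's parsing code.

import strutils
import docopt

let usageDoc = """
Usage:
  codexcrawler [--requestCheckDelay=<m>]

Options:
  --requestCheckDelay=<m>  Delay in minutes after which storage contract status is (re)checked [default: 10]
"""

let args = docopt(usageDoc)
# docopt applies the [default: 10] from the Options section when the
# flag is not given on the command line.
let requestCheckDelay = parseInt($args["--requestCheckDelay"])
echo "requestCheckDelay = ", requestCheckDelay, " minutes"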
@@ -19,9 +19,10 @@ EXPIRYDELAY=${CRAWLER_EXPIRYDELAY:-1440}
 MARKETPLACEENABLE=${CRAWLER_MARKETPLACEENABLE:-1}
 ETHPROVIDER=${CRAWLER_ETHPROVIDER:-NULL}
 MARKETPLACEADDRESS=${CRAWLER_MARKETPLACEADDRESS:-NULL}
+REQUESTCHECKDELAY=${CRAWLER_REQUESTCHECKDELAY:-10}

 # Update CLI arguments
-set -- "$@" --logLevel="${LOGLEVEL}" --publicIp="${PUBLICIP}" --metricsAddress="${METRICSADDRESS}" --metricsPort="${METRICSPORT}" --dataDir="${DATADIR}" --discoveryPort="${DISCPORT}" --bootNodes="${BOOTNODES}" --dhtEnable="${DHTENABLE}" --stepDelay="${STEPDELAY}" --revisitDelay="${REVISITDELAY}" --expiryDelay="${EXPIRYDELAY}" --checkDelay="${CHECKDELAY}" --marketplaceEnable="${MARKETPLACEENABLE}" --ethProvider="${ETHPROVIDER}" --marketplaceAddress="${MARKETPLACEADDRESS}"
+set -- "$@" --logLevel="${LOGLEVEL}" --publicIp="${PUBLICIP}" --metricsAddress="${METRICSADDRESS}" --metricsPort="${METRICSPORT}" --dataDir="${DATADIR}" --discoveryPort="${DISCPORT}" --bootNodes="${BOOTNODES}" --dhtEnable="${DHTENABLE}" --stepDelay="${STEPDELAY}" --revisitDelay="${REVISITDELAY}" --expiryDelay="${EXPIRYDELAY}" --checkDelay="${CHECKDELAY}" --marketplaceEnable="${MARKETPLACEENABLE}" --ethProvider="${ETHPROVIDER}" --marketplaceAddress="${MARKETPLACEADDRESS}" --requestCheckDelay="${REQUESTCHECKDELAY}"

 # Run
 echo "Run Codex Crawler"
@@ -39,10 +39,10 @@ suite "ChainMetrics":
   proc onStep() {.async.} =
     (await state.steppers[0]()).tryGet()

-  test "start should start stepper for 10 minutes":
+  test "start should start stepper for config.requestCheckDelay minutes":
     check:
       state.delays.len == 1
-      state.delays[0] == 10.minutes
+      state.delays[0] == state.config.requestCheckDelay.minutes

   test "onStep should remove old non-running requests from request store":
     let rid = genRid()
@@ -22,7 +22,11 @@ method whileRunning*(s: MockState, step: OnStep, delay: Duration) {.async.} =
 proc createMockState*(): MockState =
   MockState(
     status: ApplicationStatus.Running,
-    config: Config(dhtEnable: true, marketplaceEnable: true),
+    config: Config(
+      dhtEnable: true,
+      marketplaceEnable: true,
+      requestCheckDelay: 4
+    ),
     events: Events(
       nodesFound: newAsyncDataEvent[seq[Nid]](),
       newNodesDiscovered: newAsyncDataEvent[seq[Nid]](),
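The test assertion state.delays[0] == state.config.requestCheckDelay.minutes works because the mocked whileRunning records each stepper and delay instead of scheduling a loop. Below is a hedged sketch of that recording pattern, using simplified stand-in types (MockStateSketch, OnStepSketch) rather than the repository's MockState and OnStep.

import chronos

type
  OnStepSketch = proc(): Future[void]   # simplified stand-in for the crawler's OnStep
  MockStateSketch = ref object
    steppers: seq[OnStepSketch]         # field names follow the tests above
    delays: seq[Duration]

proc whileRunning(s: MockStateSketch, step: OnStepSketch, delay: Duration) {.async.} =
  # Record what was requested instead of running a periodic loop,
  # so a test can inspect the stepper and the configured delay.
  s.steppers.add(step)
  s.delays.add(delay)

when isMainModule:
  let s = MockStateSketch()
  proc noop() {.async.} = discard
  waitFor s.whileRunning(noop, 4.minutes)
  doAssert s.delays[0] == 4.minutes     # mirrors the requestCheckDelay assertion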