diff --git a/codexcrawler/config.nim b/codexcrawler/config.nim
index 4411e3b..f81dd06 100644
--- a/codexcrawler/config.nim
+++ b/codexcrawler/config.nim
@@ -10,7 +10,7 @@ let doc =
 Codex Network Crawler. Generates network metrics.
 
 Usage:
-  codexcrawler [--logLevel=<level>] [--publicIp=<ip>] [--metricsAddress=<ip>] [--metricsPort=<port>] [--dataDir=<dir>] [--discoveryPort=<port>] [--bootNodes=<sprs>] [--stepDelay=<ms>] [--revisitDelay=<m>] [--checkDelay=<m>] [--expiryDelay=<m>]
+  codexcrawler [--logLevel=<level>] [--publicIp=<ip>] [--metricsAddress=<ip>] [--metricsPort=<port>] [--dataDir=<dir>] [--discoveryPort=<port>] [--bootNodes=<sprs>] [--dhtEnable=<e>] [--stepDelay=<ms>] [--revisitDelay=<m>] [--checkDelay=<m>] [--expiryDelay=<m>] [--marketplaceEnable=<e>] [--ethProvider=<addr>] [--marketplaceAddress=<addr>]
 
 Options:
   --logLevel=<level>           Sets log level [default: INFO]
@@ -20,10 +20,14 @@ Options:
   --dataDir=<dir>              Directory for storing data [default: crawler_data]
   --discoveryPort=<port>       Port used for DHT [default: 8090]
   --bootNodes=<sprs>           Semi-colon-separated list of Codex bootstrap SPRs [default: testnet_sprs]
+  --dhtEnable=<e>              Set to "1" to enable DHT crawler [default: 1]
   --stepDelay=<ms>             Delay in milliseconds per node visit [default: 1000]
   --revisitDelay=<m>           Delay in minutes after which a node can be revisited [default: 60]
   --checkDelay=<m>             Delay with which the 'revisitDelay' is checked for all known nodes [default: 10]
   --expiryDelay=<m>            Delay in minutes after which unresponsive nodes are discarded [default: 1440] (24h)
+  --marketplaceEnable=<e>      Set to "1" to enable marketplace metrics [default: 1]
+  --ethProvider=<addr>         Address including http(s) or ws of the eth provider
+  --marketplaceAddress=<addr>  Eth address of Codex contracts deployment
 """
 
 import strutils
@@ -37,18 +41,27 @@ type Config* = ref object
   dataDir*: string
   discPort*: Port
   bootNodes*: seq[SignedPeerRecord]
+
+  dhtEnable*: bool
   stepDelayMs*: int
   revisitDelayMins*: int
   checkDelayMins*: int
   expiryDelayMins*: int
+  marketplaceEnable*: bool
+  ethProvider*: string
+  marketplaceAddress*: string
+
 proc `$`*(config: Config): string =
   "Crawler:" &
     " logLevel=" & config.logLevel & " publicIp=" & config.publicIp & " metricsAddress=" &
     $config.metricsAddress & " metricsPort=" & $config.metricsPort &
-    " dataDir=" & config.dataDir & " discPort=" & $config.discPort & " bootNodes=" &
-    config.bootNodes.mapIt($it).join(";") & " stepDelay=" & $config.stepDelayMs &
-    " revisitDelayMins=" & $config.revisitDelayMins & " expiryDelayMins=" &
-    $config.expiryDelayMins & " checkDelayMins=" & $config.checkDelayMins
+    " dataDir=" & config.dataDir & " discPort=" & $config.discPort & " dhtEnable=" &
+    $config.dhtEnable & " bootNodes=" & config.bootNodes.mapIt($it).join(";") &
+    " stepDelay=" & $config.stepDelayMs & " revisitDelayMins=" & $config.revisitDelayMins &
+    " expiryDelayMins=" & $config.expiryDelayMins & " checkDelayMins=" &
+    $config.checkDelayMins & " marketplaceEnable=" & $config.marketplaceEnable &
+    " ethProvider=" & config.ethProvider & " marketplaceAddress=" &
+    config.marketplaceAddress
 
 proc getDefaultTestnetBootNodes(): seq[string] =
   @[
@@ -86,6 +99,9 @@ proc stringToSpr(uri: string): SignedPeerRecord =
 proc getBootNodes(input: string): seq[SignedPeerRecord] =
   getBootNodeStrings(input).mapIt(stringToSpr(it))
 
+proc getEnable(input: string): bool =
+  input == "1"
+
 proc parseConfig*(): Config =
   let args = docopt(doc, version = crawlerFullVersion)
 
@@ -100,8 +116,12 @@ proc parseConfig*(): Config =
     dataDir: get("--dataDir"),
     discPort: Port(parseInt(get("--discoveryPort"))),
     bootNodes: getBootNodes(get("--bootNodes")),
+    dhtEnable: getEnable(get("--dhtEnable")),
     stepDelayMs: parseInt(get("--stepDelay")),
     revisitDelayMins: parseInt(get("--revisitDelay")),
     checkDelayMins: parseInt(get("--checkDelay")),
     expiryDelayMins: parseInt(get("--expiryDelay")),
+    marketplaceEnable: getEnable(get("--marketplaceEnable")),
+    ethProvider: get("--ethProvider"),
+    marketplaceAddress: get("--marketplaceAddress"),
   )
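For reference, the flags introduced above can be exercised directly on the command line. This is only a sketch: the provider URL and marketplace address below are placeholder values, not part of this change.

# Hypothetical invocation of the new flags; values are examples only
codexcrawler \
  --bootNodes=testnet_sprs \
  --dhtEnable=1 \
  --marketplaceEnable=1 \
  --ethProvider=ws://localhost:8545 \
  --marketplaceAddress=0x0000000000000000000000000000000000000000

Because getEnable treats only the exact string "1" as true, passing --dhtEnable=0 or --marketplaceEnable=0 (or any other value) disables the corresponding component.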
diff --git a/docker/docker-entrypoint.sh b/docker/docker-entrypoint.sh
index 9cad0bd..a9846a4 100644
--- a/docker/docker-entrypoint.sh
+++ b/docker/docker-entrypoint.sh
@@ -9,13 +9,19 @@ METRICSPORT=${CRAWLER_METRICSPORT:-8008}
 DATADIR=${CRAWLER_DATADIR:-crawler_data}
 DISCPORT=${CRAWLER_DISCPORT:-8090}
 BOOTNODES=${CRAWLER_BOOTNODES:-testnet_sprs}
+
+DHTENABLE=${CRAWLER_DHTENABLE:-1}
 STEPDELAY=${CRAWLER_STEPDELAY:-1000}
 REVISITDELAY=${CRAWLER_REVISITDELAY:-60}
 CHECKDELAY=${CRAWLER_CHECKDELAY:-10}
 EXPIRYDELAY=${CRAWLER_EXPIRYDELAY:-1440}
+MARKETPLACEENABLE=${CRAWLER_MARKETPLACEENABLE:-1}
+ETHPROVIDER=${CRAWLER_ETHPROVIDER:-NULL}
+MARKETPLACEADDRESS=${CRAWLER_MARKETPLACEADDRESS:-NULL}
+
 
 # Update CLI arguments
-set -- "$@" --logLevel="${LOGLEVEL}" --publicIp="${PUBLICIP}" --metricsAddress="${METRICSADDRESS}" --metricsPort="${METRICSPORT}" --dataDir="${DATADIR}" --discoveryPort="${DISCPORT}" --bootNodes="${BOOTNODES}" --stepDelay="${STEPDELAY}" --revisitDelay="${REVISITDELAY}" --expiryDelay="${EXPIRYDELAY}" --checkDelay="${CHECKDELAY}"
+set -- "$@" --logLevel="${LOGLEVEL}" --publicIp="${PUBLICIP}" --metricsAddress="${METRICSADDRESS}" --metricsPort="${METRICSPORT}" --dataDir="${DATADIR}" --discoveryPort="${DISCPORT}" --bootNodes="${BOOTNODES}" --dhtEnable="${DHTENABLE}" --stepDelay="${STEPDELAY}" --revisitDelay="${REVISITDELAY}" --expiryDelay="${EXPIRYDELAY}" --checkDelay="${CHECKDELAY}" --marketplaceEnable="${MARKETPLACEENABLE}" --ethProvider="${ETHPROVIDER}" --marketplaceAddress="${MARKETPLACEADDRESS}"
 
 # Run
 echo "Run Codex Crawler"
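In Docker, the same switches are driven through the new CRAWLER_* environment variables read by the entrypoint. A minimal sketch, assuming a locally built image tagged codexcrawler:local (the tag and the endpoint values are placeholders):

# Sketch only: image tag and values are placeholders
docker run \
  -e CRAWLER_DHTENABLE=1 \
  -e CRAWLER_MARKETPLACEENABLE=1 \
  -e CRAWLER_ETHPROVIDER=ws://localhost:8545 \
  -e CRAWLER_MARKETPLACEADDRESS=0x0000000000000000000000000000000000000000 \
  codexcrawler:local

Note that CRAWLER_ETHPROVIDER and CRAWLER_MARKETPLACEADDRESS default to the literal string NULL, so --ethProvider and --marketplaceAddress are always passed to the binary even when the variables are unset.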