Merge branch 'master' into feature/bot-upgrade

# Conflicts:
#	Framework/NethereumWorkflow/BlockTimeFinder.cs
#	Framework/NethereumWorkflow/NethereumInteraction.cs
This commit is contained in:
Ben 2024-03-22 10:59:57 +01:00
commit 0a4c4c60c5
No known key found for this signature in database
GPG Key ID: 541B9D8C9F1426A1
41 changed files with 810 additions and 557 deletions

View File

@ -82,5 +82,5 @@ public static class CoreInterfaceExtensions
}
```
The primary reason to decouple deploying and wrapping functionalities is that some use cases require these steps to be performed by separate applications, and different moments in time. For this reason, whatever is returned by the deploy methods should be serializable. After deserialization at some later time, it should then be valid input for the wrap method. The Codex continuous tests system is a clear example of this use case: The `CodexNetDeployer` tool uses deploy methods to create Codex nodes. Then it writes the returned objects to a JSON file. Some time later, the `CodexContinousTests` application uses this JSON file to reconstruct the objects created by the deploy methods. It then uses the wrap methods to create accessors and interactors, which are used for testing.
The primary reason to decouple deploying and wrapping functionalities is that some use cases require these steps to be performed by separate applications, and different moments in time. For this reason, whatever is returned by the deploy methods should be serializable. After deserialization at some later time, it should then be valid input for the wrap method. The Codex continuous tests system is a clear example of this use case: The `CodexNetDeployer` tool uses deploy methods to create Codex nodes. Then it writes the returned objects to a JSON file. Some time later, the `CodexContinuousTests` application uses this JSON file to reconstruct the objects created by the deploy methods. It then uses the wrap methods to create accessors and interactors, which are used for testing.

View File

@ -21,7 +21,7 @@ Do you want to write some tests using this distributed test setup? Great! Here's
1. When using the auto-bootstrap, you have no control over the bootstrap node from your tests. You can't (for example) shut it down during the course of the test. If you need this level of control for your scenario, use the `CodexDistTest` instead.
1. If your test needs a long time to run, add the `[UseLongTimeouts]` function attribute. This will greatly increase maximum time-out values for operations like for example uploading and downloading files.
### Continuous tests
1. Add new code files to `Tests/CodexContinousTests/Tests`
1. Add new code files to `Tests/CodexContinuousTests/Tests`
1. Inherit from `ContinuousTest`
1. Define one or more methods and decorate them with the `[TestMoment(...)]` attribute.
1. The TestMoment takes a number of seconds as argument. Each moment will be executed by the continuous test runner applying the given seconds as delay. (Non-cumulative. So two moments at T:10 will be executed one after another without delay, in this case the order of execution should not be depended upon.)

View File

@ -4,7 +4,7 @@ namespace Core
{
public interface IDownloadedLog
{
bool DoesLogContain(string expectedString);
string[] GetLinesContaining(string expectedString);
string[] FindLinesThatContain(params string[] tags);
void DeleteFile();
}
@ -18,20 +18,23 @@ namespace Core
this.logFile = logFile;
}
public bool DoesLogContain(string expectedString)
public string[] GetLinesContaining(string expectedString)
{
using var file = File.OpenRead(logFile.FullFilename);
using var streamReader = new StreamReader(file);
var lines = new List<string>();
var line = streamReader.ReadLine();
while (line != null)
{
if (line.Contains(expectedString)) return true;
if (line.Contains(expectedString))
{
lines.Add(line);
}
line = streamReader.ReadLine();
}
//Assert.Fail($"{owner} Unable to find string '{expectedString}' in CodexNode log file {logFile.FullFilename}");
return false;
return lines.ToArray(); ;
}
public string[] FindLinesThatContain(params string[] tags)

View File

@ -49,13 +49,14 @@ namespace KubernetesWorkflow
return CreatePodInfo(pod);
}
public void Stop(StartResult startResult)
public void Stop(StartResult startResult, bool waitTillStopped)
{
log.Debug();
if (startResult.InternalService != null) DeleteService(startResult.InternalService);
if (startResult.ExternalService != null) DeleteService(startResult.ExternalService);
DeleteDeployment(startResult.Deployment);
WaitUntilPodsForDeploymentAreOffline(startResult.Deployment);
if (waitTillStopped) WaitUntilPodsForDeploymentAreOffline(startResult.Deployment);
}
public void DownloadPodLog(RunningContainer container, ILogHandler logHandler, int? tailLines)
@ -498,10 +499,17 @@ namespace KubernetesWorkflow
Ports = CreateContainerPorts(recipe),
Env = CreateEnv(recipe),
VolumeMounts = CreateContainerVolumeMounts(recipe),
Resources = CreateResourceLimits(recipe)
Resources = CreateResourceLimits(recipe),
Command = CreateCommandList(recipe)
};
}
private IList<string> CreateCommandList(ContainerRecipe recipe)
{
if (recipe.CommandOverride == null || !recipe.CommandOverride.Command.Any()) return null!;
return recipe.CommandOverride.Command.ToList();
}
private V1ResourceRequirements CreateResourceLimits(ContainerRecipe recipe)
{
return new V1ResourceRequirements

View File

@ -0,0 +1,12 @@
namespace KubernetesWorkflow.Recipe
{
    // Optional replacement for a container image's default command line.
    // An empty Command means "no override": the image's own command is used
    // (see K8sController.CreateCommandList, which returns null for empty).
    public class CommandOverride
    {
        // Accepts zero or more command tokens. A caller can legally pass an
        // explicit null array to a params parameter; normalize that to empty
        // so Command is never null and consumers may call Any() safely.
        public CommandOverride(params string[] command)
        {
            Command = command ?? new string[0];
        }

        // Command tokens to run instead of the image default; never null.
        public string[] Command { get; }
    }
}

View File

@ -2,13 +2,14 @@
{
public class ContainerRecipe
{
public ContainerRecipe(int number, string? nameOverride, string image, ContainerResources resources, SchedulingAffinity schedulingAffinity, bool setCriticalPriority, Port[] exposedPorts, Port[] internalPorts, EnvVar[] envVars, PodLabels podLabels, PodAnnotations podAnnotations, VolumeMount[] volumes, ContainerAdditionals additionals)
public ContainerRecipe(int number, string? nameOverride, string image, ContainerResources resources, SchedulingAffinity schedulingAffinity, CommandOverride commandOverride, bool setCriticalPriority, Port[] exposedPorts, Port[] internalPorts, EnvVar[] envVars, PodLabels podLabels, PodAnnotations podAnnotations, VolumeMount[] volumes, ContainerAdditionals additionals)
{
Number = number;
NameOverride = nameOverride;
Image = image;
Resources = resources;
SchedulingAffinity = schedulingAffinity;
CommandOverride = commandOverride;
SetCriticalPriority = setCriticalPriority;
ExposedPorts = exposedPorts;
InternalPorts = internalPorts;
@ -35,6 +36,7 @@
public string? NameOverride { get; }
public ContainerResources Resources { get; }
public SchedulingAffinity SchedulingAffinity { get; }
public CommandOverride CommandOverride { get; }
public bool SetCriticalPriority { get; }
public string Image { get; }
public Port[] ExposedPorts { get; }

View File

@ -14,6 +14,7 @@ namespace KubernetesWorkflow.Recipe
private RecipeComponentFactory factory = null!;
private ContainerResources resources = new ContainerResources();
private SchedulingAffinity schedulingAffinity = new SchedulingAffinity();
private CommandOverride commandOverride = new CommandOverride();
private bool setCriticalPriority;
public ContainerRecipe CreateRecipe(int index, int containerNumber, RecipeComponentFactory factory, StartupConfig config)
@ -24,7 +25,7 @@ namespace KubernetesWorkflow.Recipe
Initialize(config);
var recipe = new ContainerRecipe(containerNumber, config.NameOverride, Image, resources, schedulingAffinity, setCriticalPriority,
var recipe = new ContainerRecipe(containerNumber, config.NameOverride, Image, resources, schedulingAffinity, commandOverride, setCriticalPriority,
exposedPorts.ToArray(),
internalPorts.ToArray(),
envVars.ToArray(),
@ -43,6 +44,7 @@ namespace KubernetesWorkflow.Recipe
this.factory = null!;
resources = new ContainerResources();
schedulingAffinity = new SchedulingAffinity();
commandOverride = new CommandOverride();
setCriticalPriority = false;
return recipe;
@ -130,6 +132,11 @@ namespace KubernetesWorkflow.Recipe
schedulingAffinity = new SchedulingAffinity(notIn);
}
protected void OverrideCommand(params string[] command)
{
commandOverride = new CommandOverride(command);
}
protected void SetSystemCriticalPriority()
{
setCriticalPriority = true;

View File

@ -14,7 +14,7 @@ namespace KubernetesWorkflow
PodInfo GetPodInfo(RunningContainer container);
PodInfo GetPodInfo(RunningContainers containers);
CrashWatcher CreateCrashWatcher(RunningContainer container);
void Stop(RunningContainers containers);
void Stop(RunningContainers containers, bool waitTillStopped);
void DownloadContainerLog(RunningContainer container, ILogHandler logHandler, int? tailLines = null);
string ExecuteCommand(RunningContainer container, string command, params string[] args);
void DeleteNamespace();
@ -86,11 +86,11 @@ namespace KubernetesWorkflow
return K8s(c => c.CreateCrashWatcher(container));
}
public void Stop(RunningContainers runningContainers)
public void Stop(RunningContainers runningContainers, bool waitTillStopped)
{
K8s(controller =>
{
controller.Stop(runningContainers.StartResult);
controller.Stop(runningContainers.StartResult, waitTillStopped);
cluster.Configuration.Hooks.OnContainersStopped(runningContainers);
});
}

View File

@ -1,22 +0,0 @@
namespace NethereumWorkflow
{
    public partial class BlockTimeFinder
    {
        // Immutable pairing of a block number with the UTC moment it was mined.
        public class BlockTimeEntry
        {
            public BlockTimeEntry(ulong blockNumber, DateTime utc)
            {
                BlockNumber = blockNumber;
                Utc = utc;
            }

            public ulong BlockNumber { get; }
            public DateTime Utc { get; }

            // Round-trip ("o") format keeps the timestamp unambiguous in logs.
            public override string ToString() => $"[{BlockNumber}] @ {Utc:o}";
        }
    }
}

View File

@ -1,317 +0,0 @@
using Logging;
using Nethereum.RPC.Eth.DTOs;
using Nethereum.Web3;
using Utils;
namespace NethereumWorkflow
{
// Maps UTC moments to block numbers by estimating a position from the average
// block time and then fetching small batches of blocks around that estimate
// until two consecutive blocks straddle the requested moment. Block timestamps
// are memoized in a static cache shared by every instance for the process lifetime.
// NOTE(review): the static dictionary is unsynchronized; confirm callers do not
// use this type from multiple threads concurrently.
public partial class BlockTimeFinder
{
// How many blocks to fetch in each direction per expansion pass.
private const ulong FetchRange = 6;
// Cache cap: once exceeded, the cache is wiped and re-seeded via Initialize().
private const int MaxEntries = 1024;
private static readonly Dictionary<ulong, BlockTimeEntry> entries = new Dictionary<ulong, BlockTimeEntry>();
private readonly Web3 web3;
private readonly ILog log;
public BlockTimeFinder(Web3 web3, ILog log)
{
this.web3 = web3;
this.log = log;
}
// Converts a UTC time range to the block range covering it. Min/Max ordering
// guards against the two lookups crossing over for very narrow ranges.
public BlockRange ConvertTimeRangeToBlockRange(TimeRange timeRange)
{
var lowest = GetLowestBlockNumberAfter(timeRange.From);
var highest = GetHighestBlockNumberBefore(timeRange.To);
var fromBlock = Math.Min(lowest, highest);
var toBlock = Math.Max(lowest, highest);
return new BlockRange(fromBlock, toBlock);
}
// Number of the last block mined strictly before 'moment'.
// 'moment' must be a UTC time in the past; throws when no block is found.
public ulong GetHighestBlockNumberBefore(DateTime moment)
{
AssertMomentIsInPast(moment);
Initialize();
var result = GetHighestBlockBefore(moment);
log.Log($"Highest block before [{moment.ToString("o")}] = {result}");
return result;
}
// Number of the first block mined strictly after 'moment'.
// 'moment' must be a UTC time in the past; throws when no block is found.
public ulong GetLowestBlockNumberAfter(DateTime moment)
{
AssertMomentIsInPast(moment);
Initialize();
var result = GetLowestBlockAfter(moment);
log.Log($"Lowest block after [{moment.ToString("o")}] = {result}");
return result;
}
// Recursive: fetches more blocks around 'moment' until the cache contains two
// consecutive blocks straddling it, then returns the earlier of that pair.
private ulong GetHighestBlockBefore(DateTime moment)
{
var closestBefore = FindClosestBeforeEntry(moment);
var closestAfter = FindClosestAfterEntry(moment);
// Success condition: cached blocks with consecutive numbers bracket the moment.
if (closestBefore != null &&
closestAfter != null &&
closestBefore.Utc < moment &&
closestAfter.Utc > moment &&
closestBefore.BlockNumber + 1 == closestAfter.BlockNumber)
{
return closestBefore.BlockNumber;
}
var newBlocks = FetchBlocksAround(moment);
if (newBlocks == 0)
{
log.Debug("Didn't find any new blocks.");
// Can't narrow further; settle for the best candidate found so far.
if (closestBefore != null) return closestBefore.BlockNumber;
throw new Exception("Failed to find highest before.");
}
return GetHighestBlockBefore(moment);
}
// Mirror of GetHighestBlockBefore: returns the later block of the straddling pair.
private ulong GetLowestBlockAfter(DateTime moment)
{
var closestBefore = FindClosestBeforeEntry(moment);
var closestAfter = FindClosestAfterEntry(moment);
if (closestBefore != null &&
closestAfter != null &&
closestBefore.Utc < moment &&
closestAfter.Utc > moment &&
closestBefore.BlockNumber + 1 == closestAfter.BlockNumber)
{
return closestAfter.BlockNumber;
}
var newBlocks = FetchBlocksAround(moment);
if (newBlocks == 0)
{
log.Debug("Didn't find any new blocks.");
if (closestAfter != null) return closestAfter.BlockNumber;
throw new Exception("Failed to find lowest before.");
}
return GetLowestBlockAfter(moment);
}
// Estimates where 'moment' sits relative to the newest cached block, then
// fetches up to FetchRange uncached blocks on each side of that estimate.
// Returns the number of blocks actually added to the cache.
private int FetchBlocksAround(DateTime moment)
{
var timePerBlock = EstimateTimePerBlock();
log.Debug("Fetching blocks around " + moment.ToString("o") + " timePerBlock: " + timePerBlock.TotalSeconds);
EnsureRecentBlockIfNecessary(moment, timePerBlock);
var max = entries.Keys.Max();
var blockDifference = CalculateBlockDifference(moment, timePerBlock, max);
return
FetchUp(max, blockDifference) +
FetchDown(max, blockDifference);
}
// Walks downward from the estimated target, caching up to FetchRange blocks
// that were not already cached. Stops at block 0/1 or on a failed fetch.
private int FetchDown(ulong max, ulong blockDifference)
{
var target = GetTarget(max, blockDifference);
var fetchDown = FetchRange;
var newBlocks = 0;
while (fetchDown > 0)
{
if (!entries.ContainsKey(target))
{
var newBlock = AddBlockNumber("FD" + fetchDown, target);
if (newBlock == null) return newBlocks;
newBlocks++;
fetchDown--;
}
target--;
// target is ulong, so '<= 0' only triggers at exactly 0; this check
// prevents the next decrement from wrapping around.
if (target <= 0) return newBlocks;
}
return newBlocks;
}
// Upward counterpart of FetchDown; stops at the newest cached block.
private int FetchUp(ulong max, ulong blockDifference)
{
var target = GetTarget(max, blockDifference);
var fetchUp = FetchRange;
var newBlocks = 0;
while (fetchUp > 0)
{
if (!entries.ContainsKey(target))
{
var newBlock = AddBlockNumber("FU" + fetchUp, target);
if (newBlock == null) return newBlocks;
newBlocks++;
fetchUp--;
}
target++;
if (target >= max) return newBlocks;
}
return newBlocks;
}
// Estimated block number for the moment: 'blockDifference' blocks below the
// newest cached block, clamped to 1 to avoid ulong underflow.
private ulong GetTarget(ulong max, ulong blockDifference)
{
if (max <= blockDifference) return 1;
return max - blockDifference;
}
// Distance in blocks (>= 1) between the newest cached block and 'moment',
// derived from the estimated seconds-per-block.
private ulong CalculateBlockDifference(DateTime moment, TimeSpan timePerBlock, ulong max)
{
var latest = entries[max];
var timeDifference = latest.Utc - moment;
double secondsDifference = Math.Abs(timeDifference.TotalSeconds);
double secondsPerBlock = timePerBlock.TotalSeconds;
double numberOfBlocksDifference = secondsDifference / secondsPerBlock;
var blockDifference = Convert.ToUInt64(numberOfBlocksDifference);
if (blockDifference < 1) blockDifference = 1;
return blockDifference;
}
// If 'moment' is newer than the newest cached block, polls the chain head
// (sleeping one estimated block-time between tries) until the chain catches
// up. Gives up after 10 unsuccessful polls.
private void EnsureRecentBlockIfNecessary(DateTime moment, TimeSpan timePerBlock)
{
var max = entries.Keys.Max();
var latest = entries[max];
var maxRetry = 10;
while (moment > latest.Utc)
{
var newBlock = AddCurrentBlock();
if (newBlock == null || newBlock.BlockNumber == latest.BlockNumber)
{
maxRetry--;
if (maxRetry == 0) throw new Exception("Unable to fetch recent block after 10x tries.");
Thread.Sleep(timePerBlock);
}
max = entries.Keys.Max();
latest = entries[max];
}
}
// 'a' is a short caller tag used only for debug logging.
private BlockTimeEntry? AddBlockNumber(string a, decimal blockNumber)
{
return AddBlockNumber(a, Convert.ToUInt64(blockNumber));
}
// Fetches and caches the timestamp for 'blockNumber'. Returns the cached
// entry, or null when the block could not be fetched. Clears and re-seeds
// the cache once it grows past MaxEntries.
private BlockTimeEntry? AddBlockNumber(string a, ulong blockNumber)
{
log.Debug(a + " - Adding blockNumber: " + blockNumber);
if (entries.ContainsKey(blockNumber))
{
return entries[blockNumber];
}
if (entries.Count > MaxEntries)
{
log.Debug("Entries cleared!");
entries.Clear();
Initialize();
}
var time = GetTimestampFromBlock(blockNumber);
if (time == null)
{
log.Debug("Failed to get block for number: " + blockNumber);
return null;
}
var entry = new BlockTimeEntry(blockNumber, time.Value);
log.Debug("Found block " + entry.BlockNumber + " at " + entry.Utc.ToString("o"));
entries.Add(blockNumber, entry);
return entry;
}
// Average seconds per block over (at most) the newest ~100 cached blocks,
// clamped to a minimum of one second.
// NOTE(review): 'max - 100' wraps when max < 100 (ulong), so Math.Max may pick
// the wrapped value instead of 'min'. Also, elapsed time is measured from
// clippedMin..max but divided by (max - min); when clipping applies this
// underestimates seconds-per-block. Confirm whether either case occurs in practice.
private TimeSpan EstimateTimePerBlock()
{
var min = entries.Keys.Min();
var max = entries.Keys.Max();
var clippedMin = Math.Max(max - 100, min);
var minTime = entries[min].Utc;
var clippedMinBlock = AddBlockNumber("EST", clippedMin);
if (clippedMinBlock != null) minTime = clippedMinBlock.Utc;
var maxTime = entries[max].Utc;
var elapsedTime = maxTime - minTime;
double elapsedSeconds = elapsedTime.TotalSeconds;
double numberOfBlocks = max - min;
double secondsPerBlock = elapsedSeconds / numberOfBlocks;
var result = TimeSpan.FromSeconds(secondsPerBlock);
if (result.TotalSeconds < 1.0) result = TimeSpan.FromSeconds(1.0);
return result;
}
// Seeds the empty cache with the current head block and its predecessor.
// NOTE(review): assumes the head block number is > 0; 'Key - 1' would wrap
// (ulong) if the head were block 0.
private void Initialize()
{
if (!entries.Any())
{
AddCurrentBlock();
AddBlockNumber("INIT", entries.Single().Key - 1);
}
}
private static void AssertMomentIsInPast(DateTime moment)
{
if (moment > DateTime.UtcNow) throw new Exception("Moment must be UTC and must be in the past.");
}
// Caches the chain head. Null when the head's timestamp cannot be fetched.
private BlockTimeEntry? AddCurrentBlock()
{
var number = Time.Wait(web3.Eth.Blocks.GetBlockNumber.SendRequestAsync());
var blockNumber = number.ToDecimal();
return AddBlockNumber("CUR", blockNumber);
}
// UTC timestamp of the given block, or null when the node returns no block.
// RPC exceptions are logged and rethrown.
private DateTime? GetTimestampFromBlock(ulong blockNumber)
{
try
{
var block = Time.Wait(web3.Eth.Blocks.GetBlockWithTransactionsByNumber.SendRequestAsync(new BlockParameter(blockNumber)));
if (block == null) return null;
return DateTimeOffset.FromUnixTimeSeconds(Convert.ToInt64(block.Timestamp.ToDecimal())).UtcDateTime;
}
catch (Exception ex)
{
log.Error(nameof(GetTimestampFromBlock) + " Exception: " + ex);
throw;
}
}
// Cached entry with the latest Utc still earlier than 'moment', or null.
private BlockTimeEntry? FindClosestBeforeEntry(DateTime moment)
{
BlockTimeEntry? result = null;
foreach (var entry in entries.Values)
{
if (result == null)
{
if (entry.Utc < moment) result = entry;
}
else
{
if (entry.Utc > result.Utc && entry.Utc < moment) result = entry;
}
}
return result;
}
// Cached entry with the earliest Utc still later than 'moment', or null.
private BlockTimeEntry? FindClosestAfterEntry(DateTime moment)
{
BlockTimeEntry? result = null;
foreach (var entry in entries.Values)
{
if (result == null)
{
if (entry.Utc > moment) result = entry;
}
else
{
if (entry.Utc < result.Utc && entry.Utc > moment) result = entry;
}
}
return result;
}
}
}

View File

@ -0,0 +1,39 @@
namespace NethereumWorkflow.BlockUtils
{
    // Bounded in-memory map from block number to its timestamp entry.
    // When the bound is exceeded the whole cache is dropped and subscribers
    // are notified via OnCacheCleared so they can re-seed it.
    public class BlockCache
    {
        public delegate void CacheClearedEvent();

        private const int MaxEntries = 1024;
        private readonly Dictionary<ulong, BlockTimeEntry> entries = new Dictionary<ulong, BlockTimeEntry>();

        public event CacheClearedEvent? OnCacheCleared;

        // Convenience overload: wraps the pair in a BlockTimeEntry.
        public BlockTimeEntry Add(ulong number, DateTime dateTime) => Add(new BlockTimeEntry(number, dateTime));

        // Returns the cached entry for this block number, inserting the given
        // one when absent. Inserting past the cap clears the cache first and
        // raises OnCacheCleared.
        public BlockTimeEntry Add(BlockTimeEntry entry)
        {
            if (entries.TryGetValue(entry.BlockNumber, out var existing)) return existing;

            if (entries.Count > MaxEntries)
            {
                entries.Clear();
                OnCacheCleared?.Invoke();
            }
            entries.Add(entry.BlockNumber, entry);
            return entry;
        }

        // Null when the block number has not been cached.
        public BlockTimeEntry? Get(ulong number)
        {
            return entries.TryGetValue(number, out var entry) ? entry : null;
        }
    }
}

View File

@ -0,0 +1,19 @@
namespace NethereumWorkflow.BlockUtils
{
    // Immutable pairing of a block number with the UTC moment it was mined.
    public class BlockTimeEntry
    {
        public BlockTimeEntry(ulong blockNumber, DateTime utc)
        {
            BlockNumber = blockNumber;
            Utc = utc;
        }

        public ulong BlockNumber { get; }
        public DateTime Utc { get; }

        // Round-trip ("o") format keeps the timestamp unambiguous in logs.
        public override string ToString() => $"[{BlockNumber}] @ {Utc:o}";
    }
}

View File

@ -0,0 +1,95 @@
using Logging;
namespace NethereumWorkflow.BlockUtils
{
// Maps UTC moments to block numbers with a binary search over block
// timestamps, bounded by the chain's genesis and current head blocks.
public class BlockTimeFinder
{
private readonly BlockCache cache;
private readonly BlockchainBounds bounds;
private readonly IWeb3Blocks web3;
private readonly ILog log;
public BlockTimeFinder(BlockCache cache, IWeb3Blocks web3, ILog log)
{
this.web3 = web3;
this.log = log;
this.cache = cache;
bounds = new BlockchainBounds(cache, web3);
}
// Number of the highest block mined before 'moment'. Null when 'moment' is
// at or before genesis; the head's number when 'moment' is at or after it.
public ulong? GetHighestBlockNumberBefore(DateTime moment)
{
bounds.Initialize();
if (moment <= bounds.Genesis.Utc) return null;
if (moment >= bounds.Current.Utc) return bounds.Current.BlockNumber;
return Search(bounds.Genesis, bounds.Current, moment, HighestBeforeSelector);
}
// Number of the lowest block mined after 'moment'. Null when 'moment' is at
// or after the head; genesis' number when 'moment' is at or before genesis.
public ulong? GetLowestBlockNumberAfter(DateTime moment)
{
bounds.Initialize();
if (moment >= bounds.Current.Utc) return null;
if (moment <= bounds.Genesis.Utc) return bounds.Genesis.BlockNumber;
return Search(bounds.Genesis, bounds.Current, moment, LowestAfterSelector);
}
// Binary search between 'lower' and 'upper' for the block satisfying the
// selector. Callers ensure 'target' lies strictly inside the bounds.
// NOTE(review): both selectors use strict inequalities, so a block whose
// timestamp exactly equals 'target' satisfies neither; termination relies on
// no block timestamp coinciding exactly with the target - confirm.
private ulong Search(BlockTimeEntry lower, BlockTimeEntry upper, DateTime target, Func<DateTime, BlockTimeEntry, bool> isWhatIwant)
{
var middle = GetMiddle(lower, upper);
// Range collapsed to two adjacent blocks: 'upper' is the last candidate.
if (middle.BlockNumber == lower.BlockNumber)
{
if (isWhatIwant(target, upper)) return upper.BlockNumber;
}
if (isWhatIwant(target, middle))
{
return middle.BlockNumber;
}
if (middle.Utc > target)
{
return Search(lower, middle, target, isWhatIwant);
}
else
{
return Search(middle, upper, target, isWhatIwant);
}
}
// Block halfway between the two entries (integer division rounds down).
private BlockTimeEntry GetMiddle(BlockTimeEntry lower, BlockTimeEntry upper)
{
ulong range = upper.BlockNumber - lower.BlockNumber;
ulong number = lower.BlockNumber + range / 2;
return GetBlock(number);
}
// True when 'entry' is the last block before 'target': its successor is after it.
private bool HighestBeforeSelector(DateTime target, BlockTimeEntry entry)
{
var next = GetBlock(entry.BlockNumber + 1);
return
entry.Utc < target &&
next.Utc > target;
}
// True when 'entry' is the first block after 'target': its predecessor is before it.
private bool LowestAfterSelector(DateTime target, BlockTimeEntry entry)
{
var previous = GetBlock(entry.BlockNumber - 1);
return
entry.Utc > target &&
previous.Utc < target;
}
// Fetches (via cache) the timestamp entry for a block known to exist
// within the chain bounds; throws when the fetch fails.
private BlockTimeEntry GetBlock(ulong number)
{
if (number < bounds.Genesis.BlockNumber) throw new Exception("Can't fetch block before genesis.");
if (number > bounds.Current.BlockNumber) throw new Exception("Can't fetch block after current.");
var dateTime = web3.GetTimestampForBlock(number);
if (dateTime == null) throw new Exception("Failed to get dateTime for block that should exist.");
return cache.Add(number, dateTime.Value);
}
}
}

View File

@ -0,0 +1,106 @@
namespace NethereumWorkflow.BlockUtils
{
// Discovers and caches the two entries bounding the reachable chain history:
// the earliest fetchable ("genesis") block and the current head block.
// Re-seeds the cache with these bounds whenever the cache is cleared.
public class BlockchainBounds
{
private readonly BlockCache cache;
private readonly IWeb3Blocks web3;
// Earliest block the node can return a timestamp for. Set by Initialize().
public BlockTimeEntry Genesis { get; private set; } = null!;
// Chain head as of the most recent Initialize() call.
public BlockTimeEntry Current { get; private set; } = null!;
public BlockchainBounds(BlockCache cache, IWeb3Blocks web3)
{
this.cache = cache;
this.web3 = web3;
// Re-seed the cache with the bounds after it is wiped.
cache.OnCacheCleared += Initialize;
}
// Refreshes Current and (once) discovers Genesis. Throws when the chain
// consists of a single block, which the search logic cannot handle.
public void Initialize()
{
AddCurrentBlock();
LookForGenesisBlock();
if (Current.BlockNumber == Genesis.BlockNumber)
{
throw new Exception("Unsupported condition: Current block is genesis block.");
}
}
// Finds the earliest block with a fetchable timestamp. Genesis is only
// searched for once; later calls just re-insert it into the cache.
private void LookForGenesisBlock()
{
if (Genesis != null)
{
cache.Add(Genesis);
return;
}
var blockTime = web3.GetTimestampForBlock(0);
if (blockTime != null)
{
AddGenesisBlock(0, blockTime.Value);
return;
}
// Block 0 is not available (presumably truncated/unavailable history -
// confirm); binary-search for the earliest block the node still serves.
LookForGenesisBlock(0, Current);
}
// Binary search where 'lower' is known-unavailable and 'upper' known-available.
private void LookForGenesisBlock(ulong lower, BlockTimeEntry upper)
{
if (Genesis != null) return;
var range = upper.BlockNumber - lower;
if (range == 1)
{
// Adjacent pair: genesis is 'lower' if fetchable, otherwise 'upper'.
var lowTime = web3.GetTimestampForBlock(lower);
if (lowTime != null)
{
AddGenesisBlock(lower, lowTime.Value);
}
else
{
AddGenesisBlock(upper);
}
return;
}
var current = lower + range / 2;
var blockTime = web3.GetTimestampForBlock(current);
if (blockTime != null)
{
var newUpper = cache.Add(current, blockTime.Value);
LookForGenesisBlock(lower, newUpper);
}
else
{
LookForGenesisBlock(current, upper);
}
}
// Fetches the chain head and records it as Current; throws on fetch failure.
private void AddCurrentBlock()
{
var currentBlockNumber = web3.GetCurrentBlockNumber();
var blockTime = web3.GetTimestampForBlock(currentBlockNumber);
if (blockTime == null) throw new Exception("Unable to get dateTime for current block.");
AddCurrentBlock(currentBlockNumber, blockTime.Value);
}
private void AddCurrentBlock(ulong currentBlockNumber, DateTime dateTime)
{
Current = new BlockTimeEntry(currentBlockNumber, dateTime);
cache.Add(Current);
}
private void AddGenesisBlock(ulong number, DateTime dateTime)
{
AddGenesisBlock(new BlockTimeEntry(number, dateTime));
}
private void AddGenesisBlock(BlockTimeEntry entry)
{
Genesis = entry;
cache.Add(Genesis);
}
}
}

View File

@ -3,13 +3,16 @@ using Nethereum.ABI.FunctionEncoding.Attributes;
using Nethereum.Contracts;
using Nethereum.RPC.Eth.DTOs;
using Nethereum.Web3;
using NethereumWorkflow.BlockUtils;
using Utils;
using BlockRange = Utils.BlockRange;
namespace NethereumWorkflow
{
public class NethereumInteraction
{
// BlockCache is a static instance: It stays alive for the duration of the application runtime.
private readonly static BlockCache blockCache = new BlockCache();
private readonly ILog log;
private readonly Web3 web3;
@ -88,25 +91,33 @@ namespace NethereumWorkflow
public List<EventLog<TEvent>> GetEvents<TEvent>(string address, TimeRange timeRange) where TEvent : IEventDTO, new()
{
var blockTimeFinder = new BlockTimeFinder(web3, log);
var blockRange = blockTimeFinder.ConvertTimeRangeToBlockRange(timeRange);
return GetEvents<TEvent>(address, blockRange);
var wrapper = new Web3Wrapper(web3, log);
var blockTimeFinder = new BlockTimeFinder(blockCache, wrapper, log);
var fromBlock = blockTimeFinder.GetLowestBlockNumberAfter(timeRange.From);
var toBlock = blockTimeFinder.GetHighestBlockNumberBefore(timeRange.To);
if (!fromBlock.HasValue)
{
log.Error("Failed to find lowest block for time range: " + timeRange);
throw new Exception("Failed");
}
if (!toBlock.HasValue)
{
log.Error("Failed to find highest block for time range: " + timeRange);
throw new Exception("Failed");
}
return GetEvents<TEvent>(address, fromBlock.Value, toBlock.Value);
}
public List<EventLog<TEvent>> GetEvents<TEvent>(string address, BlockRange blockRange) where TEvent : IEventDTO, new()
public List<EventLog<TEvent>> GetEvents<TEvent>(string address, ulong fromBlockNumber, ulong toBlockNumber) where TEvent : IEventDTO, new()
{
log.Debug($"Getting events of type [{typeof(TEvent).Name}] in block range [{blockRange.From} - {blockRange.To}]");
var eventHandler = web3.Eth.GetEvent<TEvent>(address);
var from = new BlockParameter(blockRange.From);
var to = new BlockParameter(blockRange.To);
var from = new BlockParameter(fromBlockNumber);
var to = new BlockParameter(toBlockNumber);
var blockFilter = Time.Wait(eventHandler.CreateFilterBlockRangeAsync(from, to));
return Time.Wait(eventHandler.GetAllChangesAsync(blockFilter));
}
public BlockRange ConvertTimeRangeToBlockRange(TimeRange timeRange)
{
var blockTimeFinder = new BlockTimeFinder(web3, log);
return blockTimeFinder.ConvertTimeRangeToBlockRange(timeRange);
}
}
}

View File

@ -0,0 +1,46 @@
using Logging;
using Nethereum.RPC.Eth.DTOs;
using Nethereum.Web3;
using Utils;
namespace NethereumWorkflow
{
    // Minimal block-querying surface over a Nethereum Web3 connection,
    // abstracted behind an interface so block-time searching can be
    // exercised without a real chain.
    public interface IWeb3Blocks
    {
        ulong GetCurrentBlockNumber();
        DateTime? GetTimestampForBlock(ulong blockNumber);
    }

    public class Web3Wrapper : IWeb3Blocks
    {
        private readonly Web3 web3;
        private readonly ILog log;

        public Web3Wrapper(Web3 web3, ILog log)
        {
            this.web3 = web3;
            this.log = log;
        }

        // Latest block number reported by the connected node.
        public ulong GetCurrentBlockNumber()
        {
            var headNumber = Time.Wait(web3.Eth.Blocks.GetBlockNumber.SendRequestAsync());
            return Convert.ToUInt64(headNumber.ToDecimal());
        }

        // UTC timestamp of the given block, or null when the block cannot be
        // fetched (unknown number, or any RPC failure - which is logged).
        public DateTime? GetTimestampForBlock(ulong blockNumber)
        {
            try
            {
                var block = Time.Wait(web3.Eth.Blocks.GetBlockWithTransactionsByNumber.SendRequestAsync(new BlockParameter(blockNumber)));
                if (block == null) return null;
                var unixSeconds = Convert.ToInt64(block.Timestamp.ToDecimal());
                return DateTimeOffset.FromUnixTimeSeconds(unixSeconds).UtcDateTime;
            }
            catch (Exception ex)
            {
                log.Error("Exception while getting timestamp for block: " + ex);
                return null;
            }
        }
    }
}

View File

@ -7,7 +7,7 @@ namespace CodexContractsPlugin
{
public class CodexContractsContainerRecipe : ContainerRecipeFactory
{
public static string DockerImage { get; } = "codexstorage/codex-contracts-eth:sha-965529d-dist-tests";
public static string DockerImage { get; } = "codexstorage/codex-contracts-eth:latest-dist-tests";
public const string MarketplaceAddressFilename = "/hardhat/deployments/codexdisttestnetwork/Marketplace.json";
public const string MarketplaceArtifactFilename = "/hardhat/artifacts/contracts/Marketplace.sol/Marketplace.json";

View File

@ -33,7 +33,7 @@ namespace CodexContractsPlugin
try
{
var result = DeployContract(container, workflow, gethNode);
workflow.Stop(containers);
workflow.Stop(containers, waitTillStopped: false);
Log("Container stopped.");
return result;
}

View File

@ -9,7 +9,7 @@ namespace CodexPlugin
{
private readonly MarketplaceStarter marketplaceStarter = new MarketplaceStarter();
private const string DefaultDockerImage = "codexstorage/nim-codex:latest-dist-tests";
private const string DefaultDockerImage = "codexstorage/nim-codex:sha-e4ddb94-dist-tests";
public const string ApiPortTag = "codex_api_port";
public const string ListenPortTag = "codex_listen_port";
public const string MetricsPortTag = "codex_metrics_port";
@ -105,7 +105,6 @@ namespace CodexPlugin
AddEnvVar("CODEX_ETH_PROVIDER", $"{wsAddress.Host.Replace("http://", "ws://")}:{wsAddress.Port}");
AddEnvVar("CODEX_MARKETPLACE_ADDRESS", marketplaceAddress);
AddEnvVar("CODEX_PERSISTENCE", "true");
// Custom scripting in the Codex test image will write this variable to a private-key file,
// and pass the correct filename to Codex.
@ -113,7 +112,9 @@ namespace CodexPlugin
AddEnvVar("PRIV_KEY", mStart.PrivateKey);
Additional(mStart);
if (config.MarketplaceConfig.IsValidator)
var marketplaceSetup = config.MarketplaceConfig.MarketplaceSetup;
SetCommandOverride(marketplaceSetup);
if (marketplaceSetup.IsValidator)
{
AddEnvVar("CODEX_VALIDATOR", "true");
}
@ -125,6 +126,18 @@ namespace CodexPlugin
}
}
private void SetCommandOverride(MarketplaceSetup ms)
{
if (ms.IsStorageNode)
{
OverrideCommand("bash", "/docker-entrypoint.sh", "codex", "persistence", "prover");
}
else
{
OverrideCommand("bash", "/docker-entrypoint.sh", "codex", "persistence");
}
}
private Port CreateApiPort(CodexStartupConfig config, string tag)
{
if (config.PublicTestNet == null) return AddExposedPort(tag);

View File

@ -26,7 +26,7 @@ namespace CodexPlugin
CrashWatcher CrashWatcher { get; }
PodInfo GetPodInfo();
ITransferSpeeds TransferSpeeds { get; }
void Stop();
void Stop(bool waitTillStopped);
}
public class CodexNode : ICodexNode
@ -153,13 +153,13 @@ namespace CodexPlugin
return CodexAccess.GetPodInfo();
}
public void Stop()
public void Stop(bool waitTillStopped)
{
if (Group.Count() > 1) throw new InvalidOperationException("Codex-nodes that are part of a group cannot be " +
"individually shut down. Use 'BringOffline()' on the group object to stop the group. This method is only " +
"available for codex-nodes in groups of 1.");
Group.BringOffline();
Group.BringOffline(waitTillStopped);
}
public void EnsureOnlineGetVersionResponse()

View File

@ -7,7 +7,7 @@ namespace CodexPlugin
{
public interface ICodexNodeGroup : IEnumerable<ICodexNode>, IHasManyMetricScrapeTargets
{
void BringOffline();
void BringOffline(bool waitTillStopped);
ICodexNode this[int index] { get; }
}
@ -31,9 +31,9 @@ namespace CodexPlugin
}
}
public void BringOffline()
public void BringOffline(bool waitTillStopped)
{
starter.BringOffline(this);
starter.BringOffline(this, waitTillStopped);
// Clear everything. Prevent accidental use.
Nodes = Array.Empty<CodexNode>();
Containers = null!;

View File

@ -17,7 +17,8 @@ namespace CodexPlugin
ICodexSetup WithBlockMaintenanceInterval(TimeSpan duration);
ICodexSetup WithBlockMaintenanceNumber(int numberOfBlocks);
ICodexSetup EnableMetrics();
ICodexSetup EnableMarketplace(IGethNode gethNode, ICodexContracts codexContracts, Ether initialEth, TestToken initialTokens, bool isValidator = false);
ICodexSetup EnableMarketplace(IGethNode gethNode, ICodexContracts codexContracts, Ether initialEth, TestToken initialTokens);
ICodexSetup EnableMarketplace(IGethNode gethNode, ICodexContracts codexContracts, Ether initialEth, TestToken initialTokens, Action<IMarketplaceSetup> marketplaceSetup);
/// <summary>
/// Provides an invalid proof every N proofs
/// </summary>
@ -25,6 +26,12 @@ namespace CodexPlugin
ICodexSetup AsPublicTestNet(CodexTestNetConfig testNetConfig);
}
// Fluent configuration of the roles a marketplace-enabled node takes on.
// A marketplace-enabled node is always a client node; these calls add roles.
public interface IMarketplaceSetup
{
// Mark this node as a storage provider.
IMarketplaceSetup AsStorageNode();
// Mark this node as a validator.
IMarketplaceSetup AsValidator();
}
public class CodexLogCustomTopics
{
public CodexLogCustomTopics(CodexLogLevel discV5, CodexLogLevel libp2p, CodexLogLevel blockExchange)
@ -115,9 +122,17 @@ namespace CodexPlugin
return this;
}
public ICodexSetup EnableMarketplace(IGethNode gethNode, ICodexContracts codexContracts, Ether initialEth, TestToken initialTokens, bool isValidator = false)
public ICodexSetup EnableMarketplace(IGethNode gethNode, ICodexContracts codexContracts, Ether initialEth, TestToken initialTokens)
{
MarketplaceConfig = new MarketplaceInitialConfig(gethNode, codexContracts, initialEth, initialTokens, isValidator);
return EnableMarketplace(gethNode, codexContracts, initialEth, initialTokens, s => { });
}
public ICodexSetup EnableMarketplace(IGethNode gethNode, ICodexContracts codexContracts, Ether initialEth, TestToken initialTokens, Action<IMarketplaceSetup> marketplaceSetup)
{
var ms = new MarketplaceSetup();
marketplaceSetup(ms);
MarketplaceConfig = new MarketplaceInitialConfig(ms, gethNode, codexContracts, initialEth, initialTokens);
return this;
}
@ -146,7 +161,35 @@ namespace CodexPlugin
if (BootstrapSpr != null) yield return $"BootstrapNode={BootstrapSpr}";
if (StorageQuota != null) yield return $"StorageQuota={StorageQuota}";
if (SimulateProofFailures != null) yield return $"SimulateProofFailures={SimulateProofFailures}";
if (MarketplaceConfig != null) yield return $"IsValidator={MarketplaceConfig.IsValidator}";
if (MarketplaceConfig != null) yield return $"MarketplaceSetup={MarketplaceConfig.MarketplaceSetup}";
}
}
public class MarketplaceSetup : IMarketplaceSetup
{
public bool IsStorageNode { get; private set; }
public bool IsValidator { get; private set; }
public IMarketplaceSetup AsStorageNode()
{
IsStorageNode = true;
return this;
}
public IMarketplaceSetup AsValidator()
{
IsValidator = true;
return this;
}
public override string ToString()
{
var result = "[(clientNode)"; // When marketplace is enabled, being a clientNode is implicit.
result += IsStorageNode ? "(storageNode)" : "()";
result += IsValidator ? "(validator)" : "()";
result += "]";
return result;
}
}
}

View File

@ -48,14 +48,14 @@ namespace CodexPlugin
return group;
}
public void BringOffline(CodexNodeGroup group)
public void BringOffline(CodexNodeGroup group, bool waitTillStopped)
{
Log($"Stopping {group.Describe()}...");
StopCrashWatcher(group);
var workflow = pluginTools.CreateWorkflow();
foreach (var c in group.Containers)
{
workflow.Stop(c);
workflow.Stop(c, waitTillStopped);
}
Log("Stopped.");
}

View File

@ -68,10 +68,17 @@ namespace CodexPlugin
"blockexcnetwork",
"blockexcnetworkpeer"
};
var contractClockTopics = new[]
{
"contracts",
"clock"
};
level = $"{level};" +
$"{CustomTopics.DiscV5.ToString()!.ToLowerInvariant()}:{string.Join(",", discV5Topics)};" +
$"{CustomTopics.Libp2p.ToString()!.ToLowerInvariant()}:{string.Join(",", libp2pTopics)}";
$"{CustomTopics.Libp2p.ToString()!.ToLowerInvariant()}:{string.Join(",", libp2pTopics)};" +
// Contract clock is always set to warn. It logs a trace every second.
$"{CodexLogLevel.Warn.ToString().ToLowerInvariant()}:{string.Join(",", contractClockTopics)}";
if (CustomTopics.BlockExchange != null)
{

View File

@ -1,16 +1,13 @@
using CodexContractsPlugin;
using Logging;
using Logging;
using Newtonsoft.Json;
using Utils;
using System.Numerics;
namespace CodexPlugin
{
public interface IMarketplaceAccess
{
string MakeStorageAvailable(ByteSize size, TestToken minPriceForTotalSpace, TestToken maxCollateral, TimeSpan maxDuration);
StoragePurchaseContract RequestStorage(ContentId contentId, TestToken pricePerSlotPerSecond, TestToken requiredCollateral, uint minRequiredNumberOfNodes, int proofProbability, TimeSpan duration);
StoragePurchaseContract RequestStorage(ContentId contentId, TestToken pricePerSlotPerSecond, TestToken requiredCollateral, uint minRequiredNumberOfNodes, int proofProbability, TimeSpan duration, TimeSpan expiry);
string MakeStorageAvailable(StorageAvailability availability);
StoragePurchaseContract RequestStorage(StoragePurchase purchase);
}
public class MarketplaceAccess : IMarketplaceAccess
@ -24,35 +21,12 @@ namespace CodexPlugin
this.codexAccess = codexAccess;
}
public StoragePurchaseContract RequestStorage(ContentId contentId, TestToken pricePerSlotPerSecond, TestToken requiredCollateral, uint minRequiredNumberOfNodes, int proofProbability, TimeSpan duration)
public StoragePurchaseContract RequestStorage(StoragePurchase purchase)
{
return RequestStorage(contentId, pricePerSlotPerSecond, requiredCollateral, minRequiredNumberOfNodes, proofProbability, duration, duration / 2);
}
purchase.Log(log);
var request = purchase.ToApiRequest();
public StoragePurchaseContract RequestStorage(ContentId contentId, TestToken pricePerSlotPerSecond, TestToken requiredCollateral, uint minRequiredNumberOfNodes, int proofProbability, TimeSpan duration, TimeSpan expiry)
{
var expireUtc = DateTimeOffset.UtcNow.ToUnixTimeSeconds() + expiry.TotalSeconds;
var request = new CodexSalesRequestStorageRequest
{
duration = ToDecInt(duration.TotalSeconds),
proofProbability = ToDecInt(proofProbability),
reward = ToDecInt(pricePerSlotPerSecond),
collateral = ToDecInt(requiredCollateral),
expiry = ToDecInt(expireUtc),
nodes = minRequiredNumberOfNodes,
tolerance = null,
};
Log($"Requesting storage for: {contentId.Id}... (" +
$"pricePerSlotPerSecond: {pricePerSlotPerSecond}, " +
$"requiredCollateral: {requiredCollateral}, " +
$"minRequiredNumberOfNodes: {minRequiredNumberOfNodes}, " +
$"proofProbability: {proofProbability}, " +
$"expiry: {Time.FormatDuration(expiry)}, " +
$"duration: {Time.FormatDuration(duration)})");
var response = codexAccess.RequestStorage(request, contentId.Id);
var response = codexAccess.RequestStorage(request, purchase.ContentId.Id);
if (response == "Purchasing not available" ||
response == "Expiry required" ||
@ -64,24 +38,13 @@ namespace CodexPlugin
Log($"Storage requested successfully. PurchaseId: '{response}'.");
return new StoragePurchaseContract(log, codexAccess, response, duration);
return new StoragePurchaseContract(log, codexAccess, response, purchase);
}
public string MakeStorageAvailable(ByteSize totalSpace, TestToken minPriceForTotalSpace, TestToken maxCollateral, TimeSpan maxDuration)
public string MakeStorageAvailable(StorageAvailability availability)
{
var request = new CodexSalesAvailabilityRequest
{
size = ToDecInt(totalSpace.SizeInBytes),
duration = ToDecInt(maxDuration.TotalSeconds),
maxCollateral = ToDecInt(maxCollateral),
minPrice = ToDecInt(minPriceForTotalSpace)
};
Log($"Making storage available... (" +
$"size: {totalSpace}, " +
$"minPriceForTotalSpace: {minPriceForTotalSpace}, " +
$"maxCollateral: {maxCollateral}, " +
$"maxDuration: {Time.FormatDuration(maxDuration)})");
availability.Log(log);
var request = availability.ToApiRequest();
var response = codexAccess.SalesAvailability(request);
@ -90,18 +53,6 @@ namespace CodexPlugin
return response.id;
}
private string ToDecInt(double d)
{
var i = new BigInteger(d);
return i.ToString("D");
}
public string ToDecInt(TestToken t)
{
var i = new BigInteger(t.Amount);
return i.ToString("D");
}
private void Log(string msg)
{
log.Log($"{codexAccess.Container.Name} {msg}");
@ -110,22 +61,16 @@ namespace CodexPlugin
public class MarketplaceUnavailable : IMarketplaceAccess
{
public StoragePurchaseContract RequestStorage(ContentId contentId, TestToken pricePerBytePerSecond, TestToken requiredCollateral, uint minRequiredNumberOfNodes, int proofProbability, TimeSpan duration)
{
Unavailable();
return null!;
}
public StoragePurchaseContract RequestStorage(ContentId contentId, TestToken pricePerSlotPerSecond, TestToken requiredCollateral, uint minRequiredNumberOfNodes, int proofProbability, TimeSpan duration, TimeSpan expiry)
public string MakeStorageAvailable(StorageAvailability availability)
{
Unavailable();
throw new NotImplementedException();
}
public string MakeStorageAvailable(ByteSize size, TestToken minPricePerBytePerSecond, TestToken maxCollateral, TimeSpan duration)
public StoragePurchaseContract RequestStorage(StoragePurchase purchase)
{
Unavailable();
return string.Empty;
throw new NotImplementedException();
}
private void Unavailable()
@ -139,22 +84,26 @@ namespace CodexPlugin
{
private readonly ILog log;
private readonly CodexAccess codexAccess;
private readonly TimeSpan gracePeriod = TimeSpan.FromSeconds(10);
private DateTime? contractStartUtc;
public StoragePurchaseContract(ILog log, CodexAccess codexAccess, string purchaseId, TimeSpan contractDuration)
public StoragePurchaseContract(ILog log, CodexAccess codexAccess, string purchaseId, StoragePurchase purchase)
{
this.log = log;
this.codexAccess = codexAccess;
PurchaseId = purchaseId;
ContractDuration = contractDuration;
Purchase = purchase;
}
public string PurchaseId { get; }
public TimeSpan ContractDuration { get; }
public StoragePurchase Purchase { get; }
public void WaitForStorageContractStarted()
{
WaitForStorageContractStarted(TimeSpan.FromSeconds(30));
var timeout = Purchase.Expiry + gracePeriod;
WaitForStorageContractState(timeout, "started");
contractStartUtc = DateTime.UtcNow;
}
public void WaitForStorageContractFinished()
@ -163,39 +112,14 @@ namespace CodexPlugin
{
WaitForStorageContractStarted();
}
var gracePeriod = TimeSpan.FromSeconds(10);
var currentContractTime = DateTime.UtcNow - contractStartUtc!.Value;
var timeout = (ContractDuration - currentContractTime) + gracePeriod;
var timeout = (Purchase.Duration - currentContractTime) + gracePeriod;
WaitForStorageContractState(timeout, "finished");
}
public void WaitForStorageContractFinished(ByteSize contractFileSize)
public CodexStoragePurchase GetPurchaseStatus(string purchaseId)
{
if (!contractStartUtc.HasValue)
{
WaitForStorageContractStarted(contractFileSize.ToTimeSpan());
}
var gracePeriod = TimeSpan.FromSeconds(10);
var currentContractTime = DateTime.UtcNow - contractStartUtc!.Value;
var timeout = (ContractDuration - currentContractTime) + gracePeriod;
WaitForStorageContractState(timeout, "finished");
}
/// <summary>
/// Wait for contract to start. Max timeout depends on contract filesize. Allows more time for larger files.
/// </summary>
public void WaitForStorageContractStarted(ByteSize contractFileSize)
{
var filesizeInMb = contractFileSize.SizeInBytes / (1024 * 1024);
var maxWaitTime = TimeSpan.FromSeconds(filesizeInMb * 10.0);
WaitForStorageContractStarted(maxWaitTime);
}
public void WaitForStorageContractStarted(TimeSpan timeout)
{
WaitForStorageContractState(timeout, "started");
contractStartUtc = DateTime.UtcNow;
return codexAccess.GetPurchaseStatus(purchaseId);
}
private void WaitForStorageContractState(TimeSpan timeout, string desiredState)
@ -228,10 +152,5 @@ namespace CodexPlugin
}
log.Log($"Contract '{desiredState}'.");
}
public CodexStoragePurchase GetPurchaseStatus(string purchaseId)
{
return codexAccess.GetPurchaseStatus(purchaseId);
}
}
}

View File

@ -5,19 +5,19 @@ namespace CodexPlugin
{
public class MarketplaceInitialConfig
{
public MarketplaceInitialConfig(IGethNode gethNode, ICodexContracts codexContracts, Ether initialEth, TestToken initialTokens, bool isValidator)
public MarketplaceInitialConfig(MarketplaceSetup marketplaceSetup, IGethNode gethNode, ICodexContracts codexContracts, Ether initialEth, TestToken initialTokens)
{
MarketplaceSetup = marketplaceSetup;
GethNode = gethNode;
CodexContracts = codexContracts;
InitialEth = initialEth;
InitialTokens = initialTokens;
IsValidator = isValidator;
}
public MarketplaceSetup MarketplaceSetup { get; }
public IGethNode GethNode { get; }
public ICodexContracts CodexContracts { get; }
public Ether InitialEth { get; }
public TestToken InitialTokens { get; }
public bool IsValidator { get; }
}
}

View File

@ -0,0 +1,101 @@
using CodexContractsPlugin;
using Logging;
using System.Numerics;
using Utils;
namespace CodexPlugin
{
public class StoragePurchase : MarketplaceType
{
public StoragePurchase(ContentId cid)
{
ContentId = cid;
}
public ContentId ContentId { get; set; }
public TestToken PricePerSlotPerSecond { get; set; } = 1.TestTokens();
public TestToken RequiredCollateral { get; set; } = 1.TestTokens();
public uint MinRequiredNumberOfNodes { get; set; }
public uint NodeFailureTolerance { get; set; }
public int ProofProbability { get; set; }
public TimeSpan Duration { get; set; }
public TimeSpan Expiry { get; set; }
public CodexSalesRequestStorageRequest ToApiRequest()
{
return new CodexSalesRequestStorageRequest
{
duration = ToDecInt(Duration.TotalSeconds),
proofProbability = ToDecInt(ProofProbability),
reward = ToDecInt(PricePerSlotPerSecond),
collateral = ToDecInt(RequiredCollateral),
expiry = ToDecInt(DateTimeOffset.UtcNow.ToUnixTimeSeconds() + Expiry.TotalSeconds),
nodes = MinRequiredNumberOfNodes,
tolerance = NodeFailureTolerance
};
}
public void Log(ILog log)
{
log.Log($"Requesting storage for: {ContentId.Id}... (" +
$"pricePerSlotPerSecond: {PricePerSlotPerSecond}, " +
$"requiredCollateral: {RequiredCollateral}, " +
$"minRequiredNumberOfNodes: {MinRequiredNumberOfNodes}, " +
$"nodeFailureTolerance: {NodeFailureTolerance}, " +
$"proofProbability: {ProofProbability}, " +
$"expiry: {Time.FormatDuration(Expiry)}, " +
$"duration: {Time.FormatDuration(Duration)})");
}
}
public class StorageAvailability : MarketplaceType
{
public StorageAvailability(ByteSize totalSpace, TimeSpan maxDuration, TestToken minPriceForTotalSpace, TestToken maxCollateral)
{
TotalSpace = totalSpace;
MaxDuration = maxDuration;
MinPriceForTotalSpace = minPriceForTotalSpace;
MaxCollateral = maxCollateral;
}
public ByteSize TotalSpace { get; }
public TimeSpan MaxDuration { get; }
public TestToken MinPriceForTotalSpace { get; }
public TestToken MaxCollateral { get; }
public CodexSalesAvailabilityRequest ToApiRequest()
{
return new CodexSalesAvailabilityRequest
{
size = ToDecInt(TotalSpace.SizeInBytes),
duration = ToDecInt(MaxDuration.TotalSeconds),
maxCollateral = ToDecInt(MaxCollateral),
minPrice = ToDecInt(MinPriceForTotalSpace)
};
}
public void Log(ILog log)
{
log.Log($"Making storage available... (" +
$"size: {TotalSpace}, " +
$"maxDuration: {Time.FormatDuration(MaxDuration)}, " +
$"minPriceForTotalSpace: {MinPriceForTotalSpace}, " +
$"maxCollateral: {MaxCollateral})");
}
}
public abstract class MarketplaceType
{
protected string ToDecInt(double d)
{
var i = new BigInteger(d);
return i.ToString("D");
}
protected string ToDecInt(TestToken t)
{
var i = new BigInteger(t.Amount);
return i.ToString("D");
}
}
}

View File

@ -12,7 +12,7 @@ Nethereum: v4.14.0
These are test assemblies that use NUnit3 to perform tests against transient Codex nodes.
Read more [HERE](/Tests/CodexTests/README.md)
## Tests/ContinousTests
## Tests/ContinuousTests
A console application that runs tests in an endless loop against a persistent deployment of Codex nodes.
Read more [HERE](/Tests/CodexContinuousTests/README.md)

View File

@ -4,7 +4,7 @@ public class Program
{
public static void Main(string[] args)
{
Console.WriteLine("Codex Continous-Test-Runner.");
Console.WriteLine("Codex Continuous-Test-Runner.");
var runner = new ContinuousTestRunner(args, Cancellation.Cts.Token);

View File

@ -13,7 +13,7 @@ Report for: 08-2023
(Stopped: The number of tests that can successfully run on the test-net is not high enough to justify the cost of leaving it running.)
## Deployment Configuration
Continous Test-net is deployed to the kubernetes cluster with the following configuration:
Continuous Test-net is deployed to the kubernetes cluster with the following configuration:
5x Codex Nodes:
- Log-level: Trace

View File

@ -11,7 +11,7 @@ Report for: 07-2023
(Faulted: Tests fail with such frequency that the information gathered does not justify the cost of leaving the test-net running.)
## Deployment Configuration
Continous Test-net is deployed to the kubernetes cluster with the following configuration:
Continuous Test-net is deployed to the kubernetes cluster with the following configuration:
5x Codex Nodes:
- Log-level: Trace

View File

@ -11,7 +11,7 @@ Report for: 09-2023
(Stopped: The number of tests that can successfully run on the test-net is not high enough to justify the cost of leaving it running.)
## Deployment Configuration
Continous Test-net is deployed to the kubernetes cluster with the following configuration:
Continuous Test-net is deployed to the kubernetes cluster with the following configuration:
5x Codex Nodes:
- Log-level: Trace

View File

@ -21,7 +21,9 @@ namespace CodexTests.BasicTests
var group = AddCodex(5, o => o
.EnableMetrics()
.EnableMarketplace(geth, contract, 10.Eth(), 100000.TestTokens(), isValidator: true)
.EnableMarketplace(geth, contract, 10.Eth(), 100000.TestTokens(), s => s
.AsStorageNode()
.AsValidator())
.WithBlockTTL(TimeSpan.FromMinutes(5))
.WithBlockMaintenanceInterval(TimeSpan.FromSeconds(10))
.WithBlockMaintenanceNumber(100)
@ -31,13 +33,16 @@ namespace CodexTests.BasicTests
var rc = Ci.DeployMetricsCollector(nodes);
var availability = new StorageAvailability(
totalSpace: 500.MB(),
maxDuration: TimeSpan.FromMinutes(5),
minPriceForTotalSpace: 500.TestTokens(),
maxCollateral: 1024.TestTokens()
);
foreach (var node in nodes)
{
node.Marketplace.MakeStorageAvailable(
size: 500.MB(),
minPriceForTotalSpace: 500.TestTokens(),
maxCollateral: 1024.TestTokens(),
maxDuration: TimeSpan.FromMinutes(5));
node.Marketplace.MakeStorageAvailable(availability);
}
var endTime = DateTime.UtcNow + TimeSpan.FromHours(10);

View File

@ -17,7 +17,7 @@ namespace CodexTests.BasicTests
public void BotRewardTest()
{
var sellerInitialBalance = 234.TestTokens();
var buyerInitialBalance = 1000.TestTokens();
var buyerInitialBalance = 100000.TestTokens();
var fileSize = 10.MB();
var geth = Ci.StartGethNode(s => s.IsMiner().WithName("disttest-geth"));
@ -27,15 +27,19 @@ namespace CodexTests.BasicTests
.WithName("Seller")
.WithLogLevel(CodexLogLevel.Trace, new CodexLogCustomTopics(CodexLogLevel.Error, CodexLogLevel.Error, CodexLogLevel.Warn))
.WithStorageQuota(11.GB())
.EnableMarketplace(geth, contracts, initialEth: 10.Eth(), initialTokens: sellerInitialBalance, isValidator: true)
.WithSimulateProofFailures(failEveryNProofs: 3));
.EnableMarketplace(geth, contracts, initialEth: 10.Eth(), initialTokens: sellerInitialBalance, s => s
.AsStorageNode()
.AsValidator()));
AssertBalance(contracts, seller, Is.EqualTo(sellerInitialBalance));
seller.Marketplace.MakeStorageAvailable(
size: 10.GB(),
var availability = new StorageAvailability(
totalSpace: 10.GB(),
maxDuration: TimeSpan.FromMinutes(30),
minPriceForTotalSpace: 1.TestTokens(),
maxCollateral: 20.TestTokens(),
maxDuration: TimeSpan.FromMinutes(3));
maxCollateral: 20.TestTokens()
);
seller.Marketplace.MakeStorageAvailable(availability);
var testFile = GenerateTestFile(fileSize);
@ -83,20 +87,27 @@ namespace CodexTests.BasicTests
var i = 0;
var contentId = buyer.UploadFile(testFile);
var purchaseContract = buyer.Marketplace.RequestStorage(contentId,
pricePerSlotPerSecond: 2.TestTokens(),
requiredCollateral: 10.TestTokens(),
minRequiredNumberOfNodes: 1,
proofProbability: 5,
duration: TimeSpan.FromMinutes(1));
purchaseContract.WaitForStorageContractStarted(fileSize);
var purchase = new StoragePurchase(contentId)
{
PricePerSlotPerSecond = 2.TestTokens(),
RequiredCollateral = 10.TestTokens(),
MinRequiredNumberOfNodes = 5,
NodeFailureTolerance = 2,
ProofProbability = 5,
Duration = TimeSpan.FromMinutes(5),
Expiry = TimeSpan.FromMinutes(4)
};
var purchaseContract = buyer.Marketplace.RequestStorage(purchase);
purchaseContract.WaitForStorageContractStarted();
AssertBalance(contracts, seller, Is.LessThan(sellerInitialBalance), "Collateral was not placed.");
var request = GetOnChainStorageRequest(contracts);
AssertStorageRequest(request, contracts, buyer);
AssertSlotFilledEvents(contracts, request, seller);
AssertStorageRequest(request, purchase, contracts, buyer);
AssertSlotFilledEvents(contracts, purchase, request, seller);
AssertContractSlot(contracts, request, 0, seller);
purchaseContract.WaitForStorageContractFinished();
@ -104,8 +115,6 @@ namespace CodexTests.BasicTests
AssertBalance(contracts, seller, Is.GreaterThan(sellerInitialBalance), "Seller was not paid for storage.");
AssertBalance(contracts, buyer, Is.LessThan(buyerInitialBalance), "Buyer was not charged for storage.");
Assert.That(contracts.GetRequestState(request), Is.EqualTo(RequestState.Finished));
// waiting for block retransmit fix: CheckLogForErrors(seller, buyer);
}
[Test]
@ -125,24 +134,29 @@ namespace CodexTests.BasicTests
Assert.That(discN, Is.LessThan(bootN));
}
private void AssertSlotFilledEvents(ICodexContracts contracts, Request request, ICodexNode seller)
private void AssertSlotFilledEvents(ICodexContracts contracts, StoragePurchase purchase, Request request, ICodexNode seller)
{
// Expect 1 fulfilled event for the purchase.
var requestFulfilledEvents = contracts.GetRequestFulfilledEvents(GetTestRunTimeRange());
Assert.That(requestFulfilledEvents.Length, Is.EqualTo(1));
CollectionAssert.AreEqual(request.RequestId, requestFulfilledEvents[0].RequestId);
// Expect 1 filled-slot event for each slot in the purchase.
var filledSlotEvents = contracts.GetSlotFilledEvents(GetTestRunTimeRange());
Assert.That(filledSlotEvents.Length, Is.EqualTo(1));
var filledSlotEvent = filledSlotEvents.Single();
Assert.That(filledSlotEvent.SlotIndex.IsZero);
Assert.That(filledSlotEvent.RequestId.ToHex(), Is.EqualTo(request.RequestId.ToHex()));
Assert.That(filledSlotEvent.Host, Is.EqualTo(seller.EthAddress));
Assert.That(filledSlotEvents.Length, Is.EqualTo(purchase.MinRequiredNumberOfNodes));
for (var i = 0; i < purchase.MinRequiredNumberOfNodes; i++)
{
var filledSlotEvent = filledSlotEvents.Single(e => e.SlotIndex == i);
Assert.That(filledSlotEvent.RequestId.ToHex(), Is.EqualTo(request.RequestId.ToHex()));
Assert.That(filledSlotEvent.Host, Is.EqualTo(seller.EthAddress));
}
}
private void AssertStorageRequest(Request request, ICodexContracts contracts, ICodexNode buyer)
private void AssertStorageRequest(Request request, StoragePurchase purchase, ICodexContracts contracts, ICodexNode buyer)
{
Assert.That(contracts.GetRequestState(request), Is.EqualTo(RequestState.Started));
Assert.That(request.ClientAddress, Is.EqualTo(buyer.EthAddress));
Assert.That(request.Ask.Slots, Is.EqualTo(1));
Assert.That(request.Ask.Slots, Is.EqualTo(purchase.MinRequiredNumberOfNodes));
}
private Request GetOnChainStorageRequest(ICodexContracts contracts)

View File

@ -21,7 +21,7 @@ namespace CodexTests.BasicTests
{
var primary = Ci.StartCodexNode();
primary.Stop();
primary.Stop(waitTillStopped: true);
primary = Ci.StartCodexNode();

View File

@ -10,12 +10,10 @@ namespace CodexTests.BasicTests
[Test]
public void TwoClientTest()
{
var group = Ci.StartCodexNodes(2);
var uploader = AddCodex(s => s.WithName("Uploader"));
var downloader = AddCodex(s => s.WithName("Downloader").WithBootstrapNode(uploader));
var primary = group[0];
var secondary = group[1];
PerformTwoClientTest(primary, secondary);
PerformTwoClientTest(uploader, downloader);
}
[Test]
@ -28,29 +26,27 @@ namespace CodexTests.BasicTests
return;
}
var primary = Ci.StartCodexNode(s => s.At(locations.Get(0)));
var secondary = Ci.StartCodexNode(s => s.At(locations.Get(1)));
var uploader = Ci.StartCodexNode(s => s.WithName("Uploader").At(locations.Get(0)));
var downloader = Ci.StartCodexNode(s => s.WithName("Downloader").WithBootstrapNode(uploader).At(locations.Get(1)));
PerformTwoClientTest(primary, secondary);
PerformTwoClientTest(uploader, downloader);
}
private void PerformTwoClientTest(ICodexNode primary, ICodexNode secondary)
private void PerformTwoClientTest(ICodexNode uploader, ICodexNode downloader)
{
PerformTwoClientTest(primary, secondary, 1.MB());
PerformTwoClientTest(uploader, downloader, 10.MB());
}
private void PerformTwoClientTest(ICodexNode primary, ICodexNode secondary, ByteSize size)
private void PerformTwoClientTest(ICodexNode uploader, ICodexNode downloader, ByteSize size)
{
primary.ConnectToPeer(secondary);
var testFile = GenerateTestFile(size);
var contentId = primary.UploadFile(testFile);
var contentId = uploader.UploadFile(testFile);
var downloadedFile = secondary.DownloadContent(contentId);
var downloadedFile = downloader.DownloadContent(contentId);
testFile.AssertIsEqual(downloadedFile);
CheckLogForErrors(primary, secondary);
CheckLogForErrors(uploader, downloader);
}
}
}

View File

@ -92,6 +92,7 @@ namespace CodexTests
public void CheckLogForErrors(ICodexNode node)
{
Log($"Checking {node.GetName()} log for errors.");
var log = Ci.DownloadLog(node);
log.AssertLogDoesNotContain("Block validation failed");

View File

@ -7,7 +7,7 @@ namespace DistTestCore
{
public static void AssertLogContains(this IDownloadedLog log, string expectedString)
{
Assert.That(log.DoesLogContain(expectedString), $"Did not find '{expectedString}' in log.");
Assert.That(log.GetLinesContaining(expectedString).Any(), $"Did not find '{expectedString}' in log.");
}
public static void AssertLogDoesNotContain(this IDownloadedLog log, params string[] unexpectedStrings)
@ -15,9 +15,10 @@ namespace DistTestCore
var errors = new List<string>();
foreach (var str in unexpectedStrings)
{
if (log.DoesLogContain(str))
var lines = log.GetLinesContaining(str);
foreach (var line in lines)
{
errors.Add($"Did find '{str}' in log.");
errors.Add($"Found '{str}' in line '{line}'.");
}
}
CollectionAssert.IsEmpty(errors);

View File

@ -7,12 +7,14 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Moq" Version="4.20.70" />
<PackageReference Include="nunit" Version="3.13.3" />
<PackageReference Include="NUnit3TestAdapter" Version="4.4.2" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.5.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\Framework\NethereumWorkflow\NethereumWorkflow.csproj" />
<ProjectReference Include="..\..\Framework\Utils\Utils.csproj" />
</ItemGroup>

View File

@ -0,0 +1,135 @@
using Logging;
using Moq;
using NethereumWorkflow;
using NethereumWorkflow.BlockUtils;
using NUnit.Framework;
namespace FrameworkTests.NethereumWorkflow
{
[TestFixture]
public class BlockTimeFinderTests
{
private readonly Mock<ILog> log = new Mock<ILog>();
private Mock<IWeb3Blocks> web3 = new Mock<IWeb3Blocks>();
private Dictionary<ulong, Block> blocks = new Dictionary<ulong, Block>();
private BlockTimeFinder finder = null!;
private void SetupBlockchain()
{
var start = DateTime.UtcNow.AddDays(-1).AddSeconds(-30);
blocks = new Dictionary<ulong, Block>();
for (ulong i = 0; i < 30; i++)
{
ulong d = 100 + i;
blocks.Add(d, new Block(d, start + TimeSpan.FromSeconds(i * 2)));
}
}
[SetUp]
public void SetUp()
{
SetupBlockchain();
web3 = new Mock<IWeb3Blocks>();
web3.Setup(w => w.GetCurrentBlockNumber()).Returns(blocks.Keys.Max());
web3.Setup(w => w.GetTimestampForBlock(It.IsAny<ulong>())).Returns<ulong>(d =>
{
if (blocks.ContainsKey(d)) return blocks[d].Time;
return null;
});
finder = new BlockTimeFinder(new BlockCache(), web3.Object, log.Object);
}
[Test]
public void FindsMiddleOfChain()
{
var b1 = blocks[115];
var b2 = blocks[116];
var momentBetween = b1.JustAfter;
var b1Number = finder.GetHighestBlockNumberBefore(momentBetween);
var b2Number = finder.GetLowestBlockNumberAfter(momentBetween);
Assert.That(b1Number, Is.EqualTo(b1.Number));
Assert.That(b2Number, Is.EqualTo(b2.Number));
}
[Test]
public void FindsFrontOfChain_Lowest()
{
var first = blocks.First().Value;
var firstNumber = finder.GetLowestBlockNumberAfter(first.JustBefore);
Assert.That(firstNumber, Is.EqualTo(first.Number));
}
[Test]
public void FindsFrontOfChain_Highest()
{
var first = blocks.First().Value;
var firstNumber = finder.GetHighestBlockNumberBefore(first.JustAfter);
Assert.That(firstNumber, Is.EqualTo(first.Number));
}
[Test]
public void FindsTailOfChain_Lowest()
{
var last = blocks.Last().Value;
var lastNumber = finder.GetLowestBlockNumberAfter(last.JustBefore);
Assert.That(lastNumber, Is.EqualTo(last.Number));
}
[Test]
public void FindsTailOfChain_Highest()
{
var last = blocks.Last().Value;
var lastNumber = finder.GetHighestBlockNumberBefore(last.JustAfter);
Assert.That(lastNumber, Is.EqualTo(last.Number));
}
[Test]
public void FailsToFindBlockBeforeFrontOfChain()
{
var first = blocks.First().Value;
var notFound = finder.GetHighestBlockNumberBefore(first.Time);
Assert.That(notFound, Is.Null);
}
[Test]
public void FailsToFindBlockAfterTailOfChain()
{
var last = blocks.Last().Value;
var notFound = finder.GetLowestBlockNumberAfter(last.Time);
Assert.That(notFound, Is.Null);
}
}
public class Block
{
public Block(ulong number, DateTime time)
{
Number = number;
Time = time;
}
public ulong Number { get; }
public DateTime Time { get; }
public DateTime JustBefore { get { return Time.AddSeconds(-1); } }
public DateTime JustAfter { get { return Time.AddSeconds(1); } }
}
}

View File

@ -41,7 +41,11 @@ namespace CodexNetDeployer
if (config.ShouldMakeStorageAvailable)
{
s.EnableMarketplace(gethNode, contracts, 100.Eth(), config.InitialTestTokens.TestTokens(), validatorsLeft > 0);
s.EnableMarketplace(gethNode, contracts, 100.Eth(), config.InitialTestTokens.TestTokens(), s =>
{
if (validatorsLeft > 0) s.AsValidator();
if (config.ShouldMakeStorageAvailable) s.AsStorageNode();
});
}
if (bootstrapNode != null) s.WithBootstrapNode(bootstrapNode);
@ -63,11 +67,14 @@ namespace CodexNetDeployer
if (config.ShouldMakeStorageAvailable)
{
var response = codexNode.Marketplace.MakeStorageAvailable(
size: config.StorageSell!.Value.MB(),
var availability = new StorageAvailability(
totalSpace: config.StorageSell!.Value.MB(),
maxDuration: TimeSpan.FromSeconds(config.MaxDuration),
minPriceForTotalSpace: config.MinPrice.TestTokens(),
maxCollateral: config.MaxCollateral.TestTokens(),
maxDuration: TimeSpan.FromSeconds(config.MaxDuration));
maxCollateral: config.MaxCollateral.TestTokens()
);
var response = codexNode.Marketplace.MakeStorageAvailable(availability);
if (!string.IsNullOrEmpty(response))
{