Merge branch 'feature/marketplace-contracts'

benbierens 2023-04-19 12:58:43 +02:00
commit baf605464c
No known key found for this signature in database
GPG Key ID: FE44815D96D0A1AA
100 changed files with 3706 additions and 1719 deletions

View File

@ -1,17 +0,0 @@
namespace CodexDistTestCore
{
public class CodexDebugResponse
{
public string id { get; set; } = string.Empty;
public string[] addrs { get; set; } = new string[0];
public string repo { get; set; } = string.Empty;
public string spr { get; set; } = string.Empty;
public CodexDebugVersionResponse codex { get; set; } = new();
}
public class CodexDebugVersionResponse
{
public string version { get; set; } = string.Empty;
public string revision { get; set; } = string.Empty;
}
}

View File

@ -1,17 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<RootNamespace>CodexDistTestCore</RootNamespace>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="KubernetesClient" Version="10.1.4" />
<PackageReference Include="nunit" Version="3.13.3" />
<PackageReference Include="NUnit3TestAdapter" Version="4.4.2" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.5.0" />
</ItemGroup>
</Project>

View File

@ -1,84 +0,0 @@
namespace CodexDistTestCore
{
public class CodexNodeContainer
{
public CodexNodeContainer(string name, int servicePort, string servicePortName, int apiPort, string containerPortName, int discoveryPort, int listenPort, string dataDir, int metricsPort)
{
Name = name;
ServicePort = servicePort;
ServicePortName = servicePortName;
ApiPort = apiPort;
ContainerPortName = containerPortName;
DiscoveryPort = discoveryPort;
ListenPort = listenPort;
DataDir = dataDir;
MetricsPort = metricsPort;
}
public string Name { get; }
public int ServicePort { get; }
public string ServicePortName { get; }
public int ApiPort { get; }
public string ContainerPortName { get; }
public int DiscoveryPort { get; }
public int ListenPort { get; }
public string DataDir { get; }
public int MetricsPort { get; }
}
public class CodexGroupNumberSource
{
private readonly NumberSource codexNodeGroupNumberSource = new NumberSource(0);
private readonly NumberSource groupContainerNameSource = new NumberSource(1);
private readonly NumberSource servicePortSource = new NumberSource(30001);
public int GetNextCodexNodeGroupNumber()
{
return codexNodeGroupNumberSource.GetNextNumber();
}
public string GetNextServicePortName()
{
return $"node{groupContainerNameSource.GetNextNumber()}";
}
public int GetNextServicePort()
{
return servicePortSource.GetNextNumber();
}
}
public class CodexNodeContainerFactory
{
private readonly NumberSource containerNameSource = new NumberSource(1);
private readonly NumberSource codexPortSource = new NumberSource(8080);
private readonly CodexGroupNumberSource groupContainerFactory;
public CodexNodeContainerFactory(CodexGroupNumberSource groupContainerFactory)
{
this.groupContainerFactory = groupContainerFactory;
}
public CodexNodeContainer CreateNext(OfflineCodexNodes offline)
{
var n = containerNameSource.GetNextNumber();
return new CodexNodeContainer(
name: $"codex-node{n}",
servicePort: groupContainerFactory.GetNextServicePort(),
servicePortName: groupContainerFactory.GetNextServicePortName(),
apiPort: codexPortSource.GetNextNumber(),
containerPortName: $"api-{n}",
discoveryPort: codexPortSource.GetNextNumber(),
listenPort: codexPortSource.GetNextNumber(),
dataDir: $"datadir{n}",
metricsPort: GetMetricsPort(offline)
);
}
private int GetMetricsPort(OfflineCodexNodes offline)
{
if (offline.MetricsEnabled) return codexPortSource.GetNextNumber();
return 0;
}
}
}

View File

@ -1,111 +0,0 @@
using CodexDistTestCore.Config;
using k8s.Models;
using System.Collections;
namespace CodexDistTestCore
{
public interface ICodexNodeGroup : IEnumerable<IOnlineCodexNode>
{
IOfflineCodexNodes BringOffline();
IOnlineCodexNode this[int index] { get; }
}
public class CodexNodeGroup : ICodexNodeGroup
{
private readonly TestLog log;
private readonly IK8sManager k8SManager;
public CodexNodeGroup(TestLog log, int orderNumber, OfflineCodexNodes origin, IK8sManager k8SManager, OnlineCodexNode[] nodes)
{
this.log = log;
OrderNumber = orderNumber;
Origin = origin;
this.k8SManager = k8SManager;
Nodes = nodes;
foreach (var n in nodes) n.Group = this;
}
public IOnlineCodexNode this[int index]
{
get
{
return Nodes[index];
}
}
public IOfflineCodexNodes BringOffline()
{
return k8SManager.BringOffline(this);
}
public int OrderNumber { get; }
public OfflineCodexNodes Origin { get; }
public OnlineCodexNode[] Nodes { get; }
public V1Deployment? Deployment { get; set; }
public V1Service? Service { get; set; }
public PodInfo? PodInfo { get; set; }
public CodexNodeContainer[] GetContainers()
{
return Nodes.Select(n => n.Container).ToArray();
}
public IEnumerator<IOnlineCodexNode> GetEnumerator()
{
return Nodes.Cast<IOnlineCodexNode>().GetEnumerator();
}
IEnumerator IEnumerable.GetEnumerator()
{
return Nodes.GetEnumerator();
}
public V1ObjectMeta GetServiceMetadata()
{
return new V1ObjectMeta
{
Name = "codex-test-entrypoint-" + OrderNumber,
NamespaceProperty = K8sCluster.K8sNamespace
};
}
public V1ObjectMeta GetDeploymentMetadata()
{
return new V1ObjectMeta
{
Name = "codex-test-node-" + OrderNumber,
NamespaceProperty = K8sCluster.K8sNamespace
};
}
public CodexNodeLog DownloadLog(IOnlineCodexNode node)
{
var logDownloader = new PodLogDownloader(log, k8SManager);
var n = (OnlineCodexNode)node;
return logDownloader.DownloadLog(n);
}
public Dictionary<string, string> GetSelector()
{
return new Dictionary<string, string> { { "codex-test-node", "dist-test-" + OrderNumber } };
}
public string Describe()
{
return $"CodexNodeGroup#{OrderNumber}-{Origin.Describe()}";
}
}
public class PodInfo
{
public PodInfo(string name, string ip)
{
Name = name;
Ip = ip;
}
public string Name { get; }
public string Ip { get; }
}
}

View File

@ -1,65 +0,0 @@
using k8s.Models;
namespace CodexDistTestCore.Config
{
public class CodexDockerImage
{
public string GetImageTag()
{
return "thatbenbierens/nim-codex:sha-b204837";
}
public string GetExpectedImageRevision()
{
return "b20483";
}
public List<V1EnvVar> CreateEnvironmentVariables(OfflineCodexNodes node, CodexNodeContainer environment)
{
var formatter = new EnvFormatter();
formatter.Create(node, environment);
return formatter.Result;
}
private class EnvFormatter
{
public List<V1EnvVar> Result { get; } = new List<V1EnvVar>();
public void Create(OfflineCodexNodes node, CodexNodeContainer container)
{
AddVar("API_PORT", container.ApiPort.ToString());
AddVar("DATA_DIR", container.DataDir);
AddVar("DISC_PORT", container.DiscoveryPort.ToString());
AddVar("LISTEN_ADDRS", $"/ip4/0.0.0.0/tcp/{container.ListenPort}");
if (node.BootstrapNode != null)
{
var debugInfo = node.BootstrapNode.GetDebugInfo();
AddVar("BOOTSTRAP_SPR", debugInfo.spr);
}
if (node.LogLevel != null)
{
AddVar("LOG_LEVEL", node.LogLevel.ToString()!.ToUpperInvariant());
}
if (node.StorageQuota != null)
{
AddVar("STORAGE_QUOTA", node.StorageQuota.SizeInBytes.ToString()!);
}
if (node.MetricsEnabled)
{
AddVar("METRICS_ADDR", "0.0.0.0");
AddVar("METRICS_PORT", container.MetricsPort.ToString());
}
}
private void AddVar(string key, string value)
{
Result.Add(new V1EnvVar
{
Name = key,
Value = value
});
}
}
}
}

View File

@ -1,7 +0,0 @@
namespace CodexDistTestCore.Config
{
public class FileManagerConfig
{
public const string Folder = "TestDataFiles";
}
}

View File

@ -1,40 +0,0 @@
using k8s;
namespace CodexDistTestCore.Config
{
public class K8sCluster
{
public const string K8sNamespace = "codex-test-namespace";
private const string KubeConfigFile = "C:\\kube\\config";
private readonly Dictionary<Location, string> K8sNodeLocationMap = new Dictionary<Location, string>
{
{ Location.BensLaptop, "worker01" },
{ Location.BensOldGamingMachine, "worker02" },
};
private KubernetesClientConfiguration? config;
public KubernetesClientConfiguration GetK8sClientConfig()
{
if (config != null) return config;
//config = KubernetesClientConfiguration.BuildConfigFromConfigFile(KubeConfigFile);
config = KubernetesClientConfiguration.BuildDefaultConfig();
return config;
}
public string GetIp()
{
var c = GetK8sClientConfig();
var host = c.Host.Replace("https://", "");
return host.Substring(0, host.IndexOf(':'));
}
public string GetNodeLabelForLocation(Location location)
{
if (location == Location.Unspecified) return string.Empty;
return K8sNodeLocationMap[location];
}
}
}

View File

@ -1,7 +0,0 @@
namespace CodexDistTestCore.Config
{
public class LogConfig
{
public const string LogRoot = "D:/CodexTestLogs";
}
}

View File

@ -1,120 +0,0 @@
using CodexDistTestCore.Config;
using NUnit.Framework;
namespace CodexDistTestCore
{
[SetUpFixture]
public abstract class DistTest
{
private TestLog log = null!;
private FileManager fileManager = null!;
private K8sManager k8sManager = null!;
[OneTimeSetUp]
public void GlobalSetup()
{
// Previous test run may have been interrupted.
// Begin by cleaning everything up.
log = new TestLog();
fileManager = new FileManager(log);
k8sManager = new K8sManager(log, fileManager);
try
{
k8sManager.DeleteAllResources();
fileManager.DeleteAllTestFiles();
}
catch (Exception ex)
{
GlobalTestFailure.HasFailed = true;
log.Error($"Global setup cleanup failed with: {ex}");
throw;
}
log.Log("Global setup cleanup successful");
}
[SetUp]
public void SetUpDistTest()
{
if (GlobalTestFailure.HasFailed)
{
Assert.Inconclusive("Skip test: Previous test failed during clean up.");
}
else
{
var dockerImage = new CodexDockerImage();
log = new TestLog();
log.Log($"Using docker image '{dockerImage.GetImageTag()}'");
fileManager = new FileManager(log);
k8sManager = new K8sManager(log, fileManager);
}
}
[TearDown]
public void TearDownDistTest()
{
try
{
log.EndTest();
IncludeLogsAndMetricsOnTestFailure();
k8sManager.DeleteAllResources();
fileManager.DeleteAllTestFiles();
}
catch (Exception ex)
{
log.Error("Cleanup failed: " + ex.Message);
GlobalTestFailure.HasFailed = true;
}
}
public TestFile GenerateTestFile(ByteSize size)
{
return fileManager.GenerateTestFile(size);
}
public IOfflineCodexNodes SetupCodexNodes(int numberOfNodes)
{
return new OfflineCodexNodes(k8sManager, numberOfNodes);
}
private void IncludeLogsAndMetricsOnTestFailure()
{
var result = TestContext.CurrentContext.Result;
if (result.Outcome.Status == NUnit.Framework.Interfaces.TestStatus.Failed)
{
if (IsDownloadingLogsAndMetricsEnabled())
{
log.Log("Downloading all CodexNode logs and metrics because of test failure...");
k8sManager.ForEachOnlineGroup(DownloadLogs);
k8sManager.DownloadAllMetrics();
}
else
{
log.Log("Skipping download of all CodexNode logs and metrics due to [DontDownloadLogsAndMetricsOnFailure] attribute.");
}
}
}
private void DownloadLogs(CodexNodeGroup group)
{
foreach (var node in group)
{
var downloader = new PodLogDownloader(log, k8sManager);
var n = (OnlineCodexNode)node;
downloader.DownloadLog(n);
}
}
private bool IsDownloadingLogsAndMetricsEnabled()
{
var testProperties = TestContext.CurrentContext.Test.Properties;
return !testProperties.ContainsKey(PodLogDownloader.DontDownloadLogsOnFailureKey);
}
}
public static class GlobalTestFailure
{
public static bool HasFailed { get; set; } = false;
}
}

View File

@ -1,120 +0,0 @@
namespace CodexDistTestCore
{
public interface IK8sManager
{
ICodexNodeGroup BringOnline(OfflineCodexNodes node);
IOfflineCodexNodes BringOffline(ICodexNodeGroup node);
void FetchPodLog(OnlineCodexNode node, IPodLogHandler logHandler);
}
public class K8sManager : IK8sManager
{
private readonly CodexGroupNumberSource codexGroupNumberSource = new CodexGroupNumberSource();
private readonly List<CodexNodeGroup> onlineCodexNodeGroups = new List<CodexNodeGroup>();
private readonly KnownK8sPods knownPods = new KnownK8sPods();
private readonly TestLog log;
private readonly IFileManager fileManager;
private readonly MetricsAggregator metricsAggregator;
public K8sManager(TestLog log, IFileManager fileManager)
{
this.log = log;
this.fileManager = fileManager;
metricsAggregator = new MetricsAggregator(log, this);
}
public ICodexNodeGroup BringOnline(OfflineCodexNodes offline)
{
var online = CreateOnlineCodexNodes(offline);
K8s(k => k.BringOnline(online, offline));
log.Log($"{online.Describe()} online.");
if (offline.MetricsEnabled)
{
BringOnlineMetrics(online);
}
return online;
}
public IOfflineCodexNodes BringOffline(ICodexNodeGroup node)
{
var online = GetAndRemoveActiveNodeFor(node);
K8s(k => k.BringOffline(online));
log.Log($"{online.Describe()} offline.");
return online.Origin;
}
public void DeleteAllResources()
{
K8s(k => k.DeleteAllResources());
}
public void ForEachOnlineGroup(Action<CodexNodeGroup> action)
{
foreach (var group in onlineCodexNodeGroups) action(group);
}
public void FetchPodLog(OnlineCodexNode node, IPodLogHandler logHandler)
{
K8s(k => k.FetchPodLog(node, logHandler));
}
public PrometheusInfo BringOnlinePrometheus(string config, int prometheusNumber)
{
var spec = new K8sPrometheusSpecs(codexGroupNumberSource.GetNextServicePort(), prometheusNumber, config);
PrometheusInfo? info = null;
K8s(k => info = k.BringOnlinePrometheus(spec));
return info!;
}
public void DownloadAllMetrics()
{
metricsAggregator.DownloadAllMetrics();
}
private void BringOnlineMetrics(CodexNodeGroup group)
{
var onlineNodes = group.Nodes.Cast<OnlineCodexNode>().ToArray();
metricsAggregator.BeginCollectingMetricsFor(onlineNodes);
}
private CodexNodeGroup CreateOnlineCodexNodes(OfflineCodexNodes offline)
{
var containers = CreateContainers(offline);
var online = containers.Select(c => new OnlineCodexNode(log, fileManager, c)).ToArray();
var result = new CodexNodeGroup(log, codexGroupNumberSource.GetNextCodexNodeGroupNumber(), offline, this, online);
onlineCodexNodeGroups.Add(result);
return result;
}
private CodexNodeContainer[] CreateContainers(OfflineCodexNodes offline)
{
var factory = new CodexNodeContainerFactory(codexGroupNumberSource);
var containers = new List<CodexNodeContainer>();
for (var i = 0; i < offline.NumberOfNodes; i++) containers.Add(factory.CreateNext(offline));
return containers.ToArray();
}
private CodexNodeGroup GetAndRemoveActiveNodeFor(ICodexNodeGroup node)
{
var n = (CodexNodeGroup)node;
onlineCodexNodeGroups.Remove(n);
return n;
}
private void K8s(Action<K8sOperations> action)
{
var k8s = new K8sOperations(knownPods);
action(k8s);
k8s.Close();
}
}
}

View File

@ -1,324 +0,0 @@
using CodexDistTestCore.Config;
using k8s;
using k8s.KubeConfigModels;
using k8s.Models;
using NUnit.Framework;
namespace CodexDistTestCore
{
public class K8sOperations
{
private readonly CodexDockerImage dockerImage = new CodexDockerImage();
private readonly K8sCluster k8sCluster = new K8sCluster();
private readonly Kubernetes client;
private readonly KnownK8sPods knownPods;
public K8sOperations(KnownK8sPods knownPods)
{
this.knownPods = knownPods;
client = new Kubernetes(k8sCluster.GetK8sClientConfig());
}
public void Close()
{
client.Dispose();
}
public void BringOnline(CodexNodeGroup online, OfflineCodexNodes offline)
{
EnsureTestNamespace();
CreateDeployment(online, offline);
CreateService(online);
WaitUntilOnline(online);
FetchPodInfo(online);
}
public void BringOffline(CodexNodeGroup online)
{
var deploymentName = online.Deployment.Name();
DeleteDeployment(online);
DeleteService(online);
WaitUntilOffline(deploymentName);
}
public void DeleteAllResources()
{
DeleteNamespace();
WaitUntilZeroPods();
WaitUntilNamespaceDeleted();
}
public void FetchPodLog(OnlineCodexNode node, IPodLogHandler logHandler)
{
var stream = client.ReadNamespacedPodLog(node.Group.PodInfo!.Name, K8sNamespace, node.Container.Name);
logHandler.Log(stream);
}
public PrometheusInfo BringOnlinePrometheus(K8sPrometheusSpecs spec)
{
EnsureTestNamespace();
CreatePrometheusDeployment(spec);
CreatePrometheusService(spec);
WaitUntilPrometheusOnline(spec);
return new PrometheusInfo(spec.ServicePort, FetchNewPod());
}
private void FetchPodInfo(CodexNodeGroup online)
{
online.PodInfo = FetchNewPod();
}
private PodInfo FetchNewPod()
{
var pods = client.ListNamespacedPod(K8sNamespace).Items;
var newPods = pods.Where(p => !knownPods.Contains(p.Name())).ToArray();
Assert.That(newPods.Length, Is.EqualTo(1), "Expected only 1 pod to be created. Test infra failure.");
var newPod = newPods.Single();
var info = new PodInfo(newPod.Name(), newPod.Status.PodIP);
Assert.That(!string.IsNullOrEmpty(info.Name), "Invalid pod name received. Test infra failure.");
Assert.That(!string.IsNullOrEmpty(info.Ip), "Invalid pod IP received. Test infra failure.");
knownPods.Add(newPod.Name());
return info;
}
#region Waiting
private void WaitUntilOnline(CodexNodeGroup online)
{
WaitUntil(() =>
{
online.Deployment = client.ReadNamespacedDeployment(online.Deployment.Name(), K8sNamespace);
return online.Deployment?.Status.AvailableReplicas != null && online.Deployment.Status.AvailableReplicas > 0;
});
}
private void WaitUntilOffline(string deploymentName)
{
WaitUntil(() =>
{
var deployment = client.ReadNamespacedDeployment(deploymentName, K8sNamespace);
return deployment == null || deployment.Status.AvailableReplicas == 0;
});
}
private void WaitUntilZeroPods()
{
WaitUntil(() => !client.ListNamespacedPod(K8sNamespace).Items.Any());
}
private void WaitUntilNamespaceDeleted()
{
WaitUntil(() => !IsTestNamespaceOnline());
}
private void WaitUntilPrometheusOnline(K8sPrometheusSpecs spec)
{
var deploymentName = spec.GetDeploymentName();
WaitUntil(() =>
{
var deployment = client.ReadNamespacedDeployment(deploymentName, K8sNamespace);
return deployment?.Status.AvailableReplicas != null && deployment.Status.AvailableReplicas > 0;
});
}
private void WaitUntil(Func<bool> predicate)
{
var start = DateTime.UtcNow;
var state = predicate();
while (!state)
{
if (DateTime.UtcNow - start > Timing.K8sOperationTimeout())
{
Assert.Fail("K8s operation timed out.");
throw new TimeoutException();
}
Timing.WaitForK8sServiceDelay();
state = predicate();
}
}
#endregion
#region Service management
private void CreateService(CodexNodeGroup online)
{
var serviceSpec = new V1Service
{
ApiVersion = "v1",
Metadata = online.GetServiceMetadata(),
Spec = new V1ServiceSpec
{
Type = "NodePort",
Selector = online.GetSelector(),
Ports = CreateServicePorts(online)
}
};
online.Service = client.CreateNamespacedService(serviceSpec, K8sNamespace);
}
private List<V1ServicePort> CreateServicePorts(CodexNodeGroup online)
{
var result = new List<V1ServicePort>();
var containers = online.GetContainers();
foreach (var container in containers)
{
result.Add(new V1ServicePort
{
Name = container.ServicePortName,
Protocol = "TCP",
Port = container.ApiPort,
TargetPort = container.ContainerPortName,
NodePort = container.ServicePort
});
}
return result;
}
private void DeleteService(CodexNodeGroup online)
{
if (online.Service == null) return;
client.DeleteNamespacedService(online.Service.Name(), K8sNamespace);
online.Service = null;
}
private void CreatePrometheusService(K8sPrometheusSpecs spec)
{
client.CreateNamespacedService(spec.CreatePrometheusService(), K8sNamespace);
}
#endregion
#region Deployment management
private void CreateDeployment(CodexNodeGroup online, OfflineCodexNodes offline)
{
var deploymentSpec = new V1Deployment
{
ApiVersion = "apps/v1",
Metadata = online.GetDeploymentMetadata(),
Spec = new V1DeploymentSpec
{
Replicas = 1,
Selector = new V1LabelSelector
{
MatchLabels = online.GetSelector()
},
Template = new V1PodTemplateSpec
{
Metadata = new V1ObjectMeta
{
Labels = online.GetSelector()
},
Spec = new V1PodSpec
{
NodeSelector = CreateNodeSelector(offline),
Containers = CreateDeploymentContainers(online, offline)
}
}
}
};
online.Deployment = client.CreateNamespacedDeployment(deploymentSpec, K8sNamespace);
}
private IDictionary<string, string> CreateNodeSelector(OfflineCodexNodes offline)
{
if (offline.Location == Location.Unspecified) return new Dictionary<string, string>();
return new Dictionary<string, string>
{
{ "codex-test-location", k8sCluster.GetNodeLabelForLocation(offline.Location) }
};
}
private List<V1Container> CreateDeploymentContainers(CodexNodeGroup online, OfflineCodexNodes offline)
{
var result = new List<V1Container>();
var containers = online.GetContainers();
foreach (var container in containers)
{
result.Add(new V1Container
{
Name = container.Name,
Image = dockerImage.GetImageTag(),
Ports = new List<V1ContainerPort>
{
new V1ContainerPort
{
ContainerPort = container.ApiPort,
Name = container.ContainerPortName
}
},
Env = dockerImage.CreateEnvironmentVariables(offline, container)
});
}
return result;
}
private void DeleteDeployment(CodexNodeGroup online)
{
if (online.Deployment == null) return;
client.DeleteNamespacedDeployment(online.Deployment.Name(), K8sNamespace);
online.Deployment = null;
}
private void CreatePrometheusDeployment(K8sPrometheusSpecs spec)
{
client.CreateNamespacedDeployment(spec.CreatePrometheusDeployment(), K8sNamespace);
}
#endregion
#region Namespace management
private void EnsureTestNamespace()
{
if (IsTestNamespaceOnline()) return;
var namespaceSpec = new V1Namespace
{
ApiVersion = "v1",
Metadata = new V1ObjectMeta
{
Name = K8sNamespace,
Labels = new Dictionary<string, string> { { "name", K8sNamespace } }
}
};
client.CreateNamespace(namespaceSpec);
}
private void DeleteNamespace()
{
if (IsTestNamespaceOnline())
{
client.DeleteNamespace(K8sNamespace, null, null, gracePeriodSeconds: 0);
}
}
private string K8sNamespace
{
get { return K8sCluster.K8sNamespace; }
}
#endregion
private bool IsTestNamespaceOnline()
{
return client.ListNamespace().Items.Any(n => n.Metadata.Name == K8sNamespace);
}
}
}

View File

@ -1,122 +0,0 @@
using CodexDistTestCore.Config;
using k8s.Models;
namespace CodexDistTestCore
{
public class K8sPrometheusSpecs
{
public const string ContainerName = "dtest-prom";
public const string ConfigFilepath = "/etc/prometheus/prometheus.yml";
private const string dockerImage = "thatbenbierens/prometheus-envconf:latest";
private const string portName = "prom-1";
private readonly string config;
public K8sPrometheusSpecs(int servicePort, int prometheusNumber, string config)
{
ServicePort = servicePort;
PrometheusNumber = prometheusNumber;
this.config = config;
}
public int ServicePort { get; }
public int PrometheusNumber { get; }
public string GetDeploymentName()
{
return "test-prom" + PrometheusNumber;
}
public V1Deployment CreatePrometheusDeployment()
{
var deploymentSpec = new V1Deployment
{
ApiVersion = "apps/v1",
Metadata = new V1ObjectMeta
{
Name = GetDeploymentName(),
NamespaceProperty = K8sCluster.K8sNamespace
},
Spec = new V1DeploymentSpec
{
Replicas = 1,
Selector = new V1LabelSelector
{
MatchLabels = CreateSelector()
},
Template = new V1PodTemplateSpec
{
Metadata = new V1ObjectMeta
{
Labels = CreateSelector()
},
Spec = new V1PodSpec
{
Containers = new List<V1Container>
{
new V1Container
{
Name = ContainerName,
Image = dockerImage,
Ports = new List<V1ContainerPort>
{
new V1ContainerPort
{
ContainerPort = 9090,
Name = portName
}
},
Env = new List<V1EnvVar>
{
new V1EnvVar
{
Name = "PROM_CONFIG",
Value = config
}
}
}
}
}
}
}
};
return deploymentSpec;
}
public V1Service CreatePrometheusService()
{
var serviceSpec = new V1Service
{
ApiVersion = "v1",
Metadata = new V1ObjectMeta
{
Name = "codex-prom-service" + PrometheusNumber,
NamespaceProperty = K8sCluster.K8sNamespace
},
Spec = new V1ServiceSpec
{
Type = "NodePort",
Selector = CreateSelector(),
Ports = new List<V1ServicePort>
{
new V1ServicePort
{
Name = "prom-service" + PrometheusNumber,
Protocol = "TCP",
Port = 9090,
TargetPort = portName,
NodePort = ServicePort
}
}
}
};
return serviceSpec;
}
private Dictionary<string, string> CreateSelector()
{
return new Dictionary<string, string> { { "test-prom", "dtest-prom" } };
}
}
}

View File

@ -1,78 +0,0 @@
using NUnit.Framework;
using System.Text;
namespace CodexDistTestCore
{
public class MetricsAggregator
{
private readonly NumberSource prometheusNumberSource = new NumberSource(0);
private readonly TestLog log;
private readonly K8sManager k8sManager;
private readonly Dictionary<MetricsQuery, OnlineCodexNode[]> activePrometheuses = new Dictionary<MetricsQuery, OnlineCodexNode[]>();
public MetricsAggregator(TestLog log, K8sManager k8sManager)
{
this.log = log;
this.k8sManager = k8sManager;
}
public void BeginCollectingMetricsFor(OnlineCodexNode[] nodes)
{
log.Log($"Starting metrics collecting for {nodes.Length} nodes...");
var config = GeneratePrometheusConfig(nodes);
var prometheus = k8sManager.BringOnlinePrometheus(config, prometheusNumberSource.GetNextNumber());
var query = new MetricsQuery(prometheus);
activePrometheuses.Add(query, nodes);
log.Log("Metrics service started.");
foreach(var node in nodes)
{
node.Metrics = new MetricsAccess(query, node);
}
}
public void DownloadAllMetrics()
{
var download = new MetricsDownloader(log, activePrometheuses);
download.DownloadAllMetrics();
}
private string GeneratePrometheusConfig(OnlineCodexNode[] nodes)
{
var config = "";
config += "global:\n";
config += " scrape_interval: 30s\n";
config += " scrape_timeout: 10s\n";
config += "\n";
config += "scrape_configs:\n";
config += " - job_name: services\n";
config += " metrics_path: /metrics\n";
config += " static_configs:\n";
config += " - targets:\n";
foreach (var node in nodes)
{
var ip = node.Group.PodInfo!.Ip;
var port = node.Container.MetricsPort;
config += $" - '{ip}:{port}'\n";
}
var bytes = Encoding.ASCII.GetBytes(config);
return Convert.ToBase64String(bytes);
}
}
public class PrometheusInfo
{
public PrometheusInfo(int servicePort, PodInfo podInfo)
{
ServicePort = servicePort;
PodInfo = podInfo;
}
public int ServicePort { get; }
public PodInfo PodInfo { get; }
}
}

View File

@ -1,96 +0,0 @@
namespace CodexDistTestCore
{
public interface IOfflineCodexNodes
{
IOfflineCodexNodes At(Location location);
IOfflineCodexNodes WithLogLevel(CodexLogLevel level);
IOfflineCodexNodes WithBootstrapNode(IOnlineCodexNode node);
IOfflineCodexNodes WithStorageQuota(ByteSize storageQuota);
IOfflineCodexNodes EnableMetrics();
ICodexNodeGroup BringOnline();
}
public enum CodexLogLevel
{
Trace,
Debug,
Info,
Warn,
Error
}
public enum Location
{
Unspecified,
BensLaptop,
BensOldGamingMachine,
}
public class OfflineCodexNodes : IOfflineCodexNodes
{
private readonly IK8sManager k8SManager;
public int NumberOfNodes { get; }
public Location Location { get; private set; }
public CodexLogLevel? LogLevel { get; private set; }
public IOnlineCodexNode? BootstrapNode { get; private set; }
public ByteSize? StorageQuota { get; private set; }
public bool MetricsEnabled { get; private set; }
public OfflineCodexNodes(IK8sManager k8SManager, int numberOfNodes)
{
this.k8SManager = k8SManager;
NumberOfNodes = numberOfNodes;
Location = Location.Unspecified;
MetricsEnabled = false;
}
public ICodexNodeGroup BringOnline()
{
return k8SManager.BringOnline(this);
}
public IOfflineCodexNodes At(Location location)
{
Location = location;
return this;
}
public IOfflineCodexNodes WithBootstrapNode(IOnlineCodexNode node)
{
BootstrapNode = node;
return this;
}
public IOfflineCodexNodes WithLogLevel(CodexLogLevel level)
{
LogLevel = level;
return this;
}
public IOfflineCodexNodes WithStorageQuota(ByteSize storageQuota)
{
StorageQuota = storageQuota;
return this;
}
public IOfflineCodexNodes EnableMetrics()
{
MetricsEnabled = true;
return this;
}
public string Describe()
{
var args = string.Join(',', DescribeArgs());
return $"{NumberOfNodes} CodexNodes with [{args}]";
}
private IEnumerable<string> DescribeArgs()
{
if (LogLevel != null) yield return ($"LogLevel={LogLevel}");
if (BootstrapNode != null) yield return ("BootstrapNode=set");
if (StorageQuota != null) yield return ($"StorageQuota={StorageQuota.SizeInBytes}");
}
}
}

View File

@ -1,73 +0,0 @@
using NUnit.Framework;
namespace CodexDistTestCore
{
public interface IPodLogHandler
{
void Log(Stream log);
}
[AttributeUsage(AttributeTargets.Method, AllowMultiple = false)]
public class DontDownloadLogsAndMetricsOnFailureAttribute : PropertyAttribute
{
public DontDownloadLogsAndMetricsOnFailureAttribute()
: base(Timing.UseLongTimeoutsKey)
{
}
}
public class PodLogDownloader
{
public const string DontDownloadLogsOnFailureKey = "DontDownloadLogsOnFailure";
private readonly TestLog log;
private readonly IK8sManager k8SManager;
public PodLogDownloader(TestLog log, IK8sManager k8sManager)
{
this.log = log;
k8SManager = k8sManager;
}
public CodexNodeLog DownloadLog(OnlineCodexNode node)
{
var description = node.Describe();
var subFile = log.CreateSubfile();
log.Log($"Downloading logs for {description} to file {subFile.FilenameWithoutPath}");
var handler = new PodLogDownloadHandler(description, subFile);
k8SManager.FetchPodLog(node, handler);
return handler.CreateCodexNodeLog();
}
}
public class PodLogDownloadHandler : IPodLogHandler
{
private readonly string description;
private readonly LogFile log;
public PodLogDownloadHandler(string description, LogFile log)
{
this.description = description;
this.log = log;
}
public CodexNodeLog CreateCodexNodeLog()
{
return new CodexNodeLog(log);
}
public void Log(Stream stream)
{
log.Write($"{description} -->> {log.FilenameWithoutPath}");
log.WriteRaw(description);
var reader = new StreamReader(stream);
var line = reader.ReadLine();
while (line != null)
{
log.WriteRaw(line);
line = reader.ReadLine();
}
}
}
}

View File

@ -1,144 +0,0 @@
using CodexDistTestCore.Config;
using NUnit.Framework;
namespace CodexDistTestCore
{
public class TestLog
{
private readonly NumberSource subfileNumberSource = new NumberSource(0);
private readonly LogFile file;
private readonly DateTime now;
public TestLog()
{
now = DateTime.UtcNow;
var name = GetTestName();
file = new LogFile(now, name);
Log($"Begin: {name}");
}
public void Log(string message)
{
file.Write(message);
}
public void Error(string message)
{
Log($"[ERROR] {message}");
}
public void EndTest()
{
var result = TestContext.CurrentContext.Result;
Log($"Finished: {GetTestName()} = {result.Outcome.Status}");
if (!string.IsNullOrEmpty(result.Message))
{
Log(result.Message);
Log($"{result.StackTrace}");
}
if (result.Outcome.Status == NUnit.Framework.Interfaces.TestStatus.Failed)
{
RenameLogFile();
}
}
private void RenameLogFile()
{
file.ConcatToFilename("_FAILED");
}
public LogFile CreateSubfile(string ext = "log")
{
return new LogFile(now, $"{GetTestName()}_{subfileNumberSource.GetNextNumber().ToString().PadLeft(6, '0')}", ext);
}
private static string GetTestName()
{
var test = TestContext.CurrentContext.Test;
var className = test.ClassName!.Substring(test.ClassName.LastIndexOf('.') + 1);
var args = FormatArguments(test);
return $"{className}.{test.MethodName}{args}";
}
private static string FormatArguments(TestContext.TestAdapter test)
{
if (test.Arguments == null || !test.Arguments.Any()) return "";
return $"[{string.Join(',', test.Arguments)}]";
}
}
public class LogFile
{
private readonly DateTime now;
private string name;
private readonly string ext;
private readonly string filepath;
public LogFile(DateTime now, string name, string ext = "log")
{
this.now = now;
this.name = name;
this.ext = ext;
filepath = Path.Join(
LogConfig.LogRoot,
$"{now.Year}-{Pad(now.Month)}",
Pad(now.Day));
Directory.CreateDirectory(filepath);
GenerateFilename();
}
public string FullFilename { get; private set; } = string.Empty;
public string FilenameWithoutPath { get; private set; } = string.Empty;
public void Write(string message)
{
WriteRaw($"{GetTimestamp()} {message}");
}
public void WriteRaw(string message)
{
try
{
File.AppendAllLines(FullFilename, new[] { message });
}
catch (Exception ex)
{
Console.WriteLine("Writing to log has failed: " + ex);
}
}
public void ConcatToFilename(string toAdd)
{
var oldFullName = FullFilename;
name += toAdd;
GenerateFilename();
File.Move(oldFullName, FullFilename);
}
private static string Pad(int n)
{
return n.ToString().PadLeft(2, '0');
}
private static string GetTimestamp()
{
return $"[{DateTime.UtcNow.ToString("u")}]";
}
private void GenerateFilename()
{
FilenameWithoutPath = $"{Pad(now.Hour)}-{Pad(now.Minute)}-{Pad(now.Second)}Z_{name.Replace('.', '-')}.{ext}";
FullFilename = Path.Combine(filepath, FilenameWithoutPath);
}
}
}

View File

@ -1,16 +0,0 @@
namespace CodexDistTestCore
{
public static class Utils
{
public static void Sleep(TimeSpan span)
{
Thread.Sleep(span);
}
public static T Wait<T>(Task<T> task)
{
task.Wait();
return task.Result;
}
}
}

View File

@ -0,0 +1,39 @@
using KubernetesWorkflow;
using Logging;
namespace DistTestCore
{
public class BaseStarter
{
protected readonly TestLifecycle lifecycle;
protected readonly WorkflowCreator workflowCreator;
private Stopwatch? stopwatch;
public BaseStarter(TestLifecycle lifecycle, WorkflowCreator workflowCreator)
{
this.lifecycle = lifecycle;
this.workflowCreator = workflowCreator;
}
protected void LogStart(string msg)
{
Log(msg);
stopwatch = Stopwatch.Begin(lifecycle.Log, GetClassName());
}
protected void LogEnd(string msg)
{
stopwatch!.End(msg);
stopwatch = null;
}
protected void Log(string msg)
{
lifecycle.Log.Log($"{GetClassName()} {msg}");
}
private string GetClassName()
{
return $"({GetType().Name})";
}
}
}

View File

@ -1,4 +1,4 @@
namespace CodexDistTestCore
namespace DistTestCore
{
public class ByteSize
{
@ -8,9 +8,24 @@
}
public long SizeInBytes { get; }
public override bool Equals(object? obj)
{
return obj is ByteSize size && SizeInBytes == size.SizeInBytes;
}
public override int GetHashCode()
{
return HashCode.Combine(SizeInBytes);
}
public override string ToString()
{
return $"{SizeInBytes} bytes";
}
}
public static class IntExtensions
public static class ByteSizeIntExtensions
{
private const long Kilo = 1024;

View File

@ -0,0 +1,99 @@
using KubernetesWorkflow;
namespace DistTestCore.Codex
{
public class CodexAccess
{
public CodexAccess(RunningContainer runningContainer)
{
Container = runningContainer;
}
public RunningContainer Container { get; }
public CodexDebugResponse GetDebugInfo()
{
return Http().HttpGetJson<CodexDebugResponse>("debug/info");
}
public string UploadFile(FileStream fileStream)
{
return Http().HttpPostStream("upload", fileStream);
}
public Stream DownloadFile(string contentId)
{
return Http().HttpGetStream("download/" + contentId);
}
public CodexSalesAvailabilityResponse SalesAvailability(CodexSalesAvailabilityRequest request)
{
return Http().HttpPostJson<CodexSalesAvailabilityRequest, CodexSalesAvailabilityResponse>("sales/availability", request);
}
public CodexSalesRequestStorageResponse RequestStorage(CodexSalesRequestStorageRequest request, string contentId)
{
return Http().HttpPostJson<CodexSalesRequestStorageRequest, CodexSalesRequestStorageResponse>($"storage/request/{contentId}", request);
}
private Http Http()
{
var ip = Container.Pod.Cluster.IP;
var port = Container.ServicePorts[0].Number;
return new Http(ip, port, baseUrl: "/api/codex/v1");
}
public string ConnectToPeer(string peerId, string peerMultiAddress)
{
return Http().HttpGetString($"connect/{peerId}?addrs={peerMultiAddress}");
}
}
public class CodexDebugResponse
{
public string id { get; set; } = string.Empty;
public string[] addrs { get; set; } = new string[0];
public string repo { get; set; } = string.Empty;
public string spr { get; set; } = string.Empty;
public CodexDebugVersionResponse codex { get; set; } = new();
}
public class CodexDebugVersionResponse
{
public string version { get; set; } = string.Empty;
public string revision { get; set; } = string.Empty;
}
public class CodexSalesAvailabilityRequest
{
public string size { get; set; } = string.Empty;
public string duration { get; set; } = string.Empty;
public string minPrice { get; set; } = string.Empty;
public string maxCollateral { get; set; } = string.Empty;
}
public class CodexSalesAvailabilityResponse
{
public string id { get; set; } = string.Empty;
public string size { get; set; } = string.Empty;
public string duration { get; set; } = string.Empty;
public string minPrice { get; set; } = string.Empty;
public string maxCollateral { get; set; } = string.Empty;
}
public class CodexSalesRequestStorageRequest
{
public string duration { get; set; } = string.Empty;
public string proofProbability { get; set; } = string.Empty;
public string reward { get; set; } = string.Empty;
public string collateral { get; set; } = string.Empty;
public string? expiry { get; set; }
public uint? nodes { get; set; }
public uint? tolerance { get; set; }
}
public class CodexSalesRequestStorageResponse
{
public string purchaseId { get; set; } = string.Empty;
}
}
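For illustration (not part of this commit): the CodexAccess wrapper above is meant to be used roughly as sketched below. The container, file name and peer values are placeholders.

// Hypothetical usage sketch; runningContainer comes from a started Codex workflow.
var access = new CodexAccess(runningContainer);

var info = access.GetDebugInfo();                       // GET  debug/info
Console.WriteLine($"Node {info.id} listens on {string.Join(",", info.addrs)}");

using var fileStream = File.OpenRead("example.bin");    // made-up local file
var contentId = access.UploadFile(fileStream);          // POST upload
using var downloaded = access.DownloadFile(contentId);  // GET  download/{contentId}

// Connecting to another node (otherInfo would come from that node's GetDebugInfo):
// access.ConnectToPeer(otherInfo.id, otherInfo.addrs[0]);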

View File

@ -0,0 +1,53 @@
using DistTestCore.Marketplace;
using KubernetesWorkflow;
namespace DistTestCore.Codex
{
public class CodexContainerRecipe : ContainerRecipeFactory
{
public const string DockerImage = "thatbenbierens/nim-codex:sha-bf5512b";
public const string MetricsPortTag = "metrics_port";
protected override string Image => DockerImage;
protected override void Initialize(StartupConfig startupConfig)
{
var config = startupConfig.Get<CodexStartupConfig>();
AddExposedPortAndVar("API_PORT");
AddEnvVar("DATA_DIR", $"datadir{ContainerNumber}");
AddInternalPortAndVar("DISC_PORT");
var listenPort = AddInternalPort();
AddEnvVar("LISTEN_ADDRS", $"/ip4/0.0.0.0/tcp/{listenPort.Number}");
if (config.LogLevel != null)
{
AddEnvVar("LOG_LEVEL", config.LogLevel.ToString()!.ToUpperInvariant());
}
if (config.StorageQuota != null)
{
AddEnvVar("STORAGE_QUOTA", config.StorageQuota.SizeInBytes.ToString()!);
}
if (config.MetricsEnabled)
{
AddEnvVar("METRICS_ADDR", "0.0.0.0");
AddInternalPortAndVar("METRICS_PORT", tag: MetricsPortTag);
}
if (config.MarketplaceConfig != null)
{
var gethConfig = startupConfig.Get<GethStartResult>();
var companionNode = gethConfig.CompanionNodes[Index];
Additional(companionNode);
var ip = companionNode.RunningContainer.Pod.Ip;
var port = companionNode.RunningContainer.Recipe.GetPortByTag(GethContainerRecipe.WsPortTag).Number;
AddEnvVar("ETH_PROVIDER", $"ws://{ip}:{port}");
AddEnvVar("ETH_ACCOUNT", companionNode.Account);
AddEnvVar("ETH_MARKETPLACE_ADDRESS", gethConfig.MarketplaceNetwork.Marketplace.Address);
}
}
}
}
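For illustration only: with all options and the marketplace enabled, one container produced by this recipe ends up with an environment roughly like the listing below. Port numbers, the geth IP and the addresses are invented for the example; the ETH_* values come from the companion geth node and the deployed marketplace contract.

API_PORT=<exposed api port>
DATA_DIR=datadir1
DISC_PORT=<internal discovery port>
LISTEN_ADDRS=/ip4/0.0.0.0/tcp/<internal listen port>
LOG_LEVEL=TRACE                         (only when a log level is configured)
STORAGE_QUOTA=1073741824                (only when a storage quota is configured)
METRICS_ADDR=0.0.0.0                    (only when metrics are enabled)
METRICS_PORT=<internal port tagged "metrics_port">
ETH_PROVIDER=ws://10.1.0.5:8546         (companion geth node, hypothetical address)
ETH_ACCOUNT=<companion node account>
ETH_MARKETPLACE_ADDRESS=<marketplace contract address>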

View File

@ -0,0 +1,11 @@
namespace DistTestCore.Codex
{
public enum CodexLogLevel
{
Trace,
Debug,
Info,
Warn,
Error
}
}

View File

@ -0,0 +1,16 @@
using DistTestCore.Marketplace;
using KubernetesWorkflow;
namespace DistTestCore.Codex
{
public class CodexStartupConfig
{
public Location Location { get; set; }
public CodexLogLevel? LogLevel { get; set; }
public ByteSize? StorageQuota { get; set; }
public bool MetricsEnabled { get; set; }
public MarketplaceInitialConfig? MarketplaceConfig { get; set; }
//public IOnlineCodexNode? BootstrapNode { get; private set; }
}
}

View File

@ -0,0 +1,32 @@
using DistTestCore.Codex;
using DistTestCore.Marketplace;
using DistTestCore.Metrics;
namespace DistTestCore
{
public interface ICodexNodeFactory
{
OnlineCodexNode CreateOnlineCodexNode(CodexAccess access, CodexNodeGroup group);
}
public class CodexNodeFactory : ICodexNodeFactory
{
private readonly TestLifecycle lifecycle;
private readonly IMetricsAccessFactory metricsAccessFactory;
private readonly IMarketplaceAccessFactory marketplaceAccessFactory;
public CodexNodeFactory(TestLifecycle lifecycle, IMetricsAccessFactory metricsAccessFactory, IMarketplaceAccessFactory marketplaceAccessFactory)
{
this.lifecycle = lifecycle;
this.metricsAccessFactory = metricsAccessFactory;
this.marketplaceAccessFactory = marketplaceAccessFactory;
}
public OnlineCodexNode CreateOnlineCodexNode(CodexAccess access, CodexNodeGroup group)
{
var metricsAccess = metricsAccessFactory.CreateMetricsAccess(access.Container);
var marketplaceAccess = marketplaceAccessFactory.CreateMarketplaceAccess(access);
return new OnlineCodexNode(lifecycle, access, group, metricsAccess, marketplaceAccess);
}
}
}

View File

@ -0,0 +1,86 @@
using DistTestCore.Codex;
using KubernetesWorkflow;
using System.Collections;
namespace DistTestCore
{
public interface ICodexNodeGroup : IEnumerable<IOnlineCodexNode>
{
ICodexSetup BringOffline();
IOnlineCodexNode this[int index] { get; }
}
public class CodexNodeGroup : ICodexNodeGroup
{
private readonly TestLifecycle lifecycle;
public CodexNodeGroup(TestLifecycle lifecycle, CodexSetup setup, RunningContainers containers, ICodexNodeFactory codexNodeFactory)
{
this.lifecycle = lifecycle;
Setup = setup;
Containers = containers;
Nodes = containers.Containers.Select(c => CreateOnlineCodexNode(c, codexNodeFactory)).ToArray();
}
public IOnlineCodexNode this[int index]
{
get
{
return Nodes[index];
}
}
public ICodexSetup BringOffline()
{
lifecycle.CodexStarter.BringOffline(this);
var result = Setup;
// Clear everything. Prevent accidental use.
Setup = null!;
Nodes = Array.Empty<OnlineCodexNode>();
Containers = null!;
return result;
}
public CodexSetup Setup { get; private set; }
public RunningContainers Containers { get; private set; }
public OnlineCodexNode[] Nodes { get; private set; }
public IEnumerator<IOnlineCodexNode> GetEnumerator()
{
return Nodes.Cast<IOnlineCodexNode>().GetEnumerator();
}
IEnumerator IEnumerable.GetEnumerator()
{
return Nodes.GetEnumerator();
}
public string Describe()
{
return $"<CodexNodeGroup@{Containers.Describe()}-{Setup.Describe()}>";
}
private OnlineCodexNode CreateOnlineCodexNode(RunningContainer c, ICodexNodeFactory factory)
{
var access = new CodexAccess(c);
EnsureOnline(access);
return factory.CreateOnlineCodexNode(access, this);
}
private void EnsureOnline(CodexAccess access)
{
try
{
var debugInfo = access.GetDebugInfo();
if (debugInfo == null || string.IsNullOrEmpty(debugInfo.id)) throw new InvalidOperationException("Unable to get debug-info from codex node at startup.");
}
catch (Exception e)
{
lifecycle.Log.Error($"Failed to start codex node: {e}. Test infra failure.");
throw new InvalidOperationException($"Failed to start codex node. Test infra failure.", e);
}
}
}
}

View File

@ -0,0 +1,90 @@
using DistTestCore.Codex;
using DistTestCore.Marketplace;
using KubernetesWorkflow;
namespace DistTestCore
{
public interface ICodexSetup
{
ICodexSetup At(Location location);
ICodexSetup WithLogLevel(CodexLogLevel level);
//ICodexStartupConfig WithBootstrapNode(IOnlineCodexNode node);
ICodexSetup WithStorageQuota(ByteSize storageQuota);
ICodexSetup EnableMetrics();
ICodexSetup EnableMarketplace(TestToken initialBalance);
ICodexSetup EnableMarketplace(TestToken initialBalance, Ether initialEther);
ICodexNodeGroup BringOnline();
}
public class CodexSetup : CodexStartupConfig, ICodexSetup
{
private readonly CodexStarter starter;
public int NumberOfNodes { get; }
public CodexSetup(CodexStarter starter, int numberOfNodes)
{
this.starter = starter;
NumberOfNodes = numberOfNodes;
}
public ICodexNodeGroup BringOnline()
{
return starter.BringOnline(this);
}
public ICodexSetup At(Location location)
{
Location = location;
return this;
}
//public ICodexSetupConfig WithBootstrapNode(IOnlineCodexNode node)
//{
// BootstrapNode = node;
// return this;
//}
public ICodexSetup WithLogLevel(CodexLogLevel level)
{
LogLevel = level;
return this;
}
public ICodexSetup WithStorageQuota(ByteSize storageQuota)
{
StorageQuota = storageQuota;
return this;
}
public ICodexSetup EnableMetrics()
{
MetricsEnabled = true;
return this;
}
public ICodexSetup EnableMarketplace(TestToken initialBalance)
{
return EnableMarketplace(initialBalance, 1000.Eth());
}
public ICodexSetup EnableMarketplace(TestToken initialBalance, Ether initialEther)
{
MarketplaceConfig = new MarketplaceInitialConfig(initialEther, initialBalance);
return this;
}
public string Describe()
{
var args = string.Join(',', DescribeArgs());
return $"({NumberOfNodes} CodexNodes with [{args}])";
}
private IEnumerable<string> DescribeArgs()
{
if (LogLevel != null) yield return $"LogLevel={LogLevel}";
//if (BootstrapNode != null) yield return "BootstrapNode=set-not-shown-here";
if (StorageQuota != null) yield return $"StorageQuota={StorageQuota}";
}
}
}
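A minimal sketch of how this fluent setup is intended to be used from a test (values are arbitrary; a TestTokens() integer extension is assumed here, analogous to the Eth() extension used above):

// Hypothetical test-side usage; SetupCodexNodes comes from DistTest (see DistTest.cs below).
var group = SetupCodexNodes(2)
    .WithLogLevel(CodexLogLevel.Trace)
    .EnableMetrics()
    .EnableMarketplace(100.TestTokens())    // assumed helper; initial ether defaults to 1000.Eth()
    .BringOnline();

// Taking the group down returns the original setup for potential reuse.
var setup = group.BringOffline();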

View File

@ -0,0 +1,83 @@
using DistTestCore.Codex;
using KubernetesWorkflow;
namespace DistTestCore
{
public class CodexStarter : BaseStarter
{
public CodexStarter(TestLifecycle lifecycle, WorkflowCreator workflowCreator)
: base(lifecycle, workflowCreator)
{
}
public List<CodexNodeGroup> RunningGroups { get; } = new List<CodexNodeGroup>();
public ICodexNodeGroup BringOnline(CodexSetup codexSetup)
{
LogSeparator();
LogStart($"Starting {codexSetup.Describe()}...");
var gethStartResult = lifecycle.GethStarter.BringOnlineMarketplaceFor(codexSetup);
var startupConfig = new StartupConfig();
startupConfig.Add(codexSetup);
startupConfig.Add(gethStartResult);
var containers = StartCodexContainers(startupConfig, codexSetup.NumberOfNodes, codexSetup.Location);
var metricAccessFactory = lifecycle.PrometheusStarter.CollectMetricsFor(codexSetup, containers);
var codexNodeFactory = new CodexNodeFactory(lifecycle, metricAccessFactory, gethStartResult.MarketplaceAccessFactory);
var group = CreateCodexGroup(codexSetup, containers, codexNodeFactory);
LogEnd($"Started {codexSetup.NumberOfNodes} nodes at '{group.Containers.RunningPod.Ip}'. They are: [{string.Join(",", group.Select(n => n.GetName()))}]");
LogSeparator();
return group;
}
public void BringOffline(CodexNodeGroup group)
{
LogStart($"Stopping {group.Describe()}...");
var workflow = CreateWorkflow();
workflow.Stop(group.Containers);
RunningGroups.Remove(group);
LogEnd("Stopped.");
}
public void DeleteAllResources()
{
var workflow = CreateWorkflow();
workflow.DeleteAllResources();
RunningGroups.Clear();
}
public void DownloadLog(RunningContainer container, ILogHandler logHandler)
{
var workflow = CreateWorkflow();
workflow.DownloadContainerLog(container, logHandler);
}
private RunningContainers StartCodexContainers(StartupConfig startupConfig, int numberOfNodes, Location location)
{
var workflow = CreateWorkflow();
return workflow.Start(numberOfNodes, location, new CodexContainerRecipe(), startupConfig);
}
private CodexNodeGroup CreateCodexGroup(CodexSetup codexSetup, RunningContainers runningContainers, CodexNodeFactory codexNodeFactory)
{
var group = new CodexNodeGroup(lifecycle, codexSetup, runningContainers, codexNodeFactory);
RunningGroups.Add(group);
return group;
}
private StartupWorkflow CreateWorkflow()
{
return workflowCreator.CreateWorkflow();
}
private void LogSeparator()
{
Log("----------------------------------------------------------------------------");
}
}
}

View File

@ -0,0 +1,32 @@
using KubernetesWorkflow;
namespace DistTestCore
{
public class Configuration
{
public KubernetesWorkflow.Configuration GetK8sConfiguration()
{
return new KubernetesWorkflow.Configuration(
k8sNamespace: "codex-test-ns",
kubeConfigFile: null,
operationTimeout: Timing.K8sOperationTimeout(),
retryDelay: Timing.K8sServiceDelay(),
locationMap: new[]
{
new ConfigurationLocationEntry(Location.BensOldGamingMachine, "worker01"),
new ConfigurationLocationEntry(Location.BensLaptop, "worker02"),
}
);
}
public Logging.LogConfig GetLogConfig()
{
return new Logging.LogConfig("D:/CodexTestLogs");
}
public string GetFileManagerFolder()
{
return "TestDataFiles";
}
}
}

DistTestCore/DistTest.cs (new file, 184 lines)
View File

@ -0,0 +1,184 @@
using DistTestCore.Codex;
using DistTestCore.Logs;
using DistTestCore.Marketplace;
using DistTestCore.Metrics;
using KubernetesWorkflow;
using Logging;
using NUnit.Framework;
using Utils;
namespace DistTestCore
{
[SetUpFixture]
public abstract class DistTest
{
private readonly Configuration configuration = new Configuration();
private FixtureLog fixtureLog = null!;
private TestLifecycle lifecycle = null!;
private DateTime testStart = DateTime.MinValue;
[OneTimeSetUp]
public void GlobalSetup()
{
// Previous test run may have been interrupted.
// Begin by cleaning everything up.
fixtureLog = new FixtureLog(configuration.GetLogConfig());
try
{
Stopwatch.Measure(fixtureLog, "Global setup", () =>
{
var wc = new WorkflowCreator(configuration.GetK8sConfiguration());
wc.CreateWorkflow().DeleteAllResources();
});
}
catch (Exception ex)
{
GlobalTestFailure.HasFailed = true;
fixtureLog.Error($"Global setup cleanup failed with: {ex}");
throw;
}
fixtureLog.Log("Global setup cleanup successful");
fixtureLog.Log($"Codex image: '{CodexContainerRecipe.DockerImage}'");
fixtureLog.Log($"Prometheus image: '{PrometheusContainerRecipe.DockerImage}'");
fixtureLog.Log($"Geth image: '{GethContainerRecipe.DockerImage}'");
}
[SetUp]
public void SetUpDistTest()
{
if (GlobalTestFailure.HasFailed)
{
Assert.Inconclusive("Skip test: Previous test failed during clean up.");
}
else
{
CreateNewTestLifecycle();
}
}
[TearDown]
public void TearDownDistTest()
{
try
{
DisposeTestLifecycle();
}
catch (Exception ex)
{
fixtureLog.Error("Cleanup failed: " + ex.Message);
GlobalTestFailure.HasFailed = true;
}
}
public TestFile GenerateTestFile(ByteSize size)
{
return lifecycle.FileManager.GenerateTestFile(size);
}
public ICodexSetup SetupCodexNodes(int numberOfNodes)
{
return new CodexSetup(lifecycle.CodexStarter, numberOfNodes);
}
private void CreateNewTestLifecycle()
{
Stopwatch.Measure(fixtureLog, $"Setup for {GetCurrentTestName()}", () =>
{
lifecycle = new TestLifecycle(fixtureLog.CreateTestLog(), configuration);
testStart = DateTime.UtcNow;
});
}
private void DisposeTestLifecycle()
{
fixtureLog.Log($"{GetCurrentTestName()} = {GetTestResult()} ({GetTestDuration()})");
Stopwatch.Measure(fixtureLog, $"Teardown for {GetCurrentTestName()}", () =>
{
lifecycle.Log.EndTest();
IncludeLogsAndMetricsOnTestFailure();
lifecycle.DeleteAllResources();
lifecycle = null!;
});
}
private void IncludeLogsAndMetricsOnTestFailure()
{
var result = TestContext.CurrentContext.Result;
if (result.Outcome.Status == NUnit.Framework.Interfaces.TestStatus.Failed)
{
fixtureLog.MarkAsFailed();
if (IsDownloadingLogsAndMetricsEnabled())
{
lifecycle.Log.Log("Downloading all CodexNode logs and metrics because of test failure...");
DownloadAllLogs();
DownloadAllMetrics();
}
else
{
lifecycle.Log.Log("Skipping download of all CodexNode logs and metrics due to [DontDownloadLogsAndMetricsOnFailure] attribute.");
}
}
}
private string GetTestDuration()
{
var testDuration = DateTime.UtcNow - testStart;
return Time.FormatDuration(testDuration);
}
private void DownloadAllLogs()
{
OnEachCodexNode(node =>
{
lifecycle.DownloadLog(node);
});
}
private void DownloadAllMetrics()
{
var metricsDownloader = new MetricsDownloader(lifecycle.Log);
OnEachCodexNode(node =>
{
var m = node.Metrics as MetricsAccess;
if (m != null)
{
metricsDownloader.DownloadAllMetricsForNode(node.GetName(), m);
}
});
}
private void OnEachCodexNode(Action<OnlineCodexNode> action)
{
var allNodes = lifecycle.CodexStarter.RunningGroups.SelectMany(g => g.Nodes);
foreach (var node in allNodes)
{
action(node);
}
}
private string GetCurrentTestName()
{
return $"[{TestContext.CurrentContext.Test.Name}]";
}
private string GetTestResult()
{
return TestContext.CurrentContext.Result.Outcome.Status.ToString();
}
private bool IsDownloadingLogsAndMetricsEnabled()
{
var testProperties = TestContext.CurrentContext.Test.Properties;
return !testProperties.ContainsKey(DontDownloadLogsAndMetricsOnFailureAttribute.DontDownloadKey);
}
}
public static class GlobalTestFailure
{
public static bool HasFailed { get; set; } = false;
}
}
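To sketch how this fixture is consumed (hypothetical test code, not part of this commit; the ByteSize constructor is assumed to take a byte count):

using DistTestCore;
using DistTestCore.Logs;
using NUnit.Framework;

[TestFixture]
public class ExampleTests : DistTest
{
    [Test]
    public void TwoNodesComeOnline()
    {
        var group = SetupCodexNodes(2).BringOnline();

        Assert.That(group.Count(), Is.EqualTo(2));

        group.BringOffline();
    }

    [Test]
    [DontDownloadLogsAndMetricsOnFailure]
    public void GeneratesATestFile()
    {
        var file = GenerateTestFile(new ByteSize(1024 * 1024));

        Assert.That(File.Exists(file.Filename));
    }
}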

View File

@ -0,0 +1,22 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<RootNamespace>DistTestCore</RootNamespace>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="nunit" Version="3.13.3" />
<PackageReference Include="NUnit3TestAdapter" Version="4.4.2" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.5.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\KubernetesWorkflow\KubernetesWorkflow.csproj" />
<ProjectReference Include="..\Logging\Logging.csproj" />
<ProjectReference Include="..\Nethereum\NethereumWorkflow.csproj" />
</ItemGroup>
</Project>

View File

@ -1,7 +1,7 @@
using CodexDistTestCore.Config;
using Logging;
using NUnit.Framework;
namespace CodexDistTestCore
namespace DistTestCore
{
public interface IFileManager
{
@ -14,20 +14,21 @@ namespace CodexDistTestCore
{
public const int ChunkSize = 1024 * 1024;
private readonly Random random = new Random();
private readonly List<TestFile> activeFiles = new List<TestFile>();
private readonly TestLog log;
private readonly string folder;
public FileManager(TestLog log)
public FileManager(TestLog log, Configuration configuration)
{
if (!Directory.Exists(FileManagerConfig.Folder)) Directory.CreateDirectory(FileManagerConfig.Folder);
folder = configuration.GetFileManagerFolder();
EnsureDirectory();
this.log = log;
}
public TestFile CreateEmptyTestFile()
{
var result = new TestFile(Path.Combine(FileManagerConfig.Folder, Guid.NewGuid().ToString() + "_test.bin"));
var result = new TestFile(Path.Combine(folder, Guid.NewGuid().ToString() + "_test.bin"));
File.Create(result.Filename).Close();
activeFiles.Add(result);
return result;
}
@ -41,8 +42,7 @@ namespace CodexDistTestCore
public void DeleteAllTestFiles()
{
foreach (var file in activeFiles) File.Delete(file.Filename);
activeFiles.Clear();
DeleteDirectory();
}
private void GenerateFileBytes(TestFile result, ByteSize size)
@ -63,6 +63,16 @@ namespace CodexDistTestCore
using var stream = new FileStream(result.Filename, FileMode.Append);
stream.Write(bytes, 0, bytes.Length);
}
private void EnsureDirectory()
{
if (!Directory.Exists(folder)) Directory.CreateDirectory(folder);
}
private void DeleteDirectory()
{
Directory.Delete(folder, true);
}
}
public class TestFile

View File

@ -0,0 +1,92 @@
using DistTestCore.Marketplace;
using KubernetesWorkflow;
namespace DistTestCore
{
public class GethStarter : BaseStarter
{
private readonly MarketplaceNetworkCache marketplaceNetworkCache;
private readonly GethCompanionNodeStarter companionNodeStarter;
public GethStarter(TestLifecycle lifecycle, WorkflowCreator workflowCreator)
: base(lifecycle, workflowCreator)
{
marketplaceNetworkCache = new MarketplaceNetworkCache(
new GethBootstrapNodeStarter(lifecycle, workflowCreator),
new CodexContractsStarter(lifecycle, workflowCreator));
companionNodeStarter = new GethCompanionNodeStarter(lifecycle, workflowCreator);
}
public GethStartResult BringOnlineMarketplaceFor(CodexSetup codexSetup)
{
if (codexSetup.MarketplaceConfig == null) return CreateMarketplaceUnavailableResult();
var marketplaceNetwork = marketplaceNetworkCache.Get();
var companionNodes = StartCompanionNodes(codexSetup, marketplaceNetwork);
LogStart("Setting up initial balance...");
TransferInitialBalance(marketplaceNetwork, codexSetup.MarketplaceConfig, companionNodes);
LogEnd($"Initial balance of {codexSetup.MarketplaceConfig.InitialTestTokens} set for {codexSetup.NumberOfNodes} nodes.");
return CreateGethStartResult(marketplaceNetwork, companionNodes);
}
private void TransferInitialBalance(MarketplaceNetwork marketplaceNetwork, MarketplaceInitialConfig marketplaceConfig, GethCompanionNodeInfo[] companionNodes)
{
var interaction = marketplaceNetwork.StartInteraction(lifecycle.Log);
var tokenAddress = marketplaceNetwork.Marketplace.TokenAddress;
foreach (var node in companionNodes)
{
interaction.TransferWeiTo(node.Account, marketplaceConfig.InitialEth.Wei);
interaction.MintTestTokens(node.Account, marketplaceConfig.InitialTestTokens.Amount, tokenAddress);
}
interaction.WaitForAllTransactions();
}
private GethStartResult CreateGethStartResult(MarketplaceNetwork marketplaceNetwork, GethCompanionNodeInfo[] companionNodes)
{
return new GethStartResult(CreateMarketplaceAccessFactory(marketplaceNetwork), marketplaceNetwork, companionNodes);
}
private GethStartResult CreateMarketplaceUnavailableResult()
{
return new GethStartResult(new MarketplaceUnavailableAccessFactory(), null!, Array.Empty<GethCompanionNodeInfo>());
}
private IMarketplaceAccessFactory CreateMarketplaceAccessFactory(MarketplaceNetwork marketplaceNetwork)
{
return new GethMarketplaceAccessFactory(lifecycle.Log, marketplaceNetwork);
}
private GethCompanionNodeInfo[] StartCompanionNodes(CodexSetup codexSetup, MarketplaceNetwork marketplaceNetwork)
{
return companionNodeStarter.StartCompanionNodesFor(codexSetup, marketplaceNetwork.Bootstrap);
}
}
public class MarketplaceNetworkCache
{
private readonly GethBootstrapNodeStarter bootstrapNodeStarter;
private readonly CodexContractsStarter codexContractsStarter;
private MarketplaceNetwork? network;
public MarketplaceNetworkCache(GethBootstrapNodeStarter bootstrapNodeStarter, CodexContractsStarter codexContractsStarter)
{
this.bootstrapNodeStarter = bootstrapNodeStarter;
this.codexContractsStarter = codexContractsStarter;
}
public MarketplaceNetwork Get()
{
if (network == null)
{
var bootstrapInfo = bootstrapNodeStarter.StartGethBootstrapNode();
var marketplaceInfo = codexContractsStarter.Start(bootstrapInfo);
network = new MarketplaceNetwork(bootstrapInfo, marketplaceInfo);
}
return network;
}
}
}

View File

@ -1,8 +1,10 @@
using Newtonsoft.Json;
using NUnit.Framework;
using System.Net.Http.Headers;
using System.Net.Http.Json;
using Utils;
namespace CodexDistTestCore
namespace DistTestCore
{
public class Http
{
@ -26,14 +28,29 @@ namespace CodexDistTestCore
{
using var client = GetClient();
var url = GetUrl() + route;
var result = Utils.Wait(client.GetAsync(url));
return Utils.Wait(result.Content.ReadAsStringAsync());
var result = Time.Wait(client.GetAsync(url));
return Time.Wait(result.Content.ReadAsStringAsync());
});
}
public T HttpGetJson<T>(string route)
{
return JsonConvert.DeserializeObject<T>(HttpGetString(route))!;
var json = HttpGetString(route);
return TryJsonDeserialize<T>(json);
}
public TResponse HttpPostJson<TRequest, TResponse>(string route, TRequest body)
{
var json = Retry(() =>
{
using var client = GetClient();
var url = GetUrl() + route;
using var content = JsonContent.Create(body);
var result = Time.Wait(client.PostAsync(url, content));
return Time.Wait(result.Content.ReadAsStringAsync());
});
return TryJsonDeserialize<TResponse>(json);
}
public string HttpPostStream(string route, Stream stream)
@ -45,9 +62,9 @@ namespace CodexDistTestCore
var content = new StreamContent(stream);
content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");
var response = Utils.Wait(client.PostAsync(url, content));
var response = Time.Wait(client.PostAsync(url, content));
return Utils.Wait(response.Content.ReadAsStringAsync());
return Time.Wait(response.Content.ReadAsStringAsync());
});
}
@ -58,7 +75,7 @@ namespace CodexDistTestCore
var client = GetClient();
var url = GetUrl() + route;
return Utils.Wait(client.GetStreamAsync(url));
return Time.Wait(client.GetStreamAsync(url));
});
}
@ -83,13 +100,27 @@ namespace CodexDistTestCore
retryCounter++;
if (retryCounter > Timing.HttpCallRetryCount())
{
Assert.Fail(exception.Message);
Assert.Fail(exception.ToString());
throw;
}
}
}
}
private static T TryJsonDeserialize<T>(string json)
{
try
{
return JsonConvert.DeserializeObject<T>(json)!;
}
catch (Exception exception)
{
var msg = $"Failed to deserialize JSON: '{json}' with exception: {exception}";
Assert.Fail(msg);
throw new InvalidOperationException(msg, exception);
}
}
private static HttpClient GetClient()
{
var client = new HttpClient();

View File

@ -1,6 +1,7 @@
using NUnit.Framework;
using Logging;
using NUnit.Framework;
namespace CodexDistTestCore
namespace DistTestCore.Logs
{
public interface ICodexNodeLog
{
@ -10,10 +11,12 @@ namespace CodexDistTestCore
public class CodexNodeLog : ICodexNodeLog
{
private readonly LogFile logFile;
private readonly OnlineCodexNode owner;
public CodexNodeLog(LogFile logFile)
public CodexNodeLog(LogFile logFile, OnlineCodexNode owner)
{
this.logFile = logFile;
this.owner = owner;
}
public void AssertLogContains(string expectedString)
@ -28,7 +31,7 @@ namespace CodexDistTestCore
line = streamReader.ReadLine();
}
Assert.Fail($"Unable to find string '{expectedString}' in CodexNode log file {logFile.FilenameWithoutPath}");
Assert.Fail($"{owner.GetName()} Unable to find string '{expectedString}' in CodexNode log file {logFile.FullFilename}");
}
}
}

View File

@ -0,0 +1,15 @@
using NUnit.Framework;
namespace DistTestCore.Logs
{
[AttributeUsage(AttributeTargets.Method, AllowMultiple = false)]
public class DontDownloadLogsAndMetricsOnFailureAttribute : PropertyAttribute
{
public const string DontDownloadKey = "DontDownloadLogsAndMetrics";
public DontDownloadLogsAndMetricsOnFailureAttribute()
: base(DontDownloadKey)
{
}
}
}
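A brief usage sketch; the test body and the exact consumer of the property are assumptions — the attribute itself only tags the test method with the DontDownloadKey property:
[Test]
[DontDownloadLogsAndMetricsOnFailure]
public void SomeLongRunningTest()
{
    // If this test fails, logs and metrics are not downloaded for it.
}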

View File

@ -0,0 +1,30 @@
using KubernetesWorkflow;
using Logging;
namespace DistTestCore.Logs
{
public class LogDownloadHandler : LogHandler, ILogHandler
{
private readonly OnlineCodexNode node;
private readonly LogFile log;
public LogDownloadHandler(OnlineCodexNode node, string description, LogFile log)
{
this.node = node;
this.log = log;
log.Write($"{description} -->> {log.FullFilename}");
log.WriteRaw(description);
}
public CodexNodeLog CreateCodexNodeLog()
{
return new CodexNodeLog(log, node);
}
protected override void ProcessLine(string line)
{
log.WriteRaw(line);
}
}
}

View File

@ -0,0 +1,16 @@
using KubernetesWorkflow;
namespace DistTestCore.Marketplace
{
public class CodexContractsContainerConfig
{
public CodexContractsContainerConfig(string bootstrapNodeIp, Port jsonRpcPort)
{
BootstrapNodeIp = bootstrapNodeIp;
JsonRpcPort = jsonRpcPort;
}
public string BootstrapNodeIp { get; }
public Port JsonRpcPort { get; }
}
}

View File

@ -0,0 +1,24 @@
using KubernetesWorkflow;
namespace DistTestCore.Marketplace
{
public class CodexContractsContainerRecipe : ContainerRecipeFactory
{
public const string DockerImage = "thatbenbierens/codex-contracts-deployment";
public const string MarketplaceAddressFilename = "/usr/app/deployments/codexdisttestnetwork/Marketplace.json";
protected override string Image => DockerImage;
protected override void Initialize(StartupConfig startupConfig)
{
var config = startupConfig.Get<CodexContractsContainerConfig>();
var ip = config.BootstrapNodeIp;
var port = config.JsonRpcPort.Number;
AddEnvVar("DISTTEST_NETWORK_URL", $"http://{ip}:{port}");
AddEnvVar("HARDHAT_NETWORK", "codexdisttestnetwork");
AddEnvVar("KEEP_ALIVE", "1");
}
}
}

View File

@ -0,0 +1,86 @@
using KubernetesWorkflow;
using Utils;
namespace DistTestCore.Marketplace
{
public class CodexContractsStarter : BaseStarter
{
private const string readyString = "Done! Sleeping indefinitely...";
public CodexContractsStarter(TestLifecycle lifecycle, WorkflowCreator workflowCreator)
: base(lifecycle, workflowCreator)
{
}
public MarketplaceInfo Start(GethBootstrapNodeInfo bootstrapNode)
{
LogStart("Deploying Codex contracts...");
var workflow = workflowCreator.CreateWorkflow();
var startupConfig = CreateStartupConfig(bootstrapNode.RunningContainers.Containers[0]);
var containers = workflow.Start(1, Location.Unspecified, new CodexContractsContainerRecipe(), startupConfig);
if (containers.Containers.Length != 1) throw new InvalidOperationException("Expected 1 Codex contracts container to be created. Test infra failure.");
var container = containers.Containers[0];
WaitUntil(() =>
{
var logHandler = new ContractsReadyLogHandler(readyString);
workflow.DownloadContainerLog(container, logHandler);
return logHandler.Found;
});
var extractor = new ContainerInfoExtractor(workflow, container);
var marketplaceAddress = extractor.ExtractMarketplaceAddress();
var interaction = bootstrapNode.StartInteraction(lifecycle.Log);
var tokenAddress = interaction.GetTokenAddress(marketplaceAddress);
LogEnd("Contracts deployed.");
return new MarketplaceInfo(marketplaceAddress, tokenAddress);
}
private void WaitUntil(Func<bool> predicate)
{
Time.WaitUntil(predicate, TimeSpan.FromMinutes(2), TimeSpan.FromSeconds(1));
}
private StartupConfig CreateStartupConfig(RunningContainer bootstrapContainer)
{
var startupConfig = new StartupConfig();
var contractsConfig = new CodexContractsContainerConfig(bootstrapContainer.Pod.Ip, bootstrapContainer.Recipe.GetPortByTag(GethContainerRecipe.HttpPortTag));
startupConfig.Add(contractsConfig);
return startupConfig;
}
}
public class MarketplaceInfo
{
public MarketplaceInfo(string address, string tokenAddress)
{
Address = address;
TokenAddress = tokenAddress;
}
public string Address { get; }
public string TokenAddress { get; }
}
public class ContractsReadyLogHandler : LogHandler
{
private readonly string targetString;
public ContractsReadyLogHandler(string targetString)
{
this.targetString = targetString;
}
public bool Found { get; private set; }
protected override void ProcessLine(string line)
{
if (line.Contains(targetString)) Found = true;
}
}
}

View File

@ -0,0 +1,131 @@
using KubernetesWorkflow;
using Newtonsoft.Json;
using System.Text;
namespace DistTestCore.Marketplace
{
public class ContainerInfoExtractor
{
private readonly StartupWorkflow workflow;
private readonly RunningContainer container;
public ContainerInfoExtractor(StartupWorkflow workflow, RunningContainer container)
{
this.workflow = workflow;
this.container = container;
}
public string ExtractAccount()
{
var account = Retry(FetchAccount);
if (string.IsNullOrEmpty(account)) throw new InvalidOperationException("Unable to fetch account for geth node. Test infra failure.");
return account;
}
public string ExtractPubKey()
{
var pubKey = Retry(FetchPubKey);
if (string.IsNullOrEmpty(pubKey)) throw new InvalidOperationException("Unable to fetch enode from geth node. Test infra failure.");
return pubKey;
}
public string ExtractBootstrapPrivateKey()
{
var privKey = Retry(FetchBootstrapPrivateKey);
if (string.IsNullOrEmpty(privKey)) throw new InvalidOperationException("Unable to fetch private key from geth node. Test infra failure.");
return privKey;
}
public string ExtractMarketplaceAddress()
{
var marketplaceAddress = Retry(FetchMarketplaceAddress);
if (string.IsNullOrEmpty(marketplaceAddress)) throw new InvalidOperationException("Unable to fetch marketplace address from codex-contracts node. Test infra failure.");
return marketplaceAddress;
}
private string Retry(Func<string> fetch)
{
var result = Catch(fetch);
if (string.IsNullOrEmpty(result))
{
Thread.Sleep(TimeSpan.FromSeconds(5));
result = fetch();
}
return result;
}
private string Catch(Func<string> fetch)
{
try
{
return fetch();
}
catch
{
return string.Empty;
}
}
private string FetchAccount()
{
return workflow.ExecuteCommand(container, "cat", GethContainerRecipe.AccountFilename);
}
private string FetchBootstrapPrivateKey()
{
return workflow.ExecuteCommand(container, "cat", GethContainerRecipe.BootstrapPrivateKeyFilename);
}
private string FetchMarketplaceAddress()
{
var json = workflow.ExecuteCommand(container, "cat", CodexContractsContainerRecipe.MarketplaceAddressFilename);
var marketplace = JsonConvert.DeserializeObject<MarketplaceJson>(json);
return marketplace!.address;
}
private string FetchPubKey()
{
var enodeFinder = new PubKeyFinder();
workflow.DownloadContainerLog(container, enodeFinder);
return enodeFinder.GetPubKey();
}
}
public class PubKeyFinder : LogHandler, ILogHandler
{
private const string openTag = "self=\"enode://";
private string pubKey = string.Empty;
public string GetPubKey()
{
return pubKey;
}
protected override void ProcessLine(string line)
{
if (line.Contains(openTag))
{
ExtractPubKey(line);
}
}
private void ExtractPubKey(string line)
{
var openIndex = line.IndexOf(openTag) + openTag.Length;
var closeIndex = line.IndexOf("@", openIndex); // search from the end of the open tag so an earlier '@' in the line cannot yield a negative length
pubKey = line.Substring(
startIndex: openIndex,
length: closeIndex - openIndex);
}
}
public class MarketplaceJson
{
public string address { get; set; } = string.Empty;
}
}
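For reference, FetchMarketplaceAddress only reads the 'address' field; the deployment artifact is assumed to look roughly like this (the value is illustrative):
// {
//   "address": "0x59b670e9fA9D0A427751Af201D676719a970857b",
//   ...
// }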

View File

@ -0,0 +1,35 @@
using KubernetesWorkflow;
using Logging;
using NethereumWorkflow;
namespace DistTestCore.Marketplace
{
public class GethBootstrapNodeInfo
{
public GethBootstrapNodeInfo(RunningContainers runningContainers, string account, string pubKey, string privateKey, Port discoveryPort)
{
RunningContainers = runningContainers;
Account = account;
PubKey = pubKey;
PrivateKey = privateKey;
DiscoveryPort = discoveryPort;
}
public RunningContainers RunningContainers { get; }
public string Account { get; }
public string PubKey { get; }
public string PrivateKey { get; }
public Port DiscoveryPort { get; }
public NethereumInteraction StartInteraction(TestLog log)
{
var ip = RunningContainers.RunningPod.Cluster.IP;
var port = RunningContainers.Containers[0].ServicePorts[0].Number;
var account = Account;
var privateKey = PrivateKey;
var creator = new NethereumInteractionCreator(log, ip, port, account, privateKey);
return creator.CreateWorkflow();
}
}
}

View File

@ -0,0 +1,40 @@
using KubernetesWorkflow;
namespace DistTestCore.Marketplace
{
public class GethBootstrapNodeStarter : BaseStarter
{
public GethBootstrapNodeStarter(TestLifecycle lifecycle, WorkflowCreator workflowCreator)
: base(lifecycle, workflowCreator)
{
}
public GethBootstrapNodeInfo StartGethBootstrapNode()
{
LogStart("Starting Geth bootstrap node...");
var startupConfig = CreateBootstrapStartupConfig();
var workflow = workflowCreator.CreateWorkflow();
var containers = workflow.Start(1, Location.Unspecified, new GethContainerRecipe(), startupConfig);
if (containers.Containers.Length != 1) throw new InvalidOperationException("Expected 1 Geth bootstrap node to be created. Test infra failure.");
var bootstrapContainer = containers.Containers[0];
var extractor = new ContainerInfoExtractor(workflow, bootstrapContainer);
var account = extractor.ExtractAccount();
var pubKey = extractor.ExtractPubKey();
var privateKey = extractor.ExtractBootstrapPrivateKey();
var discoveryPort = bootstrapContainer.Recipe.GetPortByTag(GethContainerRecipe.DiscoveryPortTag);
LogEnd($"Geth bootstrap node started with account '{account}'");
return new GethBootstrapNodeInfo(containers, account, pubKey, privateKey, discoveryPort);
}
private StartupConfig CreateBootstrapStartupConfig()
{
var config = new StartupConfig();
config.Add(new GethStartupConfig(true, null!));
return config;
}
}
}

View File

@ -0,0 +1,16 @@
using KubernetesWorkflow;
namespace DistTestCore.Marketplace
{
public class GethCompanionNodeInfo
{
public GethCompanionNodeInfo(RunningContainer runningContainer, string account)
{
RunningContainer = runningContainer;
Account = account;
}
public RunningContainer RunningContainer { get; }
public string Account { get; }
}
}

View File

@ -0,0 +1,43 @@
using KubernetesWorkflow;
namespace DistTestCore.Marketplace
{
public class GethCompanionNodeStarter : BaseStarter
{
public GethCompanionNodeStarter(TestLifecycle lifecycle, WorkflowCreator workflowCreator)
: base(lifecycle, workflowCreator)
{
}
public GethCompanionNodeInfo[] StartCompanionNodesFor(CodexSetup codexSetup, GethBootstrapNodeInfo bootstrapNode)
{
LogStart($"Initializing companions for {codexSetup.NumberOfNodes} Codex nodes.");
var startupConfig = CreateCompanionNodeStartupConfig(bootstrapNode);
var workflow = workflowCreator.CreateWorkflow();
var containers = workflow.Start(codexSetup.NumberOfNodes, Location.Unspecified, new GethContainerRecipe(), startupConfig);
if (containers.Containers.Length != codexSetup.NumberOfNodes) throw new InvalidOperationException("Expected a Geth companion node to be created for each Codex node. Test infra failure.");
var result = containers.Containers.Select(c => CreateCompanionInfo(workflow, c)).ToArray();
LogEnd($"Initialized {codexSetup.NumberOfNodes} companion nodes. Their accounts: [{string.Join(",", result.Select(c => c.Account))}]");
return result;
}
private GethCompanionNodeInfo CreateCompanionInfo(StartupWorkflow workflow, RunningContainer container)
{
var extractor = new ContainerInfoExtractor(workflow, container);
var account = extractor.ExtractAccount();
return new GethCompanionNodeInfo(container, account);
}
private StartupConfig CreateCompanionNodeStartupConfig(GethBootstrapNodeInfo bootstrapNode)
{
var config = new StartupConfig();
config.Add(new GethStartupConfig(false, bootstrapNode));
return config;
}
}
}

View File

@ -0,0 +1,49 @@
using KubernetesWorkflow;
namespace DistTestCore.Marketplace
{
public class GethContainerRecipe : ContainerRecipeFactory
{
public const string DockerImage = "thatbenbierens/geth-confenv:latest";
public const string HttpPortTag = "http_port";
public const string WsPortTag = "ws_port";
public const string DiscoveryPortTag = "disc_port";
public const string AccountFilename = "account_string.txt";
public const string BootstrapPrivateKeyFilename = "bootstrap_private.key";
protected override string Image => DockerImage;
protected override void Initialize(StartupConfig startupConfig)
{
var config = startupConfig.Get<GethStartupConfig>();
var args = CreateArgs(config);
AddEnvVar("GETH_ARGS", args);
}
private string CreateArgs(GethStartupConfig config)
{
var discovery = AddInternalPort(tag: DiscoveryPortTag);
if (config.IsBootstrapNode)
{
AddEnvVar("IS_BOOTSTRAP", "1");
var exposedPort = AddExposedPort(tag: HttpPortTag);
return $"--http.port {exposedPort.Number} --discovery.port {discovery.Number} --nodiscover";
}
var port = AddInternalPort();
var authRpc = AddInternalPort();
var httpPort = AddInternalPort(tag: HttpPortTag);
var wsPort = AddInternalPort(tag: WsPortTag);
var bootPubKey = config.BootstrapNode.PubKey;
var bootIp = config.BootstrapNode.RunningContainers.Containers[0].Pod.Ip;
var bootPort = config.BootstrapNode.DiscoveryPort.Number;
var bootstrapArg = $"--bootnodes enode://{bootPubKey}@{bootIp}:{bootPort}";
return $"--port {port.Number} --discovery.port {discovery.Number} --authrpc.port {authRpc.Number} --http.port {httpPort.Number} --ws --ws.addr 0.0.0.0 --ws.port {wsPort.Number} --nodiscover {bootstrapArg}";
}
}
}
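For illustration, the argument strings produced by CreateArgs look roughly as follows (port numbers come from the recipe component factory and are illustrative):
// Bootstrap node:
//   --http.port 8081 --discovery.port 8080 --nodiscover
//
// Companion node:
//   --port 8081 --discovery.port 8080 --authrpc.port 8082 --http.port 8083
//   --ws --ws.addr 0.0.0.0 --ws.port 8084 --nodiscover
//   --bootnodes enode://<bootstrap-pubkey>@<bootstrap-pod-ip>:<bootstrap-discovery-port>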

View File

@ -0,0 +1,16 @@
namespace DistTestCore.Marketplace
{
public class GethStartResult
{
public GethStartResult(IMarketplaceAccessFactory marketplaceAccessFactory, MarketplaceNetwork marketplaceNetwork, GethCompanionNodeInfo[] companionNodes)
{
MarketplaceAccessFactory = marketplaceAccessFactory;
MarketplaceNetwork = marketplaceNetwork;
CompanionNodes = companionNodes;
}
public IMarketplaceAccessFactory MarketplaceAccessFactory { get; }
public MarketplaceNetwork MarketplaceNetwork { get; }
public GethCompanionNodeInfo[] CompanionNodes { get; }
}
}

View File

@ -0,0 +1,14 @@
namespace DistTestCore.Marketplace
{
public class GethStartupConfig
{
public GethStartupConfig(bool isBootstrapNode, GethBootstrapNodeInfo bootstrapNode)
{
IsBootstrapNode = isBootstrapNode;
BootstrapNode = bootstrapNode;
}
public bool IsBootstrapNode { get; }
public GethBootstrapNodeInfo BootstrapNode { get; }
}
}

View File

@ -0,0 +1,148 @@
using DistTestCore.Codex;
using Logging;
using NUnit.Framework;
using NUnit.Framework.Constraints;
using System.Numerics;
using Utils;
namespace DistTestCore.Marketplace
{
public interface IMarketplaceAccess
{
string MakeStorageAvailable(ByteSize size, TestToken minPricePerBytePerSecond, TestToken maxCollateral, TimeSpan maxDuration);
string RequestStorage(ContentId contentId, TestToken pricePerBytePerSecond, TestToken requiredCollateral, uint minRequiredNumberOfNodes, int proofProbability, TimeSpan duration);
void AssertThatBalance(IResolveConstraint constraint, string message = "");
TestToken GetBalance();
}
public class MarketplaceAccess : IMarketplaceAccess
{
private readonly TestLog log;
private readonly MarketplaceNetwork marketplaceNetwork;
private readonly GethCompanionNodeInfo companionNode;
private readonly CodexAccess codexAccess;
public MarketplaceAccess(TestLog log, MarketplaceNetwork marketplaceNetwork, GethCompanionNodeInfo companionNode, CodexAccess codexAccess)
{
this.log = log;
this.marketplaceNetwork = marketplaceNetwork;
this.companionNode = companionNode;
this.codexAccess = codexAccess;
}
public string RequestStorage(ContentId contentId, TestToken pricePerBytePerSecond, TestToken requiredCollateral, uint minRequiredNumberOfNodes, int proofProbability, TimeSpan duration)
{
var request = new CodexSalesRequestStorageRequest
{
duration = ToHexBigInt(duration.TotalSeconds),
proofProbability = ToHexBigInt(proofProbability),
reward = ToHexBigInt(pricePerBytePerSecond),
collateral = ToHexBigInt(requiredCollateral),
expiry = null,
nodes = minRequiredNumberOfNodes,
tolerance = null,
};
Log($"Requesting storage for: {contentId.Id}... (" +
$"pricePerBytePerSecond: {pricePerBytePerSecond}, " +
$"requiredCollateral: {requiredCollateral}, " +
$"minRequiredNumberOfNodes: {minRequiredNumberOfNodes}, " +
$"proofProbability: {proofProbability}, " +
$"duration: {Time.FormatDuration(duration)})");
var response = codexAccess.RequestStorage(request, contentId.Id);
Log($"Storage requested successfully. PurchaseId: {response.purchaseId}");
return response.purchaseId;
}
public string MakeStorageAvailable(ByteSize size, TestToken minPricePerBytePerSecond, TestToken maxCollateral, TimeSpan maxDuration)
{
var request = new CodexSalesAvailabilityRequest
{
size = ToHexBigInt(size.SizeInBytes),
duration = ToHexBigInt(maxDuration.TotalSeconds),
maxCollateral = ToHexBigInt(maxCollateral),
minPrice = ToHexBigInt(minPricePerBytePerSecond)
};
Log($"Making storage available... (" +
$"size: {size}, " +
$"minPricePerBytePerSecond: {minPricePerBytePerSecond}, " +
$"maxCollateral: {maxCollateral}, " +
$"maxDuration: {Time.FormatDuration(maxDuration)})");
var response = codexAccess.SalesAvailability(request);
Log($"Storage successfully made available. Id: {response.id}");
return response.id;
}
private string ToHexBigInt(double d)
{
return "0x" + string.Format("{0:X}", Convert.ToInt64(d));
}
public string ToHexBigInt(TestToken t)
{
var bigInt = new BigInteger(t.Amount);
return "0x" + bigInt.ToString("X");
}
public void AssertThatBalance(IResolveConstraint constraint, string message = "")
{
Assert.That(GetBalance(), constraint, message);
}
public TestToken GetBalance()
{
var interaction = marketplaceNetwork.StartInteraction(log);
var account = companionNode.Account;
var amount = interaction.GetBalance(marketplaceNetwork.Marketplace.TokenAddress, account);
var balance = new TestToken(amount);
Log($"Balance of {account} is {balance}.");
return balance;
}
private void Log(string msg)
{
log.Log($"{codexAccess.Container.GetName()} {msg}");
}
}
public class MarketplaceUnavailable : IMarketplaceAccess
{
public string RequestStorage(ContentId contentId, TestToken pricePerBytePerSecond, TestToken requiredCollateral, uint minRequiredNumberOfNodes, int proofProbability, TimeSpan duration)
{
Unavailable();
return string.Empty;
}
public string MakeStorageAvailable(ByteSize size, TestToken minPricePerBytePerSecond, TestToken maxCollateral, TimeSpan duration)
{
Unavailable();
return string.Empty;
}
public void AssertThatBalance(IResolveConstraint constraint, string message = "")
{
Unavailable();
}
public TestToken GetBalance()
{
Unavailable();
return new TestToken(0);
}
private void Unavailable()
{
Assert.Fail("Incorrect test setup: Marketplace was not enabled for this group of Codex nodes. Add 'EnableMarketplace(...)' after 'SetupCodexNodes()' to enable it.");
throw new InvalidOperationException();
}
}
}
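A minimal usage sketch, assuming 'node' is an IOnlineCodexNode started with the marketplace enabled and that ByteSize exposes a constructor taking a byte count (not shown in this diff):
// Offer storage and check the minted balance from a test:
var availabilityId = node.Marketplace.MakeStorageAvailable(
    size: new ByteSize(1024 * 1024),
    minPricePerBytePerSecond: 1.TestTokens(),
    maxCollateral: 20.TestTokens(),
    maxDuration: TimeSpan.FromMinutes(3));

node.Marketplace.AssertThatBalance(Is.EqualTo(100.TestTokens()), "initial balance expected");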

View File

@ -0,0 +1,42 @@
using DistTestCore.Codex;
using Logging;
namespace DistTestCore.Marketplace
{
public interface IMarketplaceAccessFactory
{
IMarketplaceAccess CreateMarketplaceAccess(CodexAccess access);
}
public class MarketplaceUnavailableAccessFactory : IMarketplaceAccessFactory
{
public IMarketplaceAccess CreateMarketplaceAccess(CodexAccess access)
{
return new MarketplaceUnavailable();
}
}
public class GethMarketplaceAccessFactory : IMarketplaceAccessFactory
{
private readonly TestLog log;
private readonly MarketplaceNetwork marketplaceNetwork;
public GethMarketplaceAccessFactory(TestLog log, MarketplaceNetwork marketplaceNetwork)
{
this.log = log;
this.marketplaceNetwork = marketplaceNetwork;
}
public IMarketplaceAccess CreateMarketplaceAccess(CodexAccess access)
{
var companionNode = GetGethCompanionNode(access);
return new MarketplaceAccess(log, marketplaceNetwork, companionNode, access);
}
private GethCompanionNodeInfo GetGethCompanionNode(CodexAccess access)
{
var node = access.Container.Recipe.Additionals.Single(a => a is GethCompanionNodeInfo);
return (GethCompanionNodeInfo)node;
}
}
}

View File

@ -0,0 +1,14 @@
namespace DistTestCore.Marketplace
{
public class MarketplaceInitialConfig
{
public MarketplaceInitialConfig(Ether initialEth, TestToken initialTestTokens)
{
InitialEth = initialEth;
InitialTestTokens = initialTestTokens;
}
public Ether InitialEth { get; }
public TestToken InitialTestTokens { get; }
}
}

View File

@ -0,0 +1,22 @@
using Logging;
using NethereumWorkflow;
namespace DistTestCore.Marketplace
{
public class MarketplaceNetwork
{
public MarketplaceNetwork(GethBootstrapNodeInfo bootstrap, MarketplaceInfo marketplace)
{
Bootstrap = bootstrap;
Marketplace = marketplace;
}
public GethBootstrapNodeInfo Bootstrap { get; }
public MarketplaceInfo Marketplace { get; }
public NethereumInteraction StartInteraction(TestLog log)
{
return Bootstrap.StartInteraction(log);
}
}
}

View File

@ -1,13 +1,70 @@
using NUnit.Framework;
using KubernetesWorkflow;
using Logging;
using NUnit.Framework;
using NUnit.Framework.Constraints;
using Utils;
namespace CodexDistTestCore
namespace DistTestCore.Metrics
{
public interface IMetricsAccess
{
void AssertThat(string metricName, IResolveConstraint constraint, string message = "");
}
public class MetricsAccess : IMetricsAccess
{
private readonly TestLog log;
private readonly MetricsQuery query;
private readonly RunningContainer node;
public MetricsAccess(TestLog log, MetricsQuery query, RunningContainer node)
{
this.log = log;
this.query = query;
this.node = node;
}
public void AssertThat(string metricName, IResolveConstraint constraint, string message = "")
{
var metricSet = GetMetricWithTimeout(metricName);
var metricValue = metricSet.Values[0].Value;
log.Log($"{node.GetName()} metric '{metricName}' = {metricValue}");
Assert.That(metricValue, constraint, message);
}
public Metrics? GetAllMetrics()
{
return query.GetAllMetricsForNode(node);
}
private MetricsSet GetMetricWithTimeout(string metricName)
{
var start = DateTime.UtcNow;
while (true)
{
var mostRecent = GetMostRecent(metricName);
if (mostRecent != null) return mostRecent;
if (DateTime.UtcNow - start > Timing.WaitForMetricTimeout())
{
Assert.Fail($"Timeout: Unable to get metric '{metricName}'.");
throw new TimeoutException();
}
Time.Sleep(TimeSpan.FromSeconds(2));
}
}
private MetricsSet? GetMostRecent(string metricName)
{
var result = query.GetMostRecent(metricName, node);
if (result == null) return null;
return result.Sets.LastOrDefault();
}
}
public class MetricsUnavailable : IMetricsAccess
{
public void AssertThat(string metricName, IResolveConstraint constraint, string message = "")
@ -16,48 +73,4 @@ namespace CodexDistTestCore
throw new InvalidOperationException();
}
}
public class MetricsAccess : IMetricsAccess
{
private readonly MetricsQuery query;
private readonly OnlineCodexNode node;
public MetricsAccess(MetricsQuery query, OnlineCodexNode node)
{
this.query = query;
this.node = node;
}
public void AssertThat(string metricName, IResolveConstraint constraint, string message = "")
{
var metricSet = GetMetricWithTimeout(metricName, node);
var metricValue = metricSet.Values[0].Value;
Assert.That(metricValue, constraint, message);
}
private MetricsSet GetMetricWithTimeout(string metricName, OnlineCodexNode node)
{
var start = DateTime.UtcNow;
while (true)
{
var mostRecent = GetMostRecent(metricName, node);
if (mostRecent != null) return mostRecent;
if (DateTime.UtcNow - start > Timing.WaitForMetricTimeout())
{
Assert.Fail($"Timeout: Unable to get metric '{metricName}'.");
throw new TimeoutException();
}
Utils.Sleep(TimeSpan.FromSeconds(2));
}
}
private MetricsSet? GetMostRecent(string metricName, OnlineCodexNode node)
{
var result = query.GetMostRecent(metricName, node);
if (result == null) return null;
return result.Sets.LastOrDefault();
}
}
}

View File

@ -0,0 +1,35 @@
using KubernetesWorkflow;
namespace DistTestCore.Metrics
{
public interface IMetricsAccessFactory
{
IMetricsAccess CreateMetricsAccess(RunningContainer codexContainer);
}
public class MetricsUnavailableAccessFactory : IMetricsAccessFactory
{
public IMetricsAccess CreateMetricsAccess(RunningContainer codexContainer)
{
return new MetricsUnavailable();
}
}
public class CodexNodeMetricsAccessFactory : IMetricsAccessFactory
{
private readonly TestLifecycle lifecycle;
private readonly RunningContainers prometheusContainer;
public CodexNodeMetricsAccessFactory(TestLifecycle lifecycle, RunningContainers prometheusContainer)
{
this.lifecycle = lifecycle;
this.prometheusContainer = prometheusContainer;
}
public IMetricsAccess CreateMetricsAccess(RunningContainer codexContainer)
{
var query = new MetricsQuery(prometheusContainer);
return new MetricsAccess(lifecycle.Log, query, codexContainer);
}
}
}

View File

@ -1,49 +1,32 @@
using System.Globalization;
using Logging;
using System.Globalization;
namespace CodexDistTestCore
namespace DistTestCore.Metrics
{
public class MetricsDownloader
{
private readonly TestLog log;
private readonly Dictionary<MetricsQuery, OnlineCodexNode[]> activePrometheuses;
public MetricsDownloader(TestLog log, Dictionary<MetricsQuery, OnlineCodexNode[]> activePrometheuses)
public MetricsDownloader(TestLog log)
{
this.log = log;
this.activePrometheuses = activePrometheuses;
}
public void DownloadAllMetrics()
public void DownloadAllMetricsForNode(string nodeName, MetricsAccess access)
{
foreach (var pair in activePrometheuses)
{
DownloadAllMetrics(pair.Key, pair.Value);
}
}
private void DownloadAllMetrics(MetricsQuery query, OnlineCodexNode[] nodes)
{
foreach (var node in nodes)
{
DownloadAllMetricsForNode(query, node);
}
}
private void DownloadAllMetricsForNode(MetricsQuery query, OnlineCodexNode node)
{
var metrics = query.GetAllMetricsForNode(node);
var metrics = access.GetAllMetrics();
if (metrics == null || metrics.Sets.Length == 0 || metrics.Sets.All(s => s.Values.Length == 0)) return;
var headers = new[] { "timestamp" }.Concat(metrics.Sets.Select(s => s.Name)).ToArray();
var map = CreateValueMap(metrics);
WriteToFile(node.GetName(), headers, map);
WriteToFile(nodeName, headers, map);
}
private void WriteToFile(string nodeName, string[] headers, Dictionary<DateTime, List<string>> map)
{
var file = log.CreateSubfile("csv");
log.Log($"Downloading metrics for {nodeName} to file {file.FilenameWithoutPath}");
log.Log($"Downloading metrics for {nodeName} to file {file.FullFilename}");
file.WriteRaw(string.Join(",", headers));

View File

@ -1,22 +1,26 @@
using CodexDistTestCore.Config;
using DistTestCore.Codex;
using KubernetesWorkflow;
using System.Globalization;
namespace CodexDistTestCore
namespace DistTestCore.Metrics
{
public class MetricsQuery
{
private readonly K8sCluster k8sCluster = new K8sCluster();
private readonly Http http;
public MetricsQuery(PrometheusInfo prometheusInfo)
public MetricsQuery(RunningContainers runningContainers)
{
RunningContainers = runningContainers;
http = new Http(
k8sCluster.GetIp(),
prometheusInfo.ServicePort,
runningContainers.RunningPod.Cluster.IP,
runningContainers.Containers[0].ServicePorts[0].Number,
"api/v1");
}
public Metrics? GetMostRecent(string metricName, OnlineCodexNode node)
public RunningContainers RunningContainers { get; }
public Metrics? GetMostRecent(string metricName, RunningContainer node)
{
var response = GetLastOverTime(metricName, GetInstanceStringForNode(node));
if (response == null) return null;
@ -41,7 +45,7 @@ namespace CodexDistTestCore
return MapResponseToMetrics(response);
}
public Metrics? GetAllMetricsForNode(OnlineCodexNode node)
public Metrics? GetAllMetricsForNode(RunningContainer node)
{
var response = http.HttpGetJson<PrometheusQueryResponse>($"query?query={GetInstanceStringForNode(node)}{GetQueryTimeRange()}");
if (response.status != "success") return null;
@ -107,16 +111,17 @@ namespace CodexDistTestCore
{
Timestamp = ToTimestamp(value[0]),
Value = ToValue(value[1])
};
};
}
private string GetInstanceNameForNode(OnlineCodexNode node)
private string GetInstanceNameForNode(RunningContainer node)
{
var pod = node.Group.PodInfo!;
return $"{pod.Ip}:{node.Container.MetricsPort}";
var ip = node.Pod.Ip;
var port = node.Recipe.GetPortByTag(CodexContainerRecipe.MetricsPortTag).Number;
return $"{ip}:{port}";
}
private string GetInstanceStringForNode(OnlineCodexNode node)
private string GetInstanceStringForNode(RunningContainer node)
{
return "{instance=\"" + GetInstanceNameForNode(node) + "\"}";
}

View File

@ -0,0 +1,19 @@
using KubernetesWorkflow;
namespace DistTestCore.Metrics
{
public class PrometheusContainerRecipe : ContainerRecipeFactory
{
public const string DockerImage = "thatbenbierens/prometheus-envconf:latest";
protected override string Image => DockerImage;
protected override void Initialize(StartupConfig startupConfig)
{
var config = startupConfig.Get<PrometheusStartupConfig>();
AddExposedPortAndVar("PROM_PORT");
AddEnvVar("PROM_CONFIG", config.PrometheusConfigBase64);
}
}
}

View File

@ -0,0 +1,12 @@
namespace DistTestCore.Metrics
{
public class PrometheusStartupConfig
{
public PrometheusStartupConfig(string prometheusConfigBase64)
{
PrometheusConfigBase64 = prometheusConfigBase64;
}
public string PrometheusConfigBase64 { get; }
}
}

View File

@ -1,46 +1,51 @@
using CodexDistTestCore.Config;
using DistTestCore.Codex;
using DistTestCore.Logs;
using DistTestCore.Marketplace;
using DistTestCore.Metrics;
using NUnit.Framework;
namespace CodexDistTestCore
namespace DistTestCore
{
public interface IOnlineCodexNode
{
string GetName();
CodexDebugResponse GetDebugInfo();
ContentId UploadFile(TestFile file);
TestFile? DownloadContent(ContentId contentId);
void ConnectToPeer(IOnlineCodexNode node);
ICodexNodeLog DownloadLog();
IMetricsAccess Metrics { get; }
IMarketplaceAccess Marketplace { get; }
}
public class OnlineCodexNode : IOnlineCodexNode
{
private const string SuccessfullyConnectedMessage = "Successfully connected to peer";
private const string UploadFailedMessage = "Unable to store block";
private readonly TestLifecycle lifecycle;
private readonly K8sCluster k8sCluster = new K8sCluster();
private readonly TestLog log;
private readonly IFileManager fileManager;
public OnlineCodexNode(TestLog log, IFileManager fileManager, CodexNodeContainer container)
public OnlineCodexNode(TestLifecycle lifecycle, CodexAccess codexAccess, CodexNodeGroup group, IMetricsAccess metricsAccess, IMarketplaceAccess marketplaceAccess)
{
this.log = log;
this.fileManager = fileManager;
Container = container;
this.lifecycle = lifecycle;
CodexAccess = codexAccess;
Group = group;
Metrics = metricsAccess;
Marketplace = marketplaceAccess;
}
public CodexNodeContainer Container { get; }
public CodexNodeGroup Group { get; internal set; } = null!;
public IMetricsAccess Metrics { get; set; } = new MetricsUnavailable();
public CodexAccess CodexAccess { get; }
public CodexNodeGroup Group { get; }
public IMetricsAccess Metrics { get; }
public IMarketplaceAccess Marketplace { get; }
public string GetName()
{
return $"<{Container.Name}>";
return CodexAccess.Container.GetName();
}
public CodexDebugResponse GetDebugInfo()
{
var response = Http().HttpGetJson<CodexDebugResponse>("debug/info");
var response = CodexAccess.GetDebugInfo();
Log($"Got DebugInfo with id: '{response.id}'.");
return response;
}
@ -49,7 +54,7 @@ namespace CodexDistTestCore
{
Log($"Uploading file of size {file.GetFileSize()}...");
using var fileStream = File.OpenRead(file.Filename);
var response = Http().HttpPostStream("upload", fileStream);
var response = CodexAccess.UploadFile(fileStream);
if (response.StartsWith(UploadFailedMessage))
{
Assert.Fail("Node failed to store block.");
@ -61,7 +66,7 @@ namespace CodexDistTestCore
public TestFile? DownloadContent(ContentId contentId)
{
Log($"Downloading for contentId: '{contentId.Id}'...");
var file = fileManager.CreateEmptyTestFile();
var file = lifecycle.FileManager.CreateEmptyTestFile();
DownloadToFile(contentId.Id, file);
Log($"Downloaded file of size {file.GetFileSize()} to '{file.Filename}'.");
return file;
@ -73,10 +78,7 @@ namespace CodexDistTestCore
Log($"Connecting to peer {peer.GetName()}...");
var peerInfo = node.GetDebugInfo();
var peerId = peerInfo.id;
var peerMultiAddress = GetPeerMultiAddress(peer, peerInfo);
var response = Http().HttpGetString($"connect/{peerId}?addrs={peerMultiAddress}");
var response = CodexAccess.ConnectToPeer(peerInfo.id, GetPeerMultiAddress(peer, peerInfo));
Assert.That(response, Is.EqualTo(SuccessfullyConnectedMessage), "Unable to connect codex nodes.");
Log($"Successfully connected to peer {peer.GetName()}.");
@ -84,12 +86,12 @@ namespace CodexDistTestCore
public ICodexNodeLog DownloadLog()
{
return Group.DownloadLog(this);
return lifecycle.DownloadLog(this);
}
public string Describe()
{
return $"{Group.Describe()} contains {GetName()}";
return $"({GetName()} in {Group.Describe()})";
}
private string GetPeerMultiAddress(OnlineCodexNode peer, CodexDebugResponse peerInfo)
@ -104,24 +106,19 @@ namespace CodexDistTestCore
// The peer we want to connect is in a different pod.
// We must replace the default IP with the pod IP in the multiAddress.
return multiAddress.Replace("0.0.0.0", peer.Group.PodInfo!.Ip);
return multiAddress.Replace("0.0.0.0", peer.Group.Containers.RunningPod.Ip);
}
private void DownloadToFile(string contentId, TestFile file)
{
using var fileStream = File.OpenWrite(file.Filename);
using var downloadStream = Http().HttpGetStream("download/" + contentId);
using var downloadStream = CodexAccess.DownloadFile(contentId);
downloadStream.CopyTo(fileStream);
}
private Http Http()
{
return new Http(ip: k8sCluster.GetIp(), port: Container.ServicePort, baseUrl: "/api/codex/v1");
}
private void Log(string msg)
{
log.Log($"{GetName()}: {msg}");
lifecycle.Log.Log($"{GetName()}: {msg}");
}
}

View File

@ -0,0 +1,56 @@
using DistTestCore.Codex;
using DistTestCore.Metrics;
using KubernetesWorkflow;
using System.Text;
namespace DistTestCore
{
public class PrometheusStarter : BaseStarter
{
public PrometheusStarter(TestLifecycle lifecycle, WorkflowCreator workflowCreator)
: base(lifecycle, workflowCreator)
{
}
public IMetricsAccessFactory CollectMetricsFor(CodexSetup codexSetup, RunningContainers containers)
{
if (!codexSetup.MetricsEnabled) return new MetricsUnavailableAccessFactory();
LogStart($"Starting metrics server for {containers.Describe()}");
var startupConfig = new StartupConfig();
startupConfig.Add(new PrometheusStartupConfig(GeneratePrometheusConfig(containers.Containers)));
var workflow = workflowCreator.CreateWorkflow();
var runningContainers = workflow.Start(1, Location.Unspecified, new PrometheusContainerRecipe(), startupConfig);
if (runningContainers.Containers.Length != 1) throw new InvalidOperationException("Expected only 1 Prometheus container to be created.");
LogEnd("Metrics server started.");
return new CodexNodeMetricsAccessFactory(lifecycle, runningContainers);
}
private string GeneratePrometheusConfig(RunningContainer[] nodes)
{
var config = "";
config += "global:\n";
config += " scrape_interval: 30s\n";
config += " scrape_timeout: 10s\n";
config += "\n";
config += "scrape_configs:\n";
config += " - job_name: services\n";
config += " metrics_path: /metrics\n";
config += " static_configs:\n";
config += " - targets:\n";
foreach (var node in nodes)
{
var ip = node.Pod.Ip;
var port = node.Recipe.GetPortByTag(CodexContainerRecipe.MetricsPortTag).Number;
config += $" - '{ip}:{port}'\n";
}
var bytes = Encoding.ASCII.GetBytes(config);
return Convert.ToBase64String(bytes);
}
}
}
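The configuration built by GeneratePrometheusConfig, before base64 encoding, resolves to roughly the following (node IPs and the metrics port are illustrative):
// global:
//  scrape_interval: 30s
//  scrape_timeout: 10s
//
// scrape_configs:
//  - job_name: services
//    metrics_path: /metrics
//    static_configs:
//      - targets:
//        - '10.1.0.12:8080'
//        - '10.1.0.13:8080'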

DistTestCore/Stopwatch.cs
View File

@ -0,0 +1,36 @@
using Logging;
using Utils;
namespace DistTestCore
{
public class Stopwatch
{
private readonly DateTime start = DateTime.UtcNow;
private readonly BaseLog log;
private readonly string name;
public Stopwatch(BaseLog log, string name)
{
this.log = log;
this.name = name;
}
public static void Measure(BaseLog log, string name, Action action)
{
var sw = Begin(log, name);
action();
sw.End();
}
public static Stopwatch Begin(BaseLog log, string name)
{
return new Stopwatch(log, name);
}
public void End(string msg = "")
{
var duration = DateTime.UtcNow - start;
log.Log($"{name} {msg} ({Time.FormatDuration(duration)})");
}
}
}
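Usage sketch (log, node, file and contentId are assumed to be in scope):
// Measure a single action and log its duration:
Stopwatch.Measure(log, "UploadFile", () => node.UploadFile(file));

// Or measure an arbitrary span explicitly:
var sw = Stopwatch.Begin(log, "DownloadContent");
var downloaded = node.DownloadContent(contentId);
sw.End("finished");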

View File

@ -0,0 +1,46 @@
using DistTestCore.Logs;
using KubernetesWorkflow;
using Logging;
namespace DistTestCore
{
public class TestLifecycle
{
private readonly WorkflowCreator workflowCreator;
public TestLifecycle(TestLog log, Configuration configuration)
{
Log = log;
workflowCreator = new WorkflowCreator(configuration.GetK8sConfiguration());
FileManager = new FileManager(Log, configuration);
CodexStarter = new CodexStarter(this, workflowCreator);
PrometheusStarter = new PrometheusStarter(this, workflowCreator);
GethStarter = new GethStarter(this, workflowCreator);
}
public TestLog Log { get; }
public FileManager FileManager { get; }
public CodexStarter CodexStarter { get; }
public PrometheusStarter PrometheusStarter { get; }
public GethStarter GethStarter { get; }
public void DeleteAllResources()
{
CodexStarter.DeleteAllResources();
FileManager.DeleteAllTestFiles();
}
public ICodexNodeLog DownloadLog(OnlineCodexNode node)
{
var subFile = Log.CreateSubfile();
var description = node.Describe();
var handler = new LogDownloadHandler(node, description, subFile);
Log.Log($"Downloading logs for {description} to file '{subFile.FullFilename}'");
CodexStarter.DownloadLog(node.CodexAccess.Container, handler);
return new CodexNodeLog(subFile, node);
}
}
}

View File

@ -1,6 +1,7 @@
using NUnit.Framework;
using Utils;
namespace CodexDistTestCore
namespace DistTestCore
{
[AttributeUsage(AttributeTargets.Method, AllowMultiple = false)]
public class UseLongTimeoutsAttribute : PropertyAttribute
@ -27,12 +28,12 @@ namespace CodexDistTestCore
public static void HttpCallRetryDelay()
{
Utils.Sleep(GetTimes().HttpCallRetryDelay());
Time.Sleep(GetTimes().HttpCallRetryDelay());
}
public static void WaitForK8sServiceDelay()
public static TimeSpan K8sServiceDelay()
{
Utils.Sleep(GetTimes().WaitForK8sServiceDelay());
return GetTimes().WaitForK8sServiceDelay();
}
public static TimeSpan K8sOperationTimeout()

DistTestCore/Tokens.cs
View File

@ -0,0 +1,87 @@
namespace DistTestCore
{
public class Ether
{
public Ether(decimal wei)
{
Wei = wei;
}
public decimal Wei { get; }
public override bool Equals(object? obj)
{
return obj is Ether ether && Wei == ether.Wei;
}
public override int GetHashCode()
{
return HashCode.Combine(Wei);
}
public override string ToString()
{
return $"{Wei} Wei";
}
}
public class TestToken
{
public TestToken(decimal amount)
{
Amount = amount;
}
public decimal Amount { get; }
public override bool Equals(object? obj)
{
return obj is TestToken token && Amount == token.Amount;
}
public override int GetHashCode()
{
return HashCode.Combine(Amount);
}
public override string ToString()
{
return $"{Amount} TestTokens";
}
}
public static class TokensIntExtensions
{
private const decimal weiPerEth = 1000000000000000000;
public static TestToken TestTokens(this int i)
{
return TestTokens(Convert.ToDecimal(i));
}
public static TestToken TestTokens(this decimal i)
{
return new TestToken(i);
}
public static Ether Eth(this int i)
{
return Eth(Convert.ToDecimal(i));
}
public static Ether Wei(this int i)
{
return Wei(Convert.ToDecimal(i));
}
public static Ether Eth(this decimal i)
{
return new Ether(i * weiPerEth);
}
public static Ether Wei(this decimal i)
{
return new Ether(i);
}
}
}
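A few conversion examples for the extensions above:
TestToken hundredTokens = 100.TestTokens();  // "100 TestTokens"
Ether oneEth = 1.Eth();                      // stored as 1000000000000000000 Wei
Ether someWei = 42.Wei();                    // stored as 42 Wei

// Equality is value-based, so these are equal:
bool equal = 1.Eth().Equals(1000000000000000000m.Wei());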

View File

@ -0,0 +1,52 @@
using k8s;
using Utils;
namespace KubernetesWorkflow
{
public class CommandRunner
{
private readonly Kubernetes client;
private readonly string k8sNamespace;
private readonly RunningPod pod;
private readonly string containerName;
private readonly string command;
private readonly string[] arguments;
private readonly List<string> lines = new List<string>();
public CommandRunner(Kubernetes client, string k8sNamespace, RunningPod pod, string containerName, string command, string[] arguments)
{
this.client = client;
this.k8sNamespace = k8sNamespace;
this.pod = pod;
this.containerName = containerName;
this.command = command;
this.arguments = arguments;
}
public void Run()
{
var input = new[] { command }.Concat(arguments).ToArray();
Time.Wait(client.NamespacedPodExecAsync(
pod.Name, k8sNamespace, containerName, input, false, Callback, new CancellationToken()));
}
public string GetStdOut()
{
return string.Join(Environment.NewLine, lines);
}
private Task Callback(Stream stdIn, Stream stdOut, Stream stdErr)
{
using var streamReader = new StreamReader(stdOut);
var line = streamReader.ReadLine();
while (line != null)
{
lines.Add(line);
line = streamReader.ReadLine();
}
return Task.CompletedTask;
}
}
}

View File

@ -0,0 +1,32 @@
namespace KubernetesWorkflow
{
public class Configuration
{
public Configuration(string k8sNamespace, string? kubeConfigFile, TimeSpan operationTimeout, TimeSpan retryDelay, ConfigurationLocationEntry[] locationMap)
{
K8sNamespace = k8sNamespace;
KubeConfigFile = kubeConfigFile;
OperationTimeout = operationTimeout;
RetryDelay = retryDelay;
LocationMap = locationMap;
}
public string K8sNamespace { get; }
public string? KubeConfigFile { get; }
public TimeSpan OperationTimeout { get; }
public TimeSpan RetryDelay { get; }
public ConfigurationLocationEntry[] LocationMap { get; }
}
public class ConfigurationLocationEntry
{
public ConfigurationLocationEntry(Location location, string workerName)
{
Location = location;
WorkerName = workerName;
}
public Location Location { get; }
public string WorkerName { get; }
}
}
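A minimal sketch of constructing a workflow Configuration (all values are illustrative):
var configuration = new Configuration(
    k8sNamespace: "codex-test-namespace",
    kubeConfigFile: null,               // null -> K8sCluster falls back to BuildDefaultConfig()
    operationTimeout: TimeSpan.FromMinutes(5),
    retryDelay: TimeSpan.FromSeconds(10),
    locationMap: new[]
    {
        new ConfigurationLocationEntry(Location.BensLaptop, "worker-node-01"),
    });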

View File

@ -0,0 +1,52 @@
namespace KubernetesWorkflow
{
public class ContainerRecipe
{
public ContainerRecipe(int number, string image, Port[] exposedPorts, Port[] internalPorts, EnvVar[] envVars, object[] additionals)
{
Number = number;
Image = image;
ExposedPorts = exposedPorts;
InternalPorts = internalPorts;
EnvVars = envVars;
Additionals = additionals;
}
public string Name { get { return $"ctnr{Number}"; } }
public int Number { get; }
public string Image { get; }
public Port[] ExposedPorts { get; }
public Port[] InternalPorts { get; }
public EnvVar[] EnvVars { get; }
public object[] Additionals { get; }
public Port GetPortByTag(string tag)
{
return ExposedPorts.Concat(InternalPorts).Single(p => p.Tag == tag);
}
}
public class Port
{
public Port(int number, string tag)
{
Number = number;
Tag = tag;
}
public int Number { get; }
public string Tag { get; }
}
public class EnvVar
{
public EnvVar(string name, string value)
{
Name = name;
Value = value;
}
public string Name { get; }
public string Value { get; }
}
}

View File

@ -0,0 +1,74 @@
namespace KubernetesWorkflow
{
public abstract class ContainerRecipeFactory
{
private readonly List<Port> exposedPorts = new List<Port>();
private readonly List<Port> internalPorts = new List<Port>();
private readonly List<EnvVar> envVars = new List<EnvVar>();
private readonly List<object> additionals = new List<object>();
private RecipeComponentFactory factory = null!;
public ContainerRecipe CreateRecipe(int index, int containerNumber, RecipeComponentFactory factory, StartupConfig config)
{
this.factory = factory;
ContainerNumber = containerNumber;
Index = index;
Initialize(config);
var recipe = new ContainerRecipe(containerNumber, Image, exposedPorts.ToArray(), internalPorts.ToArray(), envVars.ToArray(), additionals.ToArray());
exposedPorts.Clear();
internalPorts.Clear();
envVars.Clear();
additionals.Clear();
this.factory = null!;
return recipe;
}
protected abstract string Image { get; }
protected int ContainerNumber { get; private set; } = 0;
protected int Index { get; private set; } = 0;
protected abstract void Initialize(StartupConfig config);
protected Port AddExposedPort(string tag = "")
{
var p = factory.CreatePort(tag);
exposedPorts.Add(p);
return p;
}
protected Port AddInternalPort(string tag = "")
{
var p = factory.CreatePort(tag);
internalPorts.Add(p);
return p;
}
protected void AddExposedPortAndVar(string name, string tag = "")
{
AddEnvVar(name, AddExposedPort(tag));
}
protected void AddInternalPortAndVar(string name, string tag = "")
{
AddEnvVar(name, AddInternalPort(tag));
}
protected void AddEnvVar(string name, string value)
{
envVars.Add(factory.CreateEnvVar(name, value));
}
protected void AddEnvVar(string name, Port value)
{
envVars.Add(factory.CreateEnvVar(name, value.Number));
}
protected void Additional(object userData)
{
additionals.Add(userData);
}
}
}
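A minimal sketch of a custom recipe built on the helpers above (the image and names are hypothetical):
public class ExampleContainerRecipe : ContainerRecipeFactory
{
    protected override string Image => "example/image:latest";

    protected override void Initialize(StartupConfig startupConfig)
    {
        // Exposed via the k8s service and passed to the container as LISTEN_PORT:
        AddExposedPortAndVar("LISTEN_PORT");
        // Reachable only inside the pod network, tagged for later lookup via GetPortByTag:
        AddInternalPort(tag: "admin");
        AddEnvVar("EXAMPLE_MODE", "1");
    }
}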

View File

@ -0,0 +1,56 @@
using k8s;
namespace KubernetesWorkflow
{
public class K8sCluster
{
public K8sCluster(Configuration configuration)
{
Configuration = configuration;
}
public Configuration Configuration { get; }
public string IP { get; private set; } = string.Empty;
public KubernetesClientConfiguration GetK8sClientConfig()
{
var config = GetConfig();
UpdateIp(config);
return config;
}
public string GetNodeLabelForLocation(Location location)
{
if (location == Location.Unspecified) return string.Empty;
return Configuration.LocationMap.Single(l => l.Location == location).WorkerName;
}
public TimeSpan K8sOperationTimeout()
{
return Configuration.OperationTimeout;
}
public TimeSpan WaitForK8sServiceDelay()
{
return Configuration.RetryDelay;
}
private KubernetesClientConfiguration GetConfig()
{
if (Configuration.KubeConfigFile != null)
{
return KubernetesClientConfiguration.BuildConfigFromConfigFile(Configuration.KubeConfigFile);
}
else
{
return KubernetesClientConfiguration.BuildDefaultConfig();
}
}
private void UpdateIp(KubernetesClientConfiguration config)
{
var host = config.Host.Replace("https://", "");
IP = host.Substring(0, host.IndexOf(':'));
}
}
}

View File

@ -0,0 +1,372 @@
using k8s;
using k8s.Models;
using Utils;
namespace KubernetesWorkflow
{
public class K8sController
{
private readonly K8sCluster cluster;
private readonly KnownK8sPods knownPods;
private readonly WorkflowNumberSource workflowNumberSource;
private readonly Kubernetes client;
public K8sController(K8sCluster cluster, KnownK8sPods knownPods, WorkflowNumberSource workflowNumberSource)
{
this.cluster = cluster;
this.knownPods = knownPods;
this.workflowNumberSource = workflowNumberSource;
client = new Kubernetes(cluster.GetK8sClientConfig());
}
public void Dispose()
{
client.Dispose();
}
public RunningPod BringOnline(ContainerRecipe[] containerRecipes, Location location)
{
EnsureTestNamespace();
var deploymentName = CreateDeployment(containerRecipes, location);
var (serviceName, servicePortsMap) = CreateService(containerRecipes);
var (podName, podIp) = FetchNewPod();
return new RunningPod(cluster, podName, podIp, deploymentName, serviceName, servicePortsMap);
}
public void Stop(RunningPod pod)
{
if (!string.IsNullOrEmpty(pod.ServiceName)) DeleteService(pod.ServiceName);
DeleteDeployment(pod.DeploymentName);
WaitUntilDeploymentOffline(pod.DeploymentName);
WaitUntilPodOffline(pod.Name);
}
public void DownloadPodLog(RunningPod pod, ContainerRecipe recipe, ILogHandler logHandler)
{
using var stream = client.ReadNamespacedPodLog(pod.Name, K8sNamespace, recipe.Name);
logHandler.Log(stream);
}
public string ExecuteCommand(RunningPod pod, string containerName, string command, params string[] args)
{
var runner = new CommandRunner(client, K8sNamespace, pod, containerName, command, args);
runner.Run();
return runner.GetStdOut();
}
public void DeleteAllResources()
{
DeleteNamespace();
WaitUntilNamespaceDeleted();
}
#region Namespace management
private void EnsureTestNamespace()
{
if (IsTestNamespaceOnline()) return;
var namespaceSpec = new V1Namespace
{
ApiVersion = "v1",
Metadata = new V1ObjectMeta
{
Name = K8sNamespace,
Labels = new Dictionary<string, string> { { "name", K8sNamespace } }
}
};
client.CreateNamespace(namespaceSpec);
WaitUntilNamespaceCreated();
}
private void DeleteNamespace()
{
if (IsTestNamespaceOnline())
{
client.DeleteNamespace(K8sNamespace, null, null, gracePeriodSeconds: 0);
}
}
private string K8sNamespace
{
get { return cluster.Configuration.K8sNamespace; }
}
private bool IsTestNamespaceOnline()
{
return client.ListNamespace().Items.Any(n => n.Metadata.Name == K8sNamespace);
}
#endregion
#region Deployment management
private string CreateDeployment(ContainerRecipe[] containerRecipes, Location location)
{
var deploymentSpec = new V1Deployment
{
ApiVersion = "apps/v1",
Metadata = CreateDeploymentMetadata(),
Spec = new V1DeploymentSpec
{
Replicas = 1,
Selector = new V1LabelSelector
{
MatchLabels = GetSelector()
},
Template = new V1PodTemplateSpec
{
Metadata = new V1ObjectMeta
{
Labels = GetSelector()
},
Spec = new V1PodSpec
{
NodeSelector = CreateNodeSelector(location),
Containers = CreateDeploymentContainers(containerRecipes)
}
}
}
};
client.CreateNamespacedDeployment(deploymentSpec, K8sNamespace);
WaitUntilDeploymentOnline(deploymentSpec.Metadata.Name);
return deploymentSpec.Metadata.Name;
}
private void DeleteDeployment(string deploymentName)
{
client.DeleteNamespacedDeployment(deploymentName, K8sNamespace);
WaitUntilDeploymentOffline(deploymentName);
}
private IDictionary<string, string> CreateNodeSelector(Location location)
{
if (location == Location.Unspecified) return new Dictionary<string, string>();
return new Dictionary<string, string>
{
{ "codex-test-location", cluster.GetNodeLabelForLocation(location) }
};
}
private IDictionary<string, string> GetSelector()
{
return new Dictionary<string, string> { { "codex-test-node", "dist-test-" + workflowNumberSource.WorkflowNumber } };
}
private V1ObjectMeta CreateDeploymentMetadata()
{
return new V1ObjectMeta
{
Name = "deploy-" + workflowNumberSource.WorkflowNumber,
NamespaceProperty = K8sNamespace
};
}
private List<V1Container> CreateDeploymentContainers(ContainerRecipe[] containerRecipes)
{
return containerRecipes.Select(r => CreateDeploymentContainer(r)).ToList();
}
private V1Container CreateDeploymentContainer(ContainerRecipe recipe)
{
return new V1Container
{
Name = recipe.Name,
Image = recipe.Image,
Ports = CreateContainerPorts(recipe),
Env = CreateEnv(recipe)
};
}
private List<V1EnvVar> CreateEnv(ContainerRecipe recipe)
{
return recipe.EnvVars.Select(CreateEnvVar).ToList();
}
private V1EnvVar CreateEnvVar(EnvVar envVar)
{
return new V1EnvVar
{
Name = envVar.Name,
Value = envVar.Value,
};
}
private List<V1ContainerPort> CreateContainerPorts(ContainerRecipe recipe)
{
var exposedPorts = recipe.ExposedPorts.Select(p => CreateContainerPort(recipe, p));
var internalPorts = recipe.InternalPorts.Select(p => CreateContainerPort(recipe, p));
return exposedPorts.Concat(internalPorts).ToList();
}
private V1ContainerPort CreateContainerPort(ContainerRecipe recipe, Port port)
{
return new V1ContainerPort
{
Name = GetNameForPort(recipe, port),
ContainerPort = port.Number
};
}
private string GetNameForPort(ContainerRecipe recipe, Port port)
{
return $"p{workflowNumberSource.WorkflowNumber}-{recipe.Number}-{port.Number}";
}
#endregion
#region Service management
private (string, Dictionary<ContainerRecipe, Port[]>) CreateService(ContainerRecipe[] containerRecipes)
{
var result = new Dictionary<ContainerRecipe, Port[]>();
var ports = CreateServicePorts(result, containerRecipes);
if (!ports.Any())
{
// None of these container-recipes wish to expose anything via a service port.
// So, we don't have to create a service.
return (string.Empty, result);
}
var serviceSpec = new V1Service
{
ApiVersion = "v1",
Metadata = CreateServiceMetadata(),
Spec = new V1ServiceSpec
{
Type = "NodePort",
Selector = GetSelector(),
Ports = ports
}
};
client.CreateNamespacedService(serviceSpec, K8sNamespace);
return (serviceSpec.Metadata.Name, result);
}
private void DeleteService(string serviceName)
{
client.DeleteNamespacedService(serviceName, K8sNamespace);
}
private V1ObjectMeta CreateServiceMetadata()
{
return new V1ObjectMeta
{
Name = "service-" + workflowNumberSource.WorkflowNumber,
NamespaceProperty = K8sNamespace
};
}
private List<V1ServicePort> CreateServicePorts(Dictionary<ContainerRecipe, Port[]> servicePorts, ContainerRecipe[] recipes)
{
var result = new List<V1ServicePort>();
foreach (var recipe in recipes)
{
result.AddRange(CreateServicePorts(servicePorts, recipe));
}
return result;
}
private List<V1ServicePort> CreateServicePorts(Dictionary<ContainerRecipe, Port[]> servicePorts, ContainerRecipe recipe)
{
var result = new List<V1ServicePort>();
var usedPorts = new List<Port>();
foreach (var port in recipe.ExposedPorts)
{
var servicePort = workflowNumberSource.GetServicePort();
usedPorts.Add(new Port(servicePort, ""));
result.Add(new V1ServicePort
{
Name = GetNameForPort(recipe, port),
Protocol = "TCP",
Port = port.Number,
TargetPort = GetNameForPort(recipe, port),
NodePort = servicePort
});
}
servicePorts.Add(recipe, usedPorts.ToArray());
return result;
}
#endregion
#region Waiting
private void WaitUntilNamespaceCreated()
{
WaitUntil(() => IsTestNamespaceOnline());
}
private void WaitUntilNamespaceDeleted()
{
WaitUntil(() => !IsTestNamespaceOnline());
}
private void WaitUntilDeploymentOnline(string deploymentName)
{
WaitUntil(() =>
{
var deployment = client.ReadNamespacedDeployment(deploymentName, K8sNamespace);
return deployment?.Status.AvailableReplicas != null && deployment.Status.AvailableReplicas > 0;
});
}
private void WaitUntilDeploymentOffline(string deploymentName)
{
WaitUntil(() =>
{
var deployments = client.ListNamespacedDeployment(K8sNamespace);
var deployment = deployments.Items.SingleOrDefault(d => d.Metadata.Name == deploymentName);
return deployment == null || deployment.Status.AvailableReplicas == 0;
});
}
private void WaitUntilPodOffline(string podName)
{
WaitUntil(() =>
{
var pods = client.ListNamespacedPod(K8sNamespace).Items;
var pod = pods.SingleOrDefault(p => p.Metadata.Name == podName);
return pod == null;
});
}
private void WaitUntil(Func<bool> predicate)
{
Time.WaitUntil(predicate, cluster.K8sOperationTimeout(), cluster.WaitForK8sServiceDelay());
}
#endregion
private (string, string) FetchNewPod()
{
var pods = client.ListNamespacedPod(K8sNamespace).Items;
var newPods = pods.Where(p => !knownPods.Contains(p.Name())).ToArray();
if (newPods.Length != 1) throw new InvalidOperationException("Expected exactly 1 new pod to be created. Test infra failure.");
var newPod = newPods.Single();
var name = newPod.Name();
var ip = newPod.Status.PodIP;
if (string.IsNullOrEmpty(name)) throw new InvalidOperationException("Invalid pod name received. Test infra failure.");
if (string.IsNullOrEmpty(ip)) throw new InvalidOperationException("Invalid pod IP received. Test infra failure.");
knownPods.Add(name);
return (name, ip);
}
}
}

View File

@ -1,4 +1,4 @@
namespace CodexDistTestCore
namespace KubernetesWorkflow
{
public class KnownK8sPods
{

View File

@ -0,0 +1,18 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<RootNamespace>KubernetesWorkflow</RootNamespace>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="KubernetesClient" Version="10.1.4" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Utils\Utils.csproj" />
</ItemGroup>
</Project>

View File

@ -0,0 +1,9 @@
namespace KubernetesWorkflow
{
public enum Location
{
Unspecified,
BensLaptop,
BensOldGamingMachine
}
}

View File

@ -0,0 +1,25 @@
using System.Globalization;
using Utils;
namespace KubernetesWorkflow
{
public class RecipeComponentFactory
{
private NumberSource portNumberSource = new NumberSource(8080);
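// Internal container ports are assigned sequentially starting at 8080, so each recipe component gets a unique port.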
public Port CreatePort(string tag)
{
return new Port(portNumberSource.GetNextNumber(), tag);
}
public EnvVar CreateEnvVar(string name, int value)
{
return CreateEnvVar(name, value.ToString(CultureInfo.InvariantCulture));
}
public EnvVar CreateEnvVar(string name, string value)
{
return new EnvVar(name, value);
}
}
}

View File

@ -0,0 +1,40 @@
namespace KubernetesWorkflow
{
public class RunningContainers
{
public RunningContainers(StartupConfig startupConfig, RunningPod runningPod, RunningContainer[] containers)
{
StartupConfig = startupConfig;
RunningPod = runningPod;
Containers = containers;
}
public StartupConfig StartupConfig { get; }
public RunningPod RunningPod { get; }
public RunningContainer[] Containers { get; }
public string Describe()
{
return $"[{RunningPod.Ip}]";
}
}
public class RunningContainer
{
public RunningContainer(RunningPod pod, ContainerRecipe recipe, Port[] servicePorts)
{
Pod = pod;
Recipe = recipe;
ServicePorts = servicePorts;
}
public string GetName()
{
return $"<{Recipe.Name}>";
}
public RunningPod Pod { get; }
public ContainerRecipe Recipe { get; }
public Port[] ServicePorts { get; }
}
}

View File

@ -0,0 +1,28 @@
namespace KubernetesWorkflow
{
public class RunningPod
{
private readonly Dictionary<ContainerRecipe, Port[]> servicePortMap;
public RunningPod(K8sCluster cluster, string name, string ip, string deploymentName, string serviceName, Dictionary<ContainerRecipe, Port[]> servicePortMap)
{
Cluster = cluster;
Name = name;
Ip = ip;
DeploymentName = deploymentName;
ServiceName = serviceName;
this.servicePortMap = servicePortMap;
}
public K8sCluster Cluster { get; }
public string Name { get; }
public string Ip { get; }
internal string DeploymentName { get; }
internal string ServiceName { get; }
public Port[] GetServicePortsForContainerRecipe(ContainerRecipe containerRecipe)
{
return servicePortMap[containerRecipe];
}
}
}

View File

@ -0,0 +1,18 @@
namespace KubernetesWorkflow
{
public class StartupConfig
{
private readonly List<object> configs = new List<object>();
public void Add(object config)
{
configs.Add(config);
}
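// Returns the single stored config object assignable to T; throws if none or more than one match.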
public T Get<T>()
{
var match = configs.Single(c => typeof(T).IsAssignableFrom(c.GetType()));
return (T)match;
}
}
}

View File

@ -0,0 +1,115 @@
using System.IO;
namespace KubernetesWorkflow
{
public class StartupWorkflow
{
private readonly WorkflowNumberSource numberSource;
private readonly K8sCluster cluster;
private readonly KnownK8sPods knownK8SPods;
private readonly RecipeComponentFactory componentFactory = new RecipeComponentFactory();
internal StartupWorkflow(WorkflowNumberSource numberSource, K8sCluster cluster, KnownK8sPods knownK8SPods)
{
this.numberSource = numberSource;
this.cluster = cluster;
this.knownK8SPods = knownK8SPods;
}
public RunningContainers Start(int numberOfContainers, Location location, ContainerRecipeFactory recipeFactory, StartupConfig startupConfig)
{
return K8s(controller =>
{
var recipes = CreateRecipes(numberOfContainers, recipeFactory, startupConfig);
var runningPod = controller.BringOnline(recipes, location);
return new RunningContainers(startupConfig, runningPod, CreateContainers(runningPod, recipes));
});
}
public void Stop(RunningContainers runningContainers)
{
K8s(controller =>
{
controller.Stop(runningContainers.RunningPod);
});
}
public void DownloadContainerLog(RunningContainer container, ILogHandler logHandler)
{
K8s(controller =>
{
controller.DownloadPodLog(container.Pod, container.Recipe, logHandler);
});
}
public string ExecuteCommand(RunningContainer container, string command, params string[] args)
{
return K8s(controller =>
{
return controller.ExecuteCommand(container.Pod, container.Recipe.Name, command, args);
});
}
public void DeleteAllResources()
{
K8s(controller =>
{
controller.DeleteAllResources();
});
}
private static RunningContainer[] CreateContainers(RunningPod runningPod, ContainerRecipe[] recipes)
{
return recipes.Select(r => new RunningContainer(runningPod, r, runningPod.GetServicePortsForContainerRecipe(r))).ToArray();
}
private ContainerRecipe[] CreateRecipes(int numberOfContainers, ContainerRecipeFactory recipeFactory, StartupConfig startupConfig)
{
var result = new List<ContainerRecipe>();
for (var i = 0; i < numberOfContainers; i++)
{
result.Add(recipeFactory.CreateRecipe(i, numberSource.GetContainerNumber(), componentFactory, startupConfig));
}
return result.ToArray();
}
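// Each K8s operation runs against a freshly created controller, which is disposed as soon as the operation completes.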
private void K8s(Action<K8sController> action)
{
var controller = new K8sController(cluster, knownK8SPods, numberSource);
action(controller);
controller.Dispose();
}
private T K8s<T>(Func<K8sController, T> action)
{
var controller = new K8sController(cluster, knownK8SPods, numberSource);
var result = action(controller);
controller.Dispose();
return result;
}
}
public interface ILogHandler
{
void Log(Stream log);
}
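// Convenience base that reads the downloaded log stream line by line and forwards each line to ProcessLine.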
public abstract class LogHandler : ILogHandler
{
public void Log(Stream log)
{
using var reader = new StreamReader(log);
var line = reader.ReadLine();
while (line != null)
{
ProcessLine(line);
line = reader.ReadLine();
}
}
protected abstract void ProcessLine(string line);
}
}

View File

@ -0,0 +1,27 @@
using Utils;
namespace KubernetesWorkflow
{
public class WorkflowCreator
{
private readonly NumberSource numberSource = new NumberSource(0);
private readonly NumberSource servicePortNumberSource = new NumberSource(30001);
private readonly NumberSource containerNumberSource = new NumberSource(0);
private readonly KnownK8sPods knownPods = new KnownK8sPods();
private readonly K8sCluster cluster;
public WorkflowCreator(Configuration configuration)
{
cluster = new K8sCluster(configuration);
}
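// The workflow, service-port, and container number sources are shared across all workflows created here,
// so parallel workflows never collide on NodePorts or container numbers.
// Usage sketch (assuming a Configuration instance, recipe factory, and startup config are available):
//   var creator = new WorkflowCreator(configuration);
//   var workflow = creator.CreateWorkflow();
//   var containers = workflow.Start(2, Location.Unspecified, recipeFactory, startupConfig);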
public StartupWorkflow CreateWorkflow()
{
var workflowNumberSource = new WorkflowNumberSource(numberSource.GetNextNumber(),
servicePortNumberSource,
containerNumberSource);
return new StartupWorkflow(workflowNumberSource, cluster, knownPods);
}
}
}

View File

@ -0,0 +1,29 @@
using Utils;
namespace KubernetesWorkflow
{
public class WorkflowNumberSource
{
private readonly NumberSource servicePortNumberSource;
private readonly NumberSource containerNumberSource;
public WorkflowNumberSource(int workflowNumber, NumberSource servicePortNumberSource, NumberSource containerNumberSource)
{
WorkflowNumber = workflowNumber;
this.servicePortNumberSource = servicePortNumberSource;
this.containerNumberSource = containerNumberSource;
}
public int WorkflowNumber { get; }
public int GetContainerNumber()
{
return containerNumberSource.GetNextNumber();
}
public int GetServicePort()
{
return servicePortNumberSource.GetNextNumber();
}
}
}

36
Logging/BaseLog.cs Normal file
View File

@ -0,0 +1,36 @@
namespace Logging
{
public abstract class BaseLog
{
private bool hasFailed;
private LogFile? logFile;
protected abstract LogFile CreateLogFile();
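// The underlying log file is created lazily on first access.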
protected LogFile LogFile
{
get
{
if (logFile == null) logFile = CreateLogFile();
return logFile;
}
}
public void Log(string message)
{
LogFile.Write(message);
}
public void Error(string message)
{
Log($"[ERROR] {message}");
}
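// Appends "_FAILED" to the log filename the first time a failure is reported; subsequent calls are no-ops.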
public void MarkAsFailed()
{
if (hasFailed) return;
hasFailed = true;
LogFile.ConcatToFilename("_FAILED");
}
}
}

49
Logging/FixtureLog.cs Normal file
View File

@ -0,0 +1,49 @@
using NUnit.Framework;
namespace Logging
{
public class FixtureLog : BaseLog
{
private readonly DateTime start;
private readonly string fullName;
public FixtureLog(LogConfig config)
{
start = DateTime.UtcNow;
var folder = DetermineFolder(config);
var fixtureName = GetFixtureName();
fullName = Path.Combine(folder, fixtureName);
}
public TestLog CreateTestLog()
{
return new TestLog(fullName);
}
protected override LogFile CreateLogFile()
{
return new LogFile(fullName, "log");
}
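// Fixture logs are grouped by date: <LogRoot>/<yyyy-MM>/<dd>/<HH-mm-ssZ_FixtureName>.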
private string DetermineFolder(LogConfig config)
{
return Path.Join(
config.LogRoot,
$"{start.Year}-{Pad(start.Month)}",
Pad(start.Day));
}
private string GetFixtureName()
{
var test = TestContext.CurrentContext.Test;
var className = test.ClassName!.Substring(test.ClassName.LastIndexOf('.') + 1);
return $"{Pad(start.Hour)}-{Pad(start.Minute)}-{Pad(start.Second)}Z_{className.Replace('.', '-')}";
}
private static string Pad(int n)
{
return n.ToString().PadLeft(2, '0');
}
}
}

12
Logging/LogConfig.cs Normal file
View File

@ -0,0 +1,12 @@
namespace Logging
{
public class LogConfig
{
public LogConfig(string logRoot)
{
LogRoot = logRoot;
}
public string LogRoot { get; }
}
}

57
Logging/LogFile.cs Normal file
View File

@ -0,0 +1,57 @@
namespace Logging
{
public class LogFile
{
private readonly string extension;
private string filename;
public LogFile(string filename, string extension)
{
this.filename = filename;
this.extension = extension;
FullFilename = filename + "." + extension;
EnsurePathExists(filename);
}
public string FullFilename { get; private set; }
public void Write(string message)
{
WriteRaw($"{GetTimestamp()} {message}");
}
public void WriteRaw(string message)
{
try
{
File.AppendAllLines(FullFilename, new[] { message });
}
catch (Exception ex)
{
Console.WriteLine("Writing to log has failed: " + ex);
}
}
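// Renames the log file on disk by appending the given suffix to its name (used to flag failed runs).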
public void ConcatToFilename(string toAdd)
{
var oldFullName = FullFilename;
filename += toAdd;
FullFilename = filename + "." + extension;
File.Move(oldFullName, FullFilename);
}
private static string GetTimestamp()
{
return $"[{DateTime.UtcNow.ToString("u")}]";
}
private void EnsurePathExists(string filename)
{
var path = new FileInfo(filename).Directory!.FullName;
Directory.CreateDirectory(path);
}
}
}

19
Logging/Logging.csproj Normal file
View File

@ -0,0 +1,19 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<RootNamespace>Logging</RootNamespace>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="nunit" Version="3.13.3" />
<PackageReference Include="NUnit3TestAdapter" Version="4.4.2" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.5.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Utils\Utils.csproj" />
</ItemGroup>
</Project>

64
Logging/TestLog.cs Normal file
View File

@ -0,0 +1,64 @@
using NUnit.Framework;
using Utils;
namespace Logging
{
public class TestLog : BaseLog
{
private readonly NumberSource subfileNumberSource = new NumberSource(0);
private readonly string methodName;
private readonly string fullName;
public TestLog(string folder)
{
methodName = GetMethodName();
fullName = Path.Combine(folder, methodName);
Log($"*** Begin: {methodName}");
}
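// Creates an additional log file next to the test log, suffixed with a zero-padded sequence number.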
public LogFile CreateSubfile(string ext = "log")
{
return new LogFile($"{fullName}_{GetSubfileNumber()}", ext);
}
public void EndTest()
{
var result = TestContext.CurrentContext.Result;
Log($"*** Finished: {methodName} = {result.Outcome.Status}");
if (!string.IsNullOrEmpty(result.Message))
{
Log(result.Message);
Log($"{result.StackTrace}");
}
if (result.Outcome.Status == NUnit.Framework.Interfaces.TestStatus.Failed)
{
MarkAsFailed();
}
}
protected override LogFile CreateLogFile()
{
return new LogFile(fullName, "log");
}
private string GetMethodName()
{
var test = TestContext.CurrentContext.Test;
var args = FormatArguments(test);
return $"{test.MethodName}{args}";
}
private string GetSubfileNumber()
{
return subfileNumberSource.GetNextNumber().ToString().PadLeft(6, '0');
}
private static string FormatArguments(TestContext.TestAdapter test)
{
if (test.Arguments == null || !test.Arguments.Any()) return "";
return $"[{string.Join(',', test.Arguments)}]";
}
}
}

View File

@ -1,7 +1,8 @@
using CodexDistTestCore;
using DistTestCore;
using DistTestCore.Codex;
using NUnit.Framework;
namespace LongTests.BasicTests
namespace TestsLong.BasicTests
{
[TestFixture]
public class LargeFileTests : DistTest

View File

@ -1,7 +1,8 @@
using CodexDistTestCore;
using DistTestCore;
using DistTestCore.Codex;
using NUnit.Framework;
namespace LongTests.BasicTests
namespace TestsLong.BasicTests
{
public class TestInfraTests : DistTest
{

View File

@ -13,7 +13,7 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\CodexDistTestCore\CodexDistTestCore.csproj" />
<ProjectReference Include="..\DistTestCore\DistTestCore.csproj" />
</ItemGroup>
</Project>

View File

@ -0,0 +1,114 @@
using Logging;
using Nethereum.ABI.FunctionEncoding.Attributes;
using Nethereum.Contracts;
using Nethereum.Hex.HexTypes;
using Nethereum.Web3;
using System.Numerics;
using Utils;
namespace NethereumWorkflow
{
public class NethereumInteraction
{
private readonly List<Task> openTasks = new List<Task>();
private readonly TestLog log;
private readonly Web3 web3;
private readonly string rootAccount;
internal NethereumInteraction(TestLog log, Web3 web3, string rootAccount)
{
this.log = log;
this.web3 = web3;
this.rootAccount = rootAccount;
}
public string GetTokenAddress(string marketplaceAddress)
{
var function = new GetTokenFunction();
var handler = web3.Eth.GetContractQueryHandler<GetTokenFunction>();
return Time.Wait(handler.QueryAsync<string>(marketplaceAddress, function));
}
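// Sends the transfer immediately; receipt polling is added to openTasks and awaited later by WaitForAllTransactions().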
public void TransferWeiTo(string account, decimal amount)
{
if (amount < 1 || string.IsNullOrEmpty(account)) throw new ArgumentException("Invalid arguments for TransferWeiTo");
var value = ToHexBig(amount);
var transactionId = Time.Wait(web3.Eth.TransactionManager.SendTransactionAsync(rootAccount, account, value));
openTasks.Add(web3.Eth.TransactionManager.TransactionReceiptService.PollForReceiptAsync(transactionId));
}
public void MintTestTokens(string account, decimal amount, string tokenAddress)
{
if (amount < 1 || string.IsNullOrEmpty(account)) throw new ArgumentException("Invalid arguments for MintTestTokens");
var function = new MintTokensFunction
{
Holder = account,
Amount = ToBig(amount)
};
var handler = web3.Eth.GetContractTransactionHandler<MintTokensFunction>();
openTasks.Add(handler.SendRequestAndWaitForReceiptAsync(tokenAddress, function));
}
public decimal GetBalance(string tokenAddress, string account)
{
var function = new GetTokenBalanceFunction
{
Owner = account
};
var handler = web3.Eth.GetContractQueryHandler<GetTokenBalanceFunction>();
return ToDecimal(Time.Wait(handler.QueryAsync<BigInteger>(tokenAddress, function)));
}
public void WaitForAllTransactions()
{
var tasks = openTasks.ToArray();
openTasks.Clear();
Task.WaitAll(tasks);
}
private HexBigInteger ToHexBig(decimal amount)
{
var bigint = ToBig(amount);
var str = bigint.ToString("X");
return new HexBigInteger(str);
}
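// Note: converting decimal to BigInteger discards any fractional part of the amount.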
private BigInteger ToBig(decimal amount)
{
return new BigInteger(amount);
}
private decimal ToDecimal(BigInteger bigInteger)
{
return (decimal)bigInteger;
}
}
[Function("token", "address")]
public class GetTokenFunction : FunctionMessage
{
}
[Function("mint")]
public class MintTokensFunction : FunctionMessage
{
[Parameter("address", "holder", 1)]
public string Holder { get; set; } = string.Empty;
[Parameter("uint256", "amount", 2)]
public BigInteger Amount { get; set; }
}
[Function("balanceOf", "uint256")]
public class GetTokenBalanceFunction : FunctionMessage
{
[Parameter("address", "owner", 1)]
public string Owner { get; set; } = string.Empty;
}
}

View File

@ -0,0 +1,34 @@
using Logging;
using Nethereum.Web3;
namespace NethereumWorkflow
{
public class NethereumInteractionCreator
{
private readonly TestLog log;
private readonly string ip;
private readonly int port;
private readonly string rootAccount;
private readonly string privateKey;
public NethereumInteractionCreator(TestLog log, string ip, int port, string rootAccount, string privateKey)
{
this.log = log;
this.ip = ip;
this.port = port;
this.rootAccount = rootAccount;
this.privateKey = privateKey;
}
public NethereumInteraction CreateWorkflow()
{
return new NethereumInteraction(log, CreateWeb3(), rootAccount);
}
private Web3 CreateWeb3()
{
var account = new Nethereum.Web3.Accounts.Account(privateKey);
return new Web3(account, $"http://{ip}:{port}");
}
}
}

View File

@ -0,0 +1,19 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<RootNamespace>NethereumWorkflow</RootNamespace>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Nethereum.Web3" Version="4.14.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Logging\Logging.csproj" />
<ProjectReference Include="..\Utils\Utils.csproj" />
</ItemGroup>
</Project>

View File

@ -0,0 +1,92 @@
using DistTestCore;
using DistTestCore.Codex;
using NUnit.Framework;
using Utils;
namespace Tests.BasicTests
{
[TestFixture]
public class ExampleTests : DistTest
{
[Test]
public void CodexLogExample()
{
var primary = SetupCodexNodes(1)
.WithLogLevel(CodexLogLevel.Trace)
.BringOnline()[0];
primary.UploadFile(GenerateTestFile(5.MB()));
var log = primary.DownloadLog();
log.AssertLogContains("Uploaded file");
}
[Test]
public void TwoMetricsExample()
{
var group = SetupCodexNodes(2)
.EnableMetrics()
.BringOnline();
var group2 = SetupCodexNodes(2)
.EnableMetrics()
.BringOnline();
var primary = group[0];
var secondary = group[1];
var primary2 = group2[0];
var secondary2 = group2[1];
primary.ConnectToPeer(secondary);
primary2.ConnectToPeer(secondary2);
Thread.Sleep(TimeSpan.FromMinutes(2));
primary.Metrics.AssertThat("libp2p_peers", Is.EqualTo(1));
primary2.Metrics.AssertThat("libp2p_peers", Is.EqualTo(1));
}
[Test]
public void MarketplaceExample()
{
var primary = SetupCodexNodes(1)
.WithStorageQuota(11.GB())
.EnableMarketplace(initialBalance: 234.TestTokens())
.BringOnline()[0];
primary.Marketplace.AssertThatBalance(Is.EqualTo(234.TestTokens()));
var secondary = SetupCodexNodes(1)
.EnableMarketplace(initialBalance: 1000.TestTokens())
.BringOnline()[0];
primary.ConnectToPeer(secondary);
primary.Marketplace.MakeStorageAvailable(
size: 10.GB(),
minPricePerBytePerSecond: 1.TestTokens(),
maxCollateral: 20.TestTokens(),
maxDuration: TimeSpan.FromMinutes(3));
var testFile = GenerateTestFile(10.MB());
var contentId = secondary.UploadFile(testFile);
secondary.Marketplace.RequestStorage(contentId,
pricePerBytePerSecond: 2.TestTokens(),
requiredCollateral: 10.TestTokens(),
minRequiredNumberOfNodes: 1,
proofProbability: 5,
duration: TimeSpan.FromMinutes(2));
Time.Sleep(TimeSpan.FromMinutes(1));
primary.Marketplace.AssertThatBalance(Is.LessThan(234.TestTokens()), "Collateral was not placed.");
Time.Sleep(TimeSpan.FromMinutes(2));
primary.Marketplace.AssertThatBalance(Is.GreaterThan(234.TestTokens()), "Storer was not paid for storage.");
secondary.Marketplace.AssertThatBalance(Is.LessThan(1000.TestTokens()), "Contractor was not charged for storage.");
}
}
}

View File

@ -0,0 +1,40 @@
using DistTestCore;
using NUnit.Framework;
namespace Tests.BasicTests
{
[TestFixture]
public class OneClientTests : DistTest
{
[Test]
public void OneClientTest()
{
var primary = SetupCodexNodes(1).BringOnline()[0];
PerformOneClientTest(primary);
}
[Test]
public void RestartTest()
{
var group = SetupCodexNodes(1).BringOnline();
var setup = group.BringOffline();
var primary = setup.BringOnline()[0];
PerformOneClientTest(primary);
}
private void PerformOneClientTest(IOnlineCodexNode primary)
{
var testFile = GenerateTestFile(1.MB());
var contentId = primary.UploadFile(testFile);
var downloadedFile = primary.DownloadContent(contentId);
testFile.AssertIsEqual(downloadedFile);
}
}
}

View File

@ -1,121 +0,0 @@
using CodexDistTestCore;
using CodexDistTestCore.Config;
using NUnit.Framework;
namespace Tests.BasicTests
{
[TestFixture]
public class SimpleTests : DistTest
{
[Test]
public void GetDebugInfo()
{
var dockerImage = new CodexDockerImage();
var node = SetupCodexNodes(1).BringOnline()[0];
var debugInfo = node.GetDebugInfo();
Assert.That(debugInfo.spr, Is.Not.Empty);
Assert.That(debugInfo.codex.revision, Is.EqualTo(dockerImage.GetExpectedImageRevision()));
}
[Test, DontDownloadLogsAndMetricsOnFailure]
public void CanAccessLogs()
{
var node = SetupCodexNodes(1).BringOnline()[0];
var log = node.DownloadLog();
log.AssertLogContains("Started codex node");
}
[Test]
public void TwoMetricsExample()
{
var group = SetupCodexNodes(2)
.EnableMetrics()
.BringOnline();
var group2 = SetupCodexNodes(2)
.EnableMetrics()
.BringOnline();
var primary = group[0];
var secondary = group[1];
var primary2 = group2[0];
var secondary2 = group2[1];
primary.ConnectToPeer(secondary);
primary2.ConnectToPeer(secondary2);
Thread.Sleep(TimeSpan.FromMinutes(5));
primary.Metrics.AssertThat("libp2p_peers", Is.EqualTo(1));
primary2.Metrics.AssertThat("libp2p_peers", Is.EqualTo(1));
}
[Test]
public void OneClientTest()
{
var primary = SetupCodexNodes(1).BringOnline()[0];
var testFile = GenerateTestFile(1.MB());
var contentId = primary.UploadFile(testFile);
var downloadedFile = primary.DownloadContent(contentId);
testFile.AssertIsEqual(downloadedFile);
}
[Test]
public void TwoClientsOnePodTest()
{
var group = SetupCodexNodes(2).BringOnline();
var primary = group[0];
var secondary = group[1];
PerformTwoClientTest(primary, secondary);
}
[Test]
public void TwoClientsTwoPodsTest()
{
var primary = SetupCodexNodes(1).BringOnline()[0];
var secondary = SetupCodexNodes(1).BringOnline()[0];
PerformTwoClientTest(primary, secondary);
}
[Test]
[Ignore("Requires Location map to be configured for k8s cluster.")]
public void TwoClientsTwoLocationsTest()
{
var primary = SetupCodexNodes(1)
.At(Location.BensLaptop)
.BringOnline()[0];
var secondary = SetupCodexNodes(1)
.At(Location.BensOldGamingMachine)
.BringOnline()[0];
PerformTwoClientTest(primary, secondary);
}
private void PerformTwoClientTest(IOnlineCodexNode primary, IOnlineCodexNode secondary)
{
primary.ConnectToPeer(secondary);
var testFile = GenerateTestFile(1.MB());
var contentId = primary.UploadFile(testFile);
var downloadedFile = secondary.DownloadContent(contentId);
testFile.AssertIsEqual(downloadedFile);
}
}
}

View File

@ -0,0 +1,59 @@
using DistTestCore;
using KubernetesWorkflow;
using NUnit.Framework;
namespace Tests.BasicTests
{
[TestFixture]
public class TwoClientTests : DistTest
{
[Test]
public void TwoClientsOnePodTest()
{
var group = SetupCodexNodes(2).BringOnline();
var primary = group[0];
var secondary = group[1];
PerformTwoClientTest(primary, secondary);
}
[Test]
public void TwoClientsTwoPodsTest()
{
var primary = SetupCodexNodes(1).BringOnline()[0];
var secondary = SetupCodexNodes(1).BringOnline()[0];
PerformTwoClientTest(primary, secondary);
}
[Test]
[Ignore("Requires Location map to be configured for k8s cluster.")]
public void TwoClientsTwoLocationsTest()
{
var primary = SetupCodexNodes(1)
.At(Location.BensLaptop)
.BringOnline()[0];
var secondary = SetupCodexNodes(1)
.At(Location.BensOldGamingMachine)
.BringOnline()[0];
PerformTwoClientTest(primary, secondary);
}
private void PerformTwoClientTest(IOnlineCodexNode primary, IOnlineCodexNode secondary)
{
primary.ConnectToPeer(secondary);
var testFile = GenerateTestFile(1.MB());
var contentId = primary.UploadFile(testFile);
var downloadedFile = secondary.DownloadContent(contentId);
testFile.AssertIsEqual(downloadedFile);
}
}
}

View File

@ -13,7 +13,7 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\CodexDistTestCore\CodexDistTestCore.csproj" />
<ProjectReference Include="..\DistTestCore\DistTestCore.csproj" />
</ItemGroup>
</Project>

View File

@ -1,4 +1,4 @@
namespace CodexDistTestCore
namespace Utils
{
public class NumberSource
{

42
Utils/Time.cs Normal file
View File

@ -0,0 +1,42 @@
namespace Utils
{
public static class Time
{
public static void Sleep(TimeSpan span)
{
Thread.Sleep(span);
}
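// Blocks the calling thread until the task completes and returns its result.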
public static T Wait<T>(Task<T> task)
{
task.Wait();
return task.Result;
}
public static string FormatDuration(TimeSpan d)
{
var result = "";
if (d.Days > 0) result += $"{d.Days} days, ";
if (d.Hours > 0) result += $"{d.Hours} hours, ";
if (d.Minutes > 0) result += $"{d.Minutes} mins, ";
result += $"{d.Seconds} secs";
return result;
}
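// Polls the predicate every retryTime until it returns true, throwing TimeoutException once the timeout has elapsed.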
public static void WaitUntil(Func<bool> predicate, TimeSpan timeout, TimeSpan retryTime)
{
var start = DateTime.UtcNow;
var state = predicate();
while (!state)
{
if (DateTime.UtcNow - start > timeout)
{
throw new TimeoutException("Operation timed out.");
}
Sleep(retryTime);
state = predicate();
}
}
}
}

10
Utils/Utils.csproj Normal file
View File

@ -0,0 +1,10 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<RootNamespace>Utils</RootNamespace>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
</Project>

View File

@ -3,11 +3,19 @@ Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.4.33213.308
MinimumVisualStudioVersion = 10.0.40219.1
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Tests", "Tests\Tests.csproj", "{57F57B85-A537-4D3A-B7AE-B72C66B74AAB}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tests", "Tests\Tests.csproj", "{57F57B85-A537-4D3A-B7AE-B72C66B74AAB}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LongTests", "LongTests\LongTests.csproj", "{AFCE270E-F844-4A7C-9006-69AE622BB1F4}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "TestsLong", "LongTests\TestsLong.csproj", "{AFCE270E-F844-4A7C-9006-69AE622BB1F4}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "CodexDistTestCore", "CodexDistTestCore\CodexDistTestCore.csproj", "{19306DE1-CEE5-4F7B-AA5D-FD91926D853D}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "DistTestCore", "DistTestCore\DistTestCore.csproj", "{47F31305-6E68-4827-8E39-7B41DAA1CE7A}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "KubernetesWorkflow", "KubernetesWorkflow\KubernetesWorkflow.csproj", "{359123AA-3D9B-4442-80F4-19E32E3EC9EA}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Utils", "Utils\Utils.csproj", "{957DE3B8-9571-450A-8609-B267DCA8727C}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Logging", "Logging\Logging.csproj", "{8481A4A6-4BDD-41B0-A3EB-EF53F7BD40D1}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "NethereumWorkflow", "Nethereum\NethereumWorkflow.csproj", "{D6C3555E-D52D-4993-A87B-71AB650398FD}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
@ -23,10 +31,26 @@ Global
{AFCE270E-F844-4A7C-9006-69AE622BB1F4}.Debug|Any CPU.Build.0 = Debug|Any CPU
{AFCE270E-F844-4A7C-9006-69AE622BB1F4}.Release|Any CPU.ActiveCfg = Release|Any CPU
{AFCE270E-F844-4A7C-9006-69AE622BB1F4}.Release|Any CPU.Build.0 = Release|Any CPU
{19306DE1-CEE5-4F7B-AA5D-FD91926D853D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{19306DE1-CEE5-4F7B-AA5D-FD91926D853D}.Debug|Any CPU.Build.0 = Debug|Any CPU
{19306DE1-CEE5-4F7B-AA5D-FD91926D853D}.Release|Any CPU.ActiveCfg = Release|Any CPU
{19306DE1-CEE5-4F7B-AA5D-FD91926D853D}.Release|Any CPU.Build.0 = Release|Any CPU
{47F31305-6E68-4827-8E39-7B41DAA1CE7A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{47F31305-6E68-4827-8E39-7B41DAA1CE7A}.Debug|Any CPU.Build.0 = Debug|Any CPU
{47F31305-6E68-4827-8E39-7B41DAA1CE7A}.Release|Any CPU.ActiveCfg = Release|Any CPU
{47F31305-6E68-4827-8E39-7B41DAA1CE7A}.Release|Any CPU.Build.0 = Release|Any CPU
{359123AA-3D9B-4442-80F4-19E32E3EC9EA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{359123AA-3D9B-4442-80F4-19E32E3EC9EA}.Debug|Any CPU.Build.0 = Debug|Any CPU
{359123AA-3D9B-4442-80F4-19E32E3EC9EA}.Release|Any CPU.ActiveCfg = Release|Any CPU
{359123AA-3D9B-4442-80F4-19E32E3EC9EA}.Release|Any CPU.Build.0 = Release|Any CPU
{957DE3B8-9571-450A-8609-B267DCA8727C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{957DE3B8-9571-450A-8609-B267DCA8727C}.Debug|Any CPU.Build.0 = Debug|Any CPU
{957DE3B8-9571-450A-8609-B267DCA8727C}.Release|Any CPU.ActiveCfg = Release|Any CPU
{957DE3B8-9571-450A-8609-B267DCA8727C}.Release|Any CPU.Build.0 = Release|Any CPU
{8481A4A6-4BDD-41B0-A3EB-EF53F7BD40D1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{8481A4A6-4BDD-41B0-A3EB-EF53F7BD40D1}.Debug|Any CPU.Build.0 = Debug|Any CPU
{8481A4A6-4BDD-41B0-A3EB-EF53F7BD40D1}.Release|Any CPU.ActiveCfg = Release|Any CPU
{8481A4A6-4BDD-41B0-A3EB-EF53F7BD40D1}.Release|Any CPU.Build.0 = Release|Any CPU
{D6C3555E-D52D-4993-A87B-71AB650398FD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{D6C3555E-D52D-4993-A87B-71AB650398FD}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D6C3555E-D52D-4993-A87B-71AB650398FD}.Release|Any CPU.ActiveCfg = Release|Any CPU
{D6C3555E-D52D-4993-A87B-71AB650398FD}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE