Sets up new test execution loop
parent c08507e1b8
commit 9a9c740e1c
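At a glance: this commit deletes the single sequential AllTestsRun loop and replaces it with one independent scheduling loop per test. A TestStarter re-instantiates its test type on a fixed interval and hands each instance to a SingleTestRun, which walks the test's timed moments on a background task. Below is a minimal, self-contained sketch of that scheduling shape, using stand-in type names rather than the repository's classes; it illustrates the pattern, not the actual implementation.

using System;
using System.Threading;
using System.Threading.Tasks;

// Stand-in for a continuous test: it knows how often it wants to run and what one run does.
public abstract class ExampleTest
{
    public abstract TimeSpan RunTestEvery { get; }
    public abstract void RunOnce();
}

// Stand-in for the per-test starter: an endless background loop, one per test.
public class ExampleStarter
{
    private readonly ExampleTest test;

    public ExampleStarter(ExampleTest test) => this.test = test;

    public void Begin()
    {
        Task.Run(() =>
        {
            while (true)
            {
                try { test.RunOnce(); }
                catch (Exception ex) { Console.WriteLine("Run failed: " + ex); }
                Thread.Sleep(test.RunTestEvery); // wait out the test's own interval, then go again
            }
        });
    }
}

The main program only starts one such loop per discovered test and then parks its own thread, which is what the rewritten ContinuousTestRunner further below does with Thread.Sleep(TimeSpan.MaxValue).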
@@ -1,51 +0,0 @@
-using Logging;
-using Utils;
-
-namespace ContinuousTests
-{
-    public class AllTestsRun
-    {
-        private readonly Configuration config;
-        private readonly FixtureLog log;
-        private readonly TestFactory testFinder;
-
-        public AllTestsRun(Configuration config, FixtureLog log, TestFactory testFinder)
-        {
-            this.config = config;
-            this.log = log;
-            this.testFinder = testFinder;
-        }
-
-        public ContinuousTestResult RunAll()
-        {
-            var tests = testFinder.CreateTests().ToList();
-            var handles = tests.Select(t => new TestHandle(t)).ToArray();
-
-            var result = ContinuousTestResult.Passed;
-            while (tests.Any())
-            {
-                var test = tests.PickOneRandom();
-                var testLog = log.CreateTestLog(test.Name);
-                var singleTestRun = new SingleTestRun(config, test, testLog);
-
-                log.Log($"Start '{test.Name}'");
-                try
-                {
-                    singleTestRun.Run();
-                    log.Log($"'{test.Name}' = Passed");
-                    if (!config.KeepPassedTestLogs) testLog.Delete();
-                }
-                catch
-                {
-                    log.Log($"'{test.Name}' = Failed");
-                    testLog.MarkAsFailed();
-                    result = ContinuousTestResult.Failed;
-                }
-
-                Thread.Sleep(config.SleepSecondsPerSingleTest * 1000);
-            }
-
-            return result;
-        }
-    }
-}
@@ -11,7 +11,7 @@ namespace ContinuousTests
        {
            return containers.Select(container =>
            {
-                var address = container.ClusterInternalAddress;
+                var address = container.ClusterExternalAddress;
                return new CodexNode(log, timeSet, address);
            }).ToArray();
        }
@@ -6,9 +6,8 @@ namespace ContinuousTests
    public class Configuration
    {
        public string LogPath { get; set; } = string.Empty;
        public string DataPath { get; set; } = string.Empty;
        public CodexDeployment CodexDeployment { get; set; } = null!;
        public int SleepSecondsPerSingleTest { get; set; }
        public int SleepSecondsPerAllTests { get; set; }
        public bool KeepPassedTestLogs { get; set; }
    }
@@ -37,11 +36,11 @@ namespace ContinuousTests
                catch { }
            }

-            var logPath = Environment.GetEnvironmentVariable("LOGPATH");
-            var codexDeploymentJson = Environment.GetEnvironmentVariable("CODEXDEPLOYMENT");
-            var sleepPerSingle = Environment.GetEnvironmentVariable("SLEEPSECONDSPERSINGLETEST");
-            var sleepPerAll = Environment.GetEnvironmentVariable("SLEEPSECONDSPERALLTESTS");
-            var keep = Environment.GetEnvironmentVariable("KEEPPASSEDTESTLOGS");
+            var logPath = "logs";// Environment.GetEnvironmentVariable("LOGPATH");
+            var codexDeploymentJson = "C:\\Users\\Ben\\Desktop\\codex-deployment.json"; //Environment.GetEnvironmentVariable("CODEXDEPLOYMENT");
+            var sleepPerSingle = "10";// Environment.GetEnvironmentVariable("SLEEPSECONDSPERSINGLETEST");
+            var sleepPerAll = "10";// Environment.GetEnvironmentVariable("SLEEPSECONDSPERALLTESTS");
+            var keep = ""; // Environment.GetEnvironmentVariable("KEEPPASSEDTESTLOGS");

            if (!string.IsNullOrEmpty(logPath) &&
                !string.IsNullOrEmpty(codexDeploymentJson) &&
@@ -54,8 +53,8 @@ namespace ContinuousTests
                {
                    LogPath = logPath,
                    CodexDeployment = ParseCodexDeploymentJson(codexDeploymentJson),
-                    SleepSecondsPerSingleTest = Convert.ToInt32(sleepPerSingle),
-                    SleepSecondsPerAllTests = Convert.ToInt32(sleepPerAll),
+                    //SleepSecondsPerSingleTest = Convert.ToInt32(sleepPerSingle),
+                    //SleepSecondsPerAllTests = Convert.ToInt32(sleepPerAll),
                    KeepPassedTestLogs = keep == "1"
                };
            }
@@ -78,16 +77,16 @@ namespace ContinuousTests

        private void Validate(Configuration configuration)
        {
-            if (configuration.SleepSecondsPerSingleTest < 1)
-            {
-                Console.WriteLine("Warning: configuration.SleepSecondsPerSingleTest was less than 1 seconds. Using 1 seconds instead!");
-                configuration.SleepSecondsPerSingleTest = 1;
-            }
-            if (configuration.SleepSecondsPerAllTests < 1)
-            {
-                Console.WriteLine("Warning: configuration.SleepSecondsPerAllTests was less than 10 seconds. Using 10 seconds instead!");
-                configuration.SleepSecondsPerAllTests = 10;
-            }
+            //if (configuration.SleepSecondsPerSingleTest < 1)
+            //{
+            //    Console.WriteLine("Warning: configuration.SleepSecondsPerSingleTest was less than 1 seconds. Using 1 seconds instead!");
+            //    configuration.SleepSecondsPerSingleTest = 1;
+            //}
+            //if (configuration.SleepSecondsPerAllTests < 1)
+            //{
+            //    Console.WriteLine("Warning: configuration.SleepSecondsPerAllTests was less than 10 seconds. Using 10 seconds instead!");
+            //    configuration.SleepSecondsPerAllTests = 10;
+            //}

            if (string.IsNullOrEmpty(configuration.LogPath))
            {
@@ -31,6 +31,8 @@ namespace ContinuousTests
        public virtual ITimeSet TimeSet { get { return new DefaultTimeSet(); } }

        public abstract int RequiredNumberOfNodes { get; }
        public abstract TimeSpan RunTestEvery { get; }
        public abstract TestFailMode TestFailMode { get; }

        public string Name
        {
@@ -40,8 +42,6 @@ namespace ContinuousTests
            }
        }

        public abstract void Run();

        public ContentId? UploadFile(CodexNode node, TestFile file)
        {
            using var fileStream = File.OpenRead(file.Filename);
@@ -84,4 +84,10 @@ namespace ContinuousTests
        }
    }
    }

    public enum TestFailMode
    {
        StopAfterFirstFailure,
        AlwaysRunAllMoments
    }
}
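Each continuous test must now declare three things up front: how many Codex nodes it needs, how often its starter should re-run it, and whether a failed moment aborts the rest of the run. A stand-in sketch of such a declaration (hypothetical ExampleContinuousTest/FailMode types that merely mirror the members added here, not the repository's ContinuousTest):

using System;

// Mirrors the abstract members and enum introduced in this commit; illustration only.
public enum FailMode { StopAfterFirstFailure, AlwaysRunAllMoments }

public abstract class ExampleContinuousTest
{
    public abstract int RequiredNumberOfNodes { get; }  // nodes selected for each run
    public abstract TimeSpan RunTestEvery { get; }      // re-scheduling interval used by the starter
    public abstract FailMode TestFailMode { get; }      // stop at the first failed moment, or run them all
}

public class ExampleTwoNodeTest : ExampleContinuousTest
{
    public override int RequiredNumberOfNodes => 2;
    public override TimeSpan RunTestEvery => TimeSpan.FromHours(1);
    public override FailMode TestFailMode => FailMode.StopAfterFirstFailure;
}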
@@ -1,138 +1,31 @@
using DistTestCore;
using DistTestCore.Codex;
using Logging;

namespace ContinuousTests
namespace ContinuousTests
{
    public interface ITestResultHandler
    {
        void TestPassed(ContinuousTest test);
        void TestFailed(ContinuousTest test);
    }

    public class ContinuousTestRunner : ITestResultHandler
    public class ContinuousTestRunner
    {
        private readonly ConfigLoader configLoader = new ConfigLoader();
        private readonly TestFactory testFactory = new TestFactory();
        private readonly CodexNodeFactory codexNodeFactory = new CodexNodeFactory();
        private readonly Configuration config;
        private readonly StartupChecker startupChecker;

        public ContinuousTestRunner()
        {
            config = configLoader.Load();
            startupChecker = new StartupChecker(config);
        }

        public void Run()
        {
            var config = configLoader.Load();
            StartupChecks(config);
            startupChecker.Check();

            while (true)
            var allTests = testFactory.CreateTests();
            var testStarters = allTests.Select(t => new TestStarter(config, t.GetType(), t.RunTestEvery)).ToArray();

            foreach (var t in testStarters)
            {
                var log = new FixtureLog(new LogConfig(config.LogPath, false), "ContinuousTestsRun");
                var allTestsRun = new AllTestsRun(config, log, testFactory, this);

                try
                {
                    allTestsRun.RunAll();
                }
                catch (Exception ex)
                {
                    log.Error($"Exception during test run: " + ex);
                }

                if (result == ContinuousTestResult.Failed)
                {
                    log.MarkAsFailed();
                }
                if (!config.KeepPassedTestLogs && result == ContinuousTestResult.Passed)
                {
                    log.DeleteFolder();
                }

                Thread.Sleep(config.SleepSecondsPerSingleTest * 1000);
                t.Begin();
            }
        }

        private void StartupChecks(Configuration config)
        {
            var log = new FixtureLog(new LogConfig(config.LogPath, false), "StartupChecks");
            log.Log("Starting continuous test run...");
            log.Log("Checking configuration...");
            PreflightCheck(config);
            log.Log("Contacting Codex nodes...");
            CheckCodexNodes(log, config);
            log.Log("All OK.");
        }

        private void PreflightCheck(Configuration config)
        {
            var tests = testFactory.CreateTests();
            if (!tests.Any())
            {
                throw new Exception("Unable to find any tests.");
            }

            var errors = new List<string>();
            foreach (var test in tests)
            {
                if (test.RequiredNumberOfNodes > config.CodexDeployment.CodexContainers.Length)
                {
                    errors.Add($"Test '{test.Name}' requires {test.RequiredNumberOfNodes} nodes. Deployment only has {config.CodexDeployment.CodexContainers.Length}");
                }
            }

            if (!Directory.Exists(config.LogPath))
            {
                Directory.CreateDirectory(config.LogPath);
            }

            if (errors.Any())
            {
                throw new Exception("Prerun check failed: " + string.Join(", ", errors));
            }
        }

        private void CheckCodexNodes(BaseLog log, Configuration config)
        {
            var nodes = codexNodeFactory.Create(config.CodexDeployment.CodexContainers, log, new DefaultTimeSet());
            var pass = true;
            foreach (var n in nodes)
            {
                log.Log($"Checking '{n.Address.Host}'...");

                if (EnsureOnline(n))
                {
                    log.Log("OK");
                }
                else
                {
                    log.Error($"No response from '{n.Address.Host}'.");
                    pass = false;
                }
            }
            if (!pass)
            {
                throw new Exception("Not all codex nodes responded.");
            }
        }

        private bool EnsureOnline(CodexNode n)
        {
            try
            {
                var info = n.GetDebugInfo();
                if (info == null || string.IsNullOrEmpty(info.id)) return false;
            }
            catch
            {
                return false;
            }
            return true;
        }

        public void TestPassed(ContinuousTest test)
        {
            throw new NotImplementedException();
        }

        public void TestFailed(ContinuousTest test)
        {
            throw new NotImplementedException();

            Thread.Sleep(TimeSpan.MaxValue);
        }
    }
}
@@ -4,6 +4,8 @@ public class Program
{
    public static void Main(string[] args)
    {
        Console.WriteLine("Codex Continous-Test-Runner.");
        Console.WriteLine("Running...");
        var runner = new ContinuousTestRunner();
        runner.Run();
    }
@@ -9,38 +9,112 @@ namespace ContinuousTests
    public class SingleTestRun
    {
        private readonly CodexNodeFactory codexNodeFactory = new CodexNodeFactory();
        private readonly List<Exception> exceptions = new List<Exception>();
        private readonly Configuration config;
        private readonly ContinuousTest test;
        private readonly TestHandle handle;
        private readonly CodexNode[] nodes;
        private readonly FileManager fileManager;
        private readonly FixtureLog fixtureLog;

        public SingleTestRun(Configuration config, ContinuousTest test, BaseLog testLog)
        public SingleTestRun(Configuration config, TestHandle handle)
        {
            this.config = config;
            this.test = test;
            this.handle = handle;

            nodes = CreateRandomNodes(test.RequiredNumberOfNodes, testLog);
            fileManager = new FileManager(testLog, new DistTestCore.Configuration());
            var testName = handle.Test.GetType().Name;
            fixtureLog = new FixtureLog(new LogConfig(config.LogPath, false), testName);

            test.Initialize(nodes, testLog, fileManager);
            nodes = CreateRandomNodes(handle.Test.RequiredNumberOfNodes);
            fileManager = new FileManager(fixtureLog, CreateFileManagerConfiguration());
        }

        public void Run()
        {
            test.Run();
            Task.Run(() =>
            {
                try
                {
                    RunTest();

                    if (!config.KeepPassedTestLogs) fixtureLog.Delete();
                }
                catch (Exception ex)
                {
                    fixtureLog.Error("Test run failed with exception: " + ex);
                    fixtureLog.MarkAsFailed();
                }
                fileManager.DeleteAllTestFiles();
            });
        }

        public void TearDown()
        private void RunTest()
        {
            test.Initialize(null!, null!, null!);
            fileManager.DeleteAllTestFiles();
            var earliestMoment = handle.GetEarliestMoment();
            var lastMoment = handle.GetLastMoment();

            var t = earliestMoment;
            while (t <= lastMoment)
            {
                RunMoment(t);

                if (handle.Test.TestFailMode == TestFailMode.StopAfterFirstFailure && exceptions.Any())
                {
                    Log("Exception detected. TestFailMode = StopAfterFirstFailure. Stopping...");
                    throw exceptions.Single();
                }

                var nextMoment = handle.GetNextMoment(t);
                if (nextMoment != null)
                {
                    Log($" > Next TestMoment in {nextMoment.Value} seconds...");
                    t += nextMoment.Value;
                    Thread.Sleep(nextMoment.Value * 1000);
                }
                else
                {
                    Log(" > Completed last test moment. Test ended.");
                }
            }

            if (exceptions.Any()) throw exceptions.First();
        }

        private CodexNode[] CreateRandomNodes(int number, BaseLog testLog)
        private void RunMoment(int t)
        {
            try
            {
                handle.InvokeMoment(t, InitializeTest);
            }
            catch (Exception ex)
            {
                Log($" > TestMoment yielded exception: " + ex);
                exceptions.Add(ex);
            }

            DecommissionTest();
        }

        private void InitializeTest(string name)
        {
            Log($" > Running TestMoment '{name}'");
            handle.Test.Initialize(nodes, fixtureLog, fileManager);
        }

        private void DecommissionTest()
        {
            handle.Test.Initialize(null!, null!, null!);
        }

        private void Log(string msg)
        {
            fixtureLog.Log(msg);
        }

        private CodexNode[] CreateRandomNodes(int number)
        {
            var containers = SelectRandomContainers(number);
            testLog.Log("Selected nodes: " + string.Join(",", containers.Select(c => c.Name)));
            return codexNodeFactory.Create(containers, testLog, test.TimeSet);
            fixtureLog.Log("Selected nodes: " + string.Join(",", containers.Select(c => c.Name)));
            return codexNodeFactory.Create(containers, fixtureLog, handle.Test.TimeSet);
        }

        private RunningContainer[] SelectRandomContainers(int number)
@@ -53,5 +127,11 @@ namespace ContinuousTests
            }
            return result;
        }

        private DistTestCore.Configuration CreateFileManagerConfiguration()
        {
            return new DistTestCore.Configuration(null, string.Empty, false, config.DataPath + Guid.NewGuid(),
                CodexLogLevel.Error, TestRunnerLocation.ExternalToCluster);
        }
    }
}
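The new SingleTestRun drives a test from its earliest moment to its last: run the current moment, stop early if TestFailMode is StopAfterFirstFailure and something threw, otherwise sleep until the next scheduled moment. Here is a compressed, self-contained sketch of that control flow, with moments reduced to a dictionary of second offsets to delegates (illustration only; the real class resolves moments through TestHandle and re-initializes the test around each invocation):

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;

public static class MomentLoopSketch
{
    // moments: second offset -> action to invoke at that offset.
    public static void Run(SortedDictionary<int, Action> moments, bool stopAfterFirstFailure)
    {
        var exceptions = new List<Exception>();
        var t = moments.Keys.Min();
        var last = moments.Keys.Max();

        while (t <= last)
        {
            try { moments[t](); }
            catch (Exception ex) { exceptions.Add(ex); }

            if (stopAfterFirstFailure && exceptions.Any()) throw exceptions.First();

            // Find the next scheduled offset and sleep until it; stop if there is none.
            var next = moments.Keys.Where(m => m > t).Select(m => (int?)m).FirstOrDefault();
            if (next == null) break;
            Thread.Sleep(TimeSpan.FromSeconds(next.Value - t));
            t = next.Value;
        }

        if (exceptions.Any()) throw exceptions.First();
    }
}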
@@ -0,0 +1,102 @@
+using DistTestCore.Codex;
+using DistTestCore;
+using Logging;
+
+namespace ContinuousTests
+{
+    public class StartupChecker
+    {
+        private readonly TestFactory testFactory = new TestFactory();
+        private readonly CodexNodeFactory codexNodeFactory = new CodexNodeFactory();
+        private readonly Configuration config;
+
+        public StartupChecker(Configuration config)
+        {
+            this.config = config;
+        }
+
+        public void Check()
+        {
+            var log = new FixtureLog(new LogConfig(config.LogPath, false), "StartupChecks");
+            log.Log("Starting continuous test run...");
+            log.Log("Checking configuration...");
+            PreflightCheck(config);
+            log.Log("Contacting Codex nodes...");
+            CheckCodexNodes(log, config);
+            log.Log("All OK.");
+        }
+
+        private void PreflightCheck(Configuration config)
+        {
+            var tests = testFactory.CreateTests();
+            if (!tests.Any())
+            {
+                throw new Exception("Unable to find any tests.");
+            }
+            foreach (var test in tests)
+            {
+                var handle = new TestHandle(test);
+                handle.GetEarliestMoment();
+                handle.GetLastMoment();
+            }
+
+            var errors = new List<string>();
+            foreach (var test in tests)
+            {
+                if (test.RequiredNumberOfNodes > config.CodexDeployment.CodexContainers.Length)
+                {
+                    errors.Add($"Test '{test.Name}' requires {test.RequiredNumberOfNodes} nodes. Deployment only has {config.CodexDeployment.CodexContainers.Length}");
+                }
+            }
+
+            if (!Directory.Exists(config.LogPath))
+            {
+                Directory.CreateDirectory(config.LogPath);
+            }
+
+            if (errors.Any())
+            {
+                throw new Exception("Prerun check failed: " + string.Join(", ", errors));
+            }
+        }
+
+        private void CheckCodexNodes(BaseLog log, Configuration config)
+        {
+            var nodes = codexNodeFactory.Create(config.CodexDeployment.CodexContainers, log, new DefaultTimeSet());
+            var pass = true;
+            foreach (var n in nodes)
+            {
+                log.Log($"Checking '{n.Address.Host}'...");
+
+                if (EnsureOnline(n))
+                {
+                    log.Log("OK");
+                }
+                else
+                {
+                    log.Error($"No response from '{n.Address.Host}'.");
+                    pass = false;
+                }
+            }
+            if (!pass)
+            {
+                throw new Exception("Not all codex nodes responded.");
+            }
+        }
+
+        private bool EnsureOnline(CodexNode n)
+        {
+            try
+            {
+                var info = n.GetDebugInfo();
+                if (info == null || string.IsNullOrEmpty(info.id)) return false;
+            }
+            catch
+            {
+                return false;
+            }
+            return true;
+        }
+
+    }
+}
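StartupChecker's node check reduces to: probe every deployed node once, treat an exception or empty reply as offline, and fail fast before any starter begins. A generic sketch of that fail-fast pattern with the probe injected as a delegate (hypothetical helper; the real check calls GetDebugInfo on each CodexNode):

using System;
using System.Collections.Generic;
using System.Linq;

public static class PreflightSketch
{
    public static void CheckAll<T>(IEnumerable<T> nodes, Func<T, bool> isOnline, Action<string> log)
    {
        var offline = nodes.Where(n =>
        {
            try { return !isOnline(n); }
            catch { return true; } // a throwing probe counts as offline
        }).ToArray();

        if (offline.Any()) throw new Exception($"{offline.Length} node(s) did not respond.");
        log("All nodes responded.");
    }
}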
@@ -12,12 +12,23 @@ namespace ContinuousTests
            ReflectTestMoments();

            if (!moments.Any()) throw new Exception("Test has no moments.");
            if (moments.Count != moments.Select(m => m.Moment).Distinct().Count()) throw new Exception("Test has duplicate moments");
            var testName = test.GetType().Name;
            if (!moments.Any()) throw new Exception($"Test '{testName}' has no moments.");
            if (moments.Count != moments.Select(m => m.Moment).Distinct().Count()) throw new Exception($"Test '{testName}' has duplicate moments");
        }

        public ContinuousTest Test { get; }

        public int GetEarliestMoment()
        {
            return moments.Min(m => m.Moment);
        }

        public int GetLastMoment()
        {
            return moments.Max(m => m.Moment);
        }

        public int? GetNextMoment(int currentMoment)
        {
            var remainingMoments = moments.Where(m => m.Moment >= currentMoment).ToArray();
@@ -25,17 +36,16 @@ namespace ContinuousTests
            return remainingMoments.Min(m => m.Moment);
        }

        public int GetLastMoment()
        {
            return moments.Max(m => m.Moment);
        }

        public void InvokeMoment(int currentMoment)
        public void InvokeMoment(int currentMoment, Action<string> beforeInvoke)
        {
            var moment = moments.SingleOrDefault(m => m.Moment == currentMoment);
            if (moment == null) return;

            moment.Method.Invoke(Test, Array.Empty<object>());
            lock (MomentLock.Lock)
            {
                beforeInvoke(moment.Method.Name);
                moment.Method.Invoke(Test, Array.Empty<object>());
            }
        }

        private void ReflectTestMoments()
@@ -66,4 +76,9 @@ namespace ContinuousTests
        public MethodInfo Method { get; }
        public int Moment { get; }
    }

    public static class MomentLock
    {
        public static readonly object Lock = new();
    }
}
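TestHandle finds a test's moments by reflecting over attributed methods and, as of this commit, funnels every invocation through a single shared lock so that concurrently scheduled tests never execute two moments at the same time. A self-contained sketch of that discover-then-invoke-under-lock pattern (stand-in MomentAttribute; the repository's attribute is TestMoment and its lock lives in MomentLock):

using System;
using System.Linq;
using System.Reflection;

[AttributeUsage(AttributeTargets.Method)]
public class MomentAttribute : Attribute
{
    public MomentAttribute(int t) { T = t; }
    public int T { get; }
}

public static class MomentReflectionSketch
{
    private static readonly object InvokeLock = new object();

    // Collect (offset, method) pairs for every public method carrying the attribute.
    public static (int Moment, MethodInfo Method)[] Discover(object test)
    {
        return test.GetType()
            .GetMethods(BindingFlags.Public | BindingFlags.Instance)
            .Select(m => (Attr: m.GetCustomAttribute<MomentAttribute>(), Method: m))
            .Where(x => x.Attr != null)
            .Select(x => (x.Attr!.T, x.Method))
            .OrderBy(x => x.Item1)
            .ToArray();
    }

    // Only one moment runs at a time, across all concurrently scheduled tests.
    public static void Invoke(object test, MethodInfo method, Action<string> beforeInvoke)
    {
        lock (InvokeLock)
        {
            beforeInvoke(method.Name);
            method.Invoke(test, Array.Empty<object>());
        }
    }
}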
@@ -0,0 +1,36 @@
+namespace ContinuousTests
+{
+    public class TestStarter
+    {
+        private readonly Configuration config;
+        private readonly Type testType;
+        private readonly TimeSpan runsEvery;
+
+        public TestStarter(Configuration config, Type testType, TimeSpan runsEvery)
+        {
+            this.config = config;
+            this.testType = testType;
+            this.runsEvery = runsEvery;
+        }
+
+        public void Begin()
+        {
+            Task.Run(() =>
+            {
+                while (true)
+                {
+                    StartTest();
+                    Thread.Sleep(runsEvery);
+                }
+            });
+        }
+
+        private void StartTest()
+        {
+            var test = (ContinuousTest)Activator.CreateInstance(testType)!;
+            var handle = new TestHandle(test);
+            var run = new SingleTestRun(config, handle);
+            run.Run();
+        }
+    }
+}
@@ -5,10 +5,8 @@ namespace ContinuousTests.Tests
    public class MarketplaceTest : ContinuousTest
    {
        public override int RequiredNumberOfNodes => 1;

        public override void Run()
        {
        }
        public override TimeSpan RunTestEvery => TimeSpan.FromDays(1);
        public override TestFailMode TestFailMode => TestFailMode.AlwaysRunAllMoments;

        [TestMoment(t: Zero)]
        public void NodePostsStorageRequest()
@@ -4,77 +4,77 @@ using NUnit.Framework

namespace ContinuousTests.Tests
{
    public class UploadPerformanceTest : PerformanceTest
    {
        public override int RequiredNumberOfNodes => 1;
    //public class UploadPerformanceTest : PerformanceTest
    //{
    //    public override int RequiredNumberOfNodes => 1;

        public override void Run()
        {
            UploadTest(100, Nodes[0]);
        }
    }
    //    public override void Run()
    //    {
    //        UploadTest(100, Nodes[0]);
    //    }
    //}

    public class DownloadLocalPerformanceTest : PerformanceTest
    {
        public override int RequiredNumberOfNodes => 1;
    //public class DownloadLocalPerformanceTest : PerformanceTest
    //{
    //    public override int RequiredNumberOfNodes => 1;

        public override void Run()
        {
            DownloadTest(100, Nodes[0], Nodes[0]);
        }
    }
    //    public override void Run()
    //    {
    //        DownloadTest(100, Nodes[0], Nodes[0]);
    //    }
    //}

    public class DownloadRemotePerformanceTest : PerformanceTest
    {
        public override int RequiredNumberOfNodes => 2;
    //public class DownloadRemotePerformanceTest : PerformanceTest
    //{
    //    public override int RequiredNumberOfNodes => 2;

        public override void Run()
        {
            DownloadTest(100, Nodes[0], Nodes[1]);
        }
    }
    //    public override void Run()
    //    {
    //        DownloadTest(100, Nodes[0], Nodes[1]);
    //    }
    //}

    public abstract class PerformanceTest : ContinuousTest
    {
        public void UploadTest(int megabytes, CodexNode uploadNode)
        {
            var file = FileManager.GenerateTestFile(megabytes.MB());
    //public abstract class PerformanceTest : ContinuousTest
    //{
    //    public void UploadTest(int megabytes, CodexNode uploadNode)
    //    {
    //        var file = FileManager.GenerateTestFile(megabytes.MB());

            var time = Measure(() =>
            {
                UploadFile(uploadNode, file);
            });
    //        var time = Measure(() =>
    //        {
    //            UploadFile(uploadNode, file);
    //        });

            var timePerMB = time / megabytes;
    //        var timePerMB = time / megabytes;

            Assert.That(timePerMB, Is.LessThan(CodexContainerRecipe.MaxUploadTimePerMegabyte), "MaxUploadTimePerMegabyte performance threshold breached.");
        }
    //        Assert.That(timePerMB, Is.LessThan(CodexContainerRecipe.MaxUploadTimePerMegabyte), "MaxUploadTimePerMegabyte performance threshold breached.");
    //    }

        public void DownloadTest(int megabytes, CodexNode uploadNode, CodexNode downloadNode)
        {
            var file = FileManager.GenerateTestFile(megabytes.MB());
    //    public void DownloadTest(int megabytes, CodexNode uploadNode, CodexNode downloadNode)
    //    {
    //        var file = FileManager.GenerateTestFile(megabytes.MB());

            var cid = UploadFile(uploadNode, file);
            Assert.That(cid, Is.Not.Null);
    //        var cid = UploadFile(uploadNode, file);
    //        Assert.That(cid, Is.Not.Null);

            TestFile? result = null;
            var time = Measure(() =>
            {
                result = DownloadContent(downloadNode, cid!);
            });
    //        TestFile? result = null;
    //        var time = Measure(() =>
    //        {
    //            result = DownloadContent(downloadNode, cid!);
    //        });

            file.AssertIsEqual(result);
    //        file.AssertIsEqual(result);

            var timePerMB = time / megabytes;
    //        var timePerMB = time / megabytes;

            Assert.That(timePerMB, Is.LessThan(CodexContainerRecipe.MaxDownloadTimePerMegabyte), "MaxDownloadTimePerMegabyte performance threshold breached.");
        }
    //        Assert.That(timePerMB, Is.LessThan(CodexContainerRecipe.MaxDownloadTimePerMegabyte), "MaxDownloadTimePerMegabyte performance threshold breached.");
    //    }

        private static TimeSpan Measure(Action action)
        {
            var start = DateTime.UtcNow;
            action();
            return DateTime.UtcNow - start;
        }
    }
    //    private static TimeSpan Measure(Action action)
    //    {
    //        var start = DateTime.UtcNow;
    //        action();
    //        return DateTime.UtcNow - start;
    //    }
    //}
}
@@ -3,20 +3,20 @@ using NUnit.Framework

namespace ContinuousTests.Tests
{
    public class TwoClientTest : ContinuousTest
    {
        public override int RequiredNumberOfNodes => 2;
    //public class TwoClientTest : ContinuousTest
    //{
    //    public override int RequiredNumberOfNodes => 2;

        public override void Run()
        {
            var file = FileManager.GenerateTestFile(10.MB());
    //    public override void Run()
    //    {
    //        var file = FileManager.GenerateTestFile(10.MB());

            var cid = UploadFile(Nodes[0], file);
            Assert.That(cid, Is.Not.Null);
    //        var cid = UploadFile(Nodes[0], file);
    //        Assert.That(cid, Is.Not.Null);

            var dl = DownloadContent(Nodes[1], cid!);
    //        var dl = DownloadContent(Nodes[1], cid!);

            file.AssertIsEqual(dl);
        }
    }
    //        file.AssertIsEqual(dl);
    //    }
    //}
}