Disables assert on contract config values check

This commit is contained in:
Ben 2025-09-01 10:35:40 +02:00
parent 365e78f050
commit c16397ae47
No known key found for this signature in database
GPG Key ID: 0F16E812E736C24B
3 changed files with 173 additions and 136 deletions

View File

@ -6,6 +6,7 @@ namespace CodexClient
{ {
BytesPerSecond? GetUploadSpeed(); BytesPerSecond? GetUploadSpeed();
BytesPerSecond? GetDownloadSpeed(); BytesPerSecond? GetDownloadSpeed();
ITransferSpeeds Combine(ITransferSpeeds? other);
} }
public class TransferSpeeds : ITransferSpeeds public class TransferSpeeds : ITransferSpeeds
@ -35,6 +36,18 @@ namespace CodexClient
return downloads.Average(); return downloads.Average();
} }
public ITransferSpeeds Combine(ITransferSpeeds? other)
{
if (other == null) return this;
var o = (TransferSpeeds)other;
var result = new TransferSpeeds();
result.uploads.AddRange(uploads);
result.uploads.AddRange(o.uploads);
result.downloads.AddRange(downloads);
result.downloads.AddRange(o.downloads);
return result;
}
private static BytesPerSecond Convert(ByteSize size, TimeSpan duration) private static BytesPerSecond Convert(ByteSize size, TimeSpan duration)
{ {
double bytes = size.SizeInBytes; double bytes = size.SizeInBytes;

View File

@ -105,7 +105,15 @@ namespace CodexContractsPlugin
{ {
if (Convert.ToInt32(value) != expected) if (Convert.ToInt32(value) != expected)
{ {
throw new Exception($"Config value '{name}' should be deployed as '{expected}' but was '{value}'"); // Merge todo: https://github.com/codex-storage/nim-codex/pull/1303
// Once this is merged, the contract config values are settable via env-vars.
// This plugin is already updated to set the config vars to values compatible with a
// 1-second block frequency. AND it will read back the config and assert it is deployed correctly.
// This is waiting for that merge.
// Replace log with assert WHEN MERGED:
// throw new Exception($"Config value '{name}' should be deployed as '{expected}' but was '{value}'");
Log($"MERGE TODO. Config value '{name}' should be deployed as '{expected}' but was '{value}'");
} }
} }

View File

@ -7,175 +7,191 @@ using Utils;
namespace CodexReleaseTests.DataTests namespace CodexReleaseTests.DataTests
{ {
[TestFixture(2, 10)] namespace SwarmTests
[TestFixture(5, 20)]
[TestFixture(10, 20)]
public class SwarmTests : AutoBootstrapDistTest
{ {
private readonly int numberOfNodes; [TestFixture(2, 10)]
private readonly int filesizeMb; [TestFixture(5, 20)]
[TestFixture(10, 20)]
public SwarmTests(int numberOfNodes, int filesizeMb) public class SwarmTests : AutoBootstrapDistTest
{ {
this.numberOfNodes = numberOfNodes; private readonly int numberOfNodes;
this.filesizeMb = filesizeMb; private readonly int filesizeMb;
} private ICodexNodeGroup nodes = null!;
[Test] public SwarmTests(int numberOfNodes, int filesizeMb)
public void Swarm()
{
var filesize = filesizeMb.MB();
var nodes = StartCodex(numberOfNodes);
var files = nodes.Select(n => UploadUniqueFilePerNode(n, filesize)).ToArray();
var tasks = ParallelDownloadEachFile(nodes, files);
Task.WaitAll(tasks);
AssertAllFilesDownloadedCorrectly(files);
}
[Test]
public void StreamlessSwarm()
{
var filesize = filesizeMb.MB();
var nodes = StartCodex(numberOfNodes);
var files = nodes.Select(n => UploadUniqueFilePerNode(n, filesize)).ToArray();
var tasks = ParallelStreamlessDownloadEachFile(nodes, files);
Task.WaitAll(tasks);
AssertAllFilesStreamlesslyDownloadedCorrectly(nodes, files);
}
private SwarmTestNetworkFile UploadUniqueFilePerNode(ICodexNode node, ByteSize fileSize)
{
var file = GenerateTestFile(fileSize);
var cid = node.UploadFile(file);
return new SwarmTestNetworkFile(node, fileSize, file, cid);
}
private Task[] ParallelDownloadEachFile(ICodexNodeGroup nodes, SwarmTestNetworkFile[] files)
{
var tasks = new List<Task>();
foreach (var node in nodes)
{ {
tasks.Add(StartDownload(node, files)); this.numberOfNodes = numberOfNodes;
this.filesizeMb = filesizeMb;
} }
return tasks.ToArray(); [TearDown]
} public void TearDown()
private Task[] ParallelStreamlessDownloadEachFile(ICodexNodeGroup nodes, SwarmTestNetworkFile[] files)
{
var tasks = new List<Task>();
foreach (var node in nodes)
{ {
tasks.Add(StartStreamlessDownload(node, files)); ITransferSpeeds speeds = new TransferSpeeds();
} foreach (var n in nodes)
return tasks.ToArray();
}
private Task StartDownload(ICodexNode node, SwarmTestNetworkFile[] files)
{
return Task.Run(() =>
{
var remaining = files.ToList();
while (remaining.Count > 0)
{ {
var file = remaining.PickOneRandom(); speeds = speeds.Combine(n.TransferSpeeds);
try
{
var dl = node.DownloadContent(file.Cid, TimeSpan.FromMinutes(30));
lock (file.Lock)
{
file.Downloaded.Add(dl);
}
}
catch (Exception ex)
{
file.Error = ex;
}
} }
}); Log($"Average upload speed: {speeds.GetUploadSpeed()}");
} Log($"Average download speed: {speeds.GetDownloadSpeed()}");
}
private Task StartStreamlessDownload(ICodexNode node, SwarmTestNetworkFile[] files) [Test]
{ public void Stream()
return Task.Run(() =>
{ {
var remaining = files.ToList(); var filesize = filesizeMb.MB();
nodes = StartCodex(numberOfNodes);
var files = nodes.Select(n => UploadUniqueFilePerNode(n, filesize)).ToArray();
while (remaining.Count > 0) var tasks = ParallelDownloadEachFile(files);
Task.WaitAll(tasks);
AssertAllFilesDownloadedCorrectly(files);
}
[Test]
public void Streamless()
{
var filesize = filesizeMb.MB();
nodes = StartCodex(numberOfNodes);
var files = nodes.Select(n => UploadUniqueFilePerNode(n, filesize)).ToArray();
var tasks = ParallelStreamlessDownloadEachFile(files);
Task.WaitAll(tasks);
AssertAllFilesStreamlesslyDownloadedCorrectly(files);
}
private SwarmTestNetworkFile UploadUniqueFilePerNode(ICodexNode node, ByteSize fileSize)
{
var file = GenerateTestFile(fileSize);
var cid = node.UploadFile(file);
return new SwarmTestNetworkFile(node, fileSize, file, cid);
}
private Task[] ParallelDownloadEachFile(SwarmTestNetworkFile[] files)
{
var tasks = new List<Task>();
foreach (var node in nodes)
{ {
var file = remaining.PickOneRandom(); tasks.Add(StartDownload(node, files));
if (file.Uploader.GetName() != node.GetName()) }
return tasks.ToArray();
}
private Task[] ParallelStreamlessDownloadEachFile(SwarmTestNetworkFile[] files)
{
var tasks = new List<Task>();
foreach (var node in nodes)
{
tasks.Add(StartStreamlessDownload(node, files));
}
return tasks.ToArray();
}
private Task StartDownload(ICodexNode node, SwarmTestNetworkFile[] files)
{
return Task.Run(() =>
{
var remaining = files.ToList();
while (remaining.Count > 0)
{ {
var file = remaining.PickOneRandom();
try try
{ {
var startSpace = node.Space(); var dl = node.DownloadContent(file.Cid, TimeSpan.FromMinutes(30));
node.DownloadStreamlessWait(file.Cid, file.OriginalSize); lock (file.Lock)
{
file.Downloaded.Add(dl);
}
} }
catch (Exception ex) catch (Exception ex)
{ {
file.Error = ex; file.Error = ex;
} }
} }
} });
}); }
}
private void AssertAllFilesDownloadedCorrectly(SwarmTestNetworkFile[] files) private Task StartStreamlessDownload(ICodexNode node, SwarmTestNetworkFile[] files)
{
foreach (var file in files)
{ {
if (file.Error != null) throw file.Error; return Task.Run(() =>
lock (file.Lock)
{ {
foreach (var dl in file.Downloaded) var remaining = files.ToList();
while (remaining.Count > 0)
{ {
file.Original.AssertIsEqual(dl); var file = remaining.PickOneRandom();
if (file.Uploader.GetName() != node.GetName())
{
try
{
var startSpace = node.Space();
node.DownloadStreamlessWait(file.Cid, file.OriginalSize);
}
catch (Exception ex)
{
file.Error = ex;
}
}
}
});
}
private void AssertAllFilesDownloadedCorrectly(SwarmTestNetworkFile[] files)
{
foreach (var file in files)
{
if (file.Error != null) throw file.Error;
lock (file.Lock)
{
foreach (var dl in file.Downloaded)
{
file.Original.AssertIsEqual(dl);
}
} }
} }
} }
}
private void AssertAllFilesStreamlesslyDownloadedCorrectly(ICodexNodeGroup nodes, SwarmTestNetworkFile[] files) private void AssertAllFilesStreamlesslyDownloadedCorrectly(SwarmTestNetworkFile[] files)
{
var totalFilesSpace = 0.Bytes();
foreach (var file in files)
{ {
if (file.Error != null) throw file.Error; var totalFilesSpace = 0.Bytes();
totalFilesSpace = new ByteSize(totalFilesSpace.SizeInBytes + file.Original.GetFilesize().SizeInBytes); foreach (var file in files)
} {
if (file.Error != null) throw file.Error;
foreach (var node in nodes) totalFilesSpace = new ByteSize(totalFilesSpace.SizeInBytes + file.Original.GetFilesize().SizeInBytes);
{ }
var currentSpace = node.Space();
Assert.That(currentSpace.QuotaUsedBytes, Is.GreaterThanOrEqualTo(totalFilesSpace.SizeInBytes));
}
}
private class SwarmTestNetworkFile foreach (var node in nodes)
{ {
public SwarmTestNetworkFile(ICodexNode uploader, ByteSize originalSize, TrackedFile original, ContentId cid) var currentSpace = node.Space();
{ Assert.That(currentSpace.QuotaUsedBytes, Is.GreaterThanOrEqualTo(totalFilesSpace.SizeInBytes));
Uploader = uploader; }
OriginalSize = originalSize;
Original = original;
Cid = cid;
} }
public ICodexNode Uploader { get; } private class SwarmTestNetworkFile
public ByteSize OriginalSize { get; } {
public TrackedFile Original { get; } public SwarmTestNetworkFile(ICodexNode uploader, ByteSize originalSize, TrackedFile original, ContentId cid)
public ContentId Cid { get; } {
public object Lock { get; } = new object(); Uploader = uploader;
public List<TrackedFile?> Downloaded { get; } = new List<TrackedFile?>(); OriginalSize = originalSize;
public Exception? Error { get; set; } = null; Original = original;
Cid = cid;
}
public ICodexNode Uploader { get; }
public ByteSize OriginalSize { get; }
public TrackedFile Original { get; }
public ContentId Cid { get; }
public object Lock { get; } = new object();
public List<TrackedFile?> Downloaded { get; } = new List<TrackedFile?>();
public Exception? Error { get; set; } = null;
}
} }
} }
} }