Trying to detect upload slowdown related to the number of blocks already stored in the node

This commit is contained in:
benbierens 2024-04-27 11:33:13 +02:00
parent a20e9a30cf
commit ce7ba47a32
No known key found for this signature in database
GPG Key ID: 877D2C2E09A22F3A
1 changed files with 32 additions and 0 deletions

View File

@ -1,5 +1,6 @@
using CodexPlugin;
using DistTestCore;
using Logging;
using NUnit.Framework;
using Utils;
@ -35,5 +36,36 @@ namespace CodexTests.ScalabilityTests
testFile.AssertIsEqual(downloadedFile);
}
[Test]
public void ManyFiles()
{
    // Hypothesis under investigation: upload speed may degrade as the total
    // number of blocks already present in the node grows, with the metadata
    // store suspected as the cause. This test collects per-upload timings so
    // any slow-down can be detected and quantified.
    var codexNode = AddCodex(s => s
        .WithLogLevel(CodexLogLevel.Trace)
        .WithStorageQuota(20.GB())
    );

    var uploadDurations = new List<TimeSpan>();
    for (var iteration = 0; iteration < 100; iteration++)
    {
        // NOTE(review): fixed 1-second pause before each upload — presumably
        // to let the node settle between rounds; confirm intent with author.
        Thread.Sleep(1000);

        var payload = GenerateTestFile(100.MB());
        var elapsed = Stopwatch.Measure(GetTestLog(), "Upload_" + iteration, () =>
        {
            codexNode.UploadFile(payload);
        });
        uploadDurations.Add(elapsed);
    }

    // Dump the collected timings, one line per upload, for offline analysis.
    Log("Upload times:");
    foreach (var duration in uploadDurations)
    {
        Log(Time.FormatDuration(duration));
    }
}
}
}