diff --git a/ProjectPlugins/CodexPlugin/CodexContainerRecipe.cs b/ProjectPlugins/CodexPlugin/CodexContainerRecipe.cs
index 8590029..e818774 100644
--- a/ProjectPlugins/CodexPlugin/CodexContainerRecipe.cs
+++ b/ProjectPlugins/CodexPlugin/CodexContainerRecipe.cs
@@ -7,7 +7,8 @@ namespace CodexPlugin
 {
     public class CodexContainerRecipe : ContainerRecipeFactory
     {
-        private const string DefaultDockerImage = "codexstorage/nim-codex:sha-c58d4d7-dist-tests";
+        private const string DefaultDockerImage = "codexstorage/nim-codex:sha-ea6d681-dist-tests";
+
         public const string ApiPortTag = "codex_api_port";
         public const string ListenPortTag = "codex_listen_port";
         public const string MetricsPortTag = "codex_metrics_port";
diff --git a/Tests/CodexLongTests/BasicTests/DownloadTests.cs b/Tests/CodexLongTests/BasicTests/DownloadTests.cs
index 533934d..fbfc20b 100644
--- a/Tests/CodexLongTests/BasicTests/DownloadTests.cs
+++ b/Tests/CodexLongTests/BasicTests/DownloadTests.cs
@@ -1,3 +1,4 @@
+using CodexPlugin;
 using CodexTests;
 using DistTestCore;
 using FileUtils;
@@ -7,35 +8,45 @@ using Utils;
 namespace CodexLongTests.BasicTests
 {
     [TestFixture]
-    public class DownloadTests : CodexDistTest
+    public class DownloadTests : AutoBootstrapDistTest
     {
-        [TestCase(3, 500)]
-        [TestCase(5, 100)]
-        [TestCase(10, 256)]
+        [Test]
+        [Combinatorial]
         [UseLongTimeouts]
-        public void ParallelDownload(int numberOfNodes, int filesizeMb)
+        public void ParallelDownload(
+            [Values(1, 3, 5)] int numberOfFiles,
+            [Values(10, 50, 100)] int filesizeMb)
         {
-            var group = AddCodex(numberOfNodes);
             var host = AddCodex();
+            var client = AddCodex();
 
-            foreach (var node in group)
+            var testfiles = new List<TrackedFile>();
+            var contentIds = new List<ContentId>();
+            var downloadedFiles = new List<TrackedFile?>();
+
+            for (int i = 0; i < numberOfFiles; i++)
             {
-                host.ConnectToPeer(node);
+                testfiles.Add(GenerateTestFile(filesizeMb.MB()));
+                contentIds.Add(new ContentId());
+                downloadedFiles.Add(null);
             }
 
-            var testFile = GenerateTestFile(filesizeMb.MB());
-            var contentId = host.UploadFile(testFile);
-            var list = new List<Task<TrackedFile?>>();
-
-            foreach (var node in group)
+            for (int i = 0; i < numberOfFiles; i++)
             {
-                list.Add(Task.Run(() => { return node.DownloadContent(contentId); }));
+                contentIds[i] = host.UploadFile(testfiles[i]);
             }
 
-            Task.WaitAll(list.ToArray());
-            foreach (var task in list)
+            var downloadTasks = new List<Task>();
+            for (int i = 0; i < numberOfFiles; i++)
             {
-                testFile.AssertIsEqual(task.Result);
+                downloadTasks.Add(Task.Run(() => { downloadedFiles[i] = client.DownloadContent(contentIds[i]); }));
+            }
+
+            Task.WaitAll(downloadTasks.ToArray());
+
+            for (int i = 0; i < numberOfFiles; i++)
+            {
+                testfiles[i].AssertIsEqual(downloadedFiles[i]);
             }
         }
     }
diff --git a/Tests/CodexLongTests/BasicTests/TestInfraTests.cs b/Tests/CodexLongTests/BasicTests/TestInfraTests.cs
index 5720457..51d41b7 100644
--- a/Tests/CodexLongTests/BasicTests/TestInfraTests.cs
+++ b/Tests/CodexLongTests/BasicTests/TestInfraTests.cs
@@ -6,7 +6,9 @@ namespace CodexLongTests.BasicTests
 {
     public class TestInfraTests : CodexDistTest
     {
-        [Test, UseLongTimeouts]
+        [Test]
+        [UseLongTimeouts]
+        [Ignore("Not supported atm")]
         public void TestInfraShouldHave1000AddressSpacesPerPod()
         {
             var group = AddCodex(1000, s => s.EnableMetrics());
@@ -17,7 +19,9 @@ namespace CodexLongTests.BasicTests
                 "Not all created nodes provided a unique id.");
         }
 
-        [Test, UseLongTimeouts]
+        [Test]
+        [UseLongTimeouts]
+        [Ignore("Not supported atm")]
         public void TestInfraSupportsManyConcurrentPods()
         {
             for (var i = 0; i < 20; i++)
diff --git a/Tests/CodexLongTests/BasicTests/UploadTests.cs b/Tests/CodexLongTests/BasicTests/UploadTests.cs
index 57e0296..31718aa 100644
--- a/Tests/CodexLongTests/BasicTests/UploadTests.cs
+++ b/Tests/CodexLongTests/BasicTests/UploadTests.cs
@@ -8,41 +8,39 @@ using Utils;
 namespace CodexLongTests.BasicTests
 {
     [TestFixture]
-    public class UploadTests : CodexDistTest
+    public class UploadTests : AutoBootstrapDistTest
     {
-        [TestCase(3, 50)]
-        [TestCase(5, 75)]
-        [TestCase(10, 25)]
+        [Test]
+        [Combinatorial]
         [UseLongTimeouts]
-        public void ParallelUpload(int numberOfNodes, int filesizeMb)
+        public void ParallelUpload(
+            [Values(1, 3, 5)] int numberOfFiles,
+            [Values(10, 50, 100)] int filesizeMb)
         {
-            var group = AddCodex(numberOfNodes);
             var host = AddCodex();
-
-            foreach (var node in group)
-            {
-                host.ConnectToPeer(node);
-            }
+            var client = AddCodex();
 
             var testfiles = new List<TrackedFile>();
-            var contentIds = new List<Task<ContentId>>();
+            var contentIds = new List<ContentId>();
 
-            for (int i = 0; i < group.Count(); i++)
+            for (int i = 0; i < numberOfFiles; i++)
             {
                 testfiles.Add(GenerateTestFile(filesizeMb.MB()));
-                var n = i;
-                contentIds.Add(Task.Run(() => { return host.UploadFile(testfiles[n]); }));
+                contentIds.Add(new ContentId());
             }
-            var downloads = new List<Task<TrackedFile?>>();
-            for (int i = 0; i < group.Count(); i++)
+
+            var uploadTasks = new List<Task>();
+            for (int i = 0; i < numberOfFiles; i++)
             {
-                var n = i;
-                downloads.Add(Task.Run(() => { return group[n].DownloadContent(contentIds[n].Result); }));
+                uploadTasks.Add(Task.Run(() => { contentIds[i] = host.UploadFile(testfiles[i]); }));
             }
-            Task.WaitAll(downloads.ToArray());
-            for (int i = 0; i < group.Count(); i++)
+
+            Task.WaitAll(uploadTasks.ToArray());
+
+            for (int i = 0; i < numberOfFiles; i++)
             {
-                testfiles[i].AssertIsEqual(downloads[i].Result);
+                var downloaded = client.DownloadContent(contentIds[i]);
+                testfiles[i].AssertIsEqual(downloaded);
             }
         }
     }
diff --git a/Tests/CodexTests/ScalabilityTests/MultiPeerDownloadTests.cs b/Tests/CodexLongTests/ScalabilityTests/MultiPeerDownloadTests.cs
similarity index 100%
rename from Tests/CodexTests/ScalabilityTests/MultiPeerDownloadTests.cs
rename to Tests/CodexLongTests/ScalabilityTests/MultiPeerDownloadTests.cs
diff --git a/Tests/CodexTests/ScalabilityTests/ScalabilityTests.cs b/Tests/CodexLongTests/ScalabilityTests/ScalabilityTests.cs
similarity index 100%
rename from Tests/CodexTests/ScalabilityTests/ScalabilityTests.cs
rename to Tests/CodexLongTests/ScalabilityTests/ScalabilityTests.cs
diff --git a/Tests/CodexTests/ScalabilityTests/OneClientLargeFileTests.cs b/Tests/CodexTests/ScalabilityTests/OneClientLargeFileTests.cs
deleted file mode 100644
index 3e87e1d..0000000
--- a/Tests/CodexTests/ScalabilityTests/OneClientLargeFileTests.cs
+++ /dev/null
@@ -1,126 +0,0 @@
-using CodexPlugin;
-using DistTestCore;
-using Logging;
-using NUnit.Framework;
-using Utils;
-
-namespace CodexTests.ScalabilityTests
-{
-    [TestFixture]
-    public class OneClientLargeFileTests : CodexDistTest
-    {
-        [Test]
-        [Combinatorial]
-        [UseLongTimeouts]
-        public void OneClientLargeFile([Values(
-            256,
-            512,
-            1024, // GB
-            2048,
-            4096,
-            8192,
-            16384,
-            32768,
-            65536,
-            131072
-        )] int sizeMb)
-        {
-            var testFile = GenerateTestFile(sizeMb.MB());
-
-            var node = AddCodex(s => s
-                .WithLogLevel(CodexLogLevel.Warn)
-                .WithStorageQuota((sizeMb + 10).MB())
-            );
-            var contentId = node.UploadFile(testFile);
-            var downloadedFile = node.DownloadContent(contentId);
-
-            testFile.AssertIsEqual(downloadedFile);
-        }
-
-        [Test]
-        public void ManyFiles()
-        {
-            // I suspect that the upload speed is linked to the total
-            // number of blocks already in the node. I suspect the
-            // metadata store to be the cause of any slow-down.
-            // Using this test to detect and quantify the numbers.
-
-            var node = AddCodex(s => s
-                .WithLogLevel(CodexLogLevel.Trace)
-                .WithStorageQuota(20.GB())
-            );
-
-            var startUtc = DateTime.UtcNow;
-            var endUtc = DateTime.UtcNow;
-
-            var fastMap = new Dictionary<string, int>();
-            var slowMap = new Dictionary<string, int>();
-
-            var times = new List<TimeSpan>();
-            for (var i = 0; i < 100; i++)
-            {
-                Thread.Sleep(1000);
-                var file = GenerateTestFile(100.MB());
-                startUtc = DateTime.UtcNow;
-                var duration = Stopwatch.Measure(GetTestLog(), "Upload_" + i, () =>
-                {
-                    node.UploadFile(file);
-                });
-                times.Add(duration);
-                endUtc = DateTime.UtcNow;
-
-                // We collect the log of the node during the upload.
-                // We count the line occurances.
-                // If the upload was fast, add it to the fast-map.
-                // If it was slow, add it to the slow-map.
-                // After the test, we can compare and hopefully see what the node was doing during the slow uploads
-                // that it wasn't doing during the fast ones.
-                if (duration.TotalSeconds < 12)
-                {
-                    AddToLogMap(fastMap, node, startUtc, endUtc);
-                }
-                else if (duration.TotalSeconds > 25)
-                {
-                    AddToLogMap(slowMap, node, startUtc, endUtc);
-                }
-            }
-
-            Log("Upload times:");
-            foreach (var t in times)
-            {
-                Log(Time.FormatDuration(t));
-            }
-            Log("Fast map:");
-            foreach (var entry in fastMap.OrderByDescending(p => p.Value))
-            {
-                if (entry.Value > 9)
-                {
-                    Log($"'{entry.Key}' = {entry.Value}");
-                }
-            }
-            Log("Slow map:");
-            foreach (var entry in slowMap.OrderByDescending(p => p.Value))
-            {
-                if (entry.Value > 9)
-                {
-                    Log($"'{entry.Key}' = {entry.Value}");
-                }
-            }
-        }
-
-        private void AddToLogMap(Dictionary<string, int> map, ICodexNode node, DateTime startUtc, DateTime endUtc)
-        {
-            var log = Ci.DownloadLog(node, 1000000);
-            log.IterateLines(line =>
-            {
-                var log = CodexLogLine.Parse(line);
-                if (log == null) return;
-                if (log.TimestampUtc < startUtc) return;
-                if (log.TimestampUtc > endUtc) return;
-
-                if (map.ContainsKey(log.Message)) map[log.Message] += 1;
-                else map.Add(log.Message, 1);
-            });
-        }
-    }
-}
diff --git a/Tests/CodexTests/ScalabilityTests/ClusterSpeedTests.cs b/Tests/CodexTests/UtilityTests/ClusterSpeedTests.cs
similarity index 97%
rename from Tests/CodexTests/ScalabilityTests/ClusterSpeedTests.cs
rename to Tests/CodexTests/UtilityTests/ClusterSpeedTests.cs
index 0e3fdaf..9e647ea 100644
--- a/Tests/CodexTests/ScalabilityTests/ClusterSpeedTests.cs
+++ b/Tests/CodexTests/UtilityTests/ClusterSpeedTests.cs
@@ -12,6 +12,7 @@ namespace CodexTests.ScalabilityTests
 
         [Test]
         [Combinatorial]
+        [Ignore("Used to measure disc io speeds in cluster.")]
         public void DiscSpeedTest(
             [Values(1, 10, 100, 1024, 1024 * 10, 1024 * 100, 1024 * 1024)] int bufferSizeKb
             )