import std/options
import std/sequtils
import std/os
import std/cpuinfo
import std/algorithm
import std/importutils

import pkg/asynctest
import pkg/chronos
import pkg/chronos/threadsync
import pkg/stew/results
import pkg/stew/byteutils
import pkg/taskpools
import pkg/questionable/results
import pkg/chronicles
import pkg/threading/smartptrs

import pkg/datastore/sql/sqliteds
import pkg/datastore/fsds
# {.all.} imports private symbols too, so tests can reach internals
# such as TaskCtx / signalMonitor.
import pkg/datastore/threads/threadproxyds {.all.}

import ./dscommontests
import ./querycommontests
const
  NumThreads = 20 # IO threads aren't attached to CPU count

  # How many times to repeat each whole test suite.
  # Override at compile time with -d:ThreadTestLoops=<n>.
  ThreadTestLoops {.intdefine.} = 1
  N = ThreadTestLoops

  # How many times to repeat the shared test battery inside each suite.
  # Override at compile time with -d:ThreadTestInnerLoops=<n>.
  ThreadTestInnerLoops {.intdefine.} = 1
  M = ThreadTestInnerLoops
var
  # Single task pool shared by every suite below; the per-suite
  # Taskpool.new / shutdown calls are commented out in the suites so
  # repeated runs (N > 1) reuse these threads instead of re-spawning.
  taskPool: Taskpool = Taskpool.new(NumThreads)
# Repeat the basic-suite N times (see ThreadTestLoops) to shake out
# thread/GC lifecycle issues that only appear across repeated runs.
for i in 1..N:
  suite "Test Basic ThreadDatastore with SQLite " & $i:
    var
      sqlStore: SQLiteBackend[KeyId, DataBuffer]
      ds: ThreadDatastore[SQLiteBackend[KeyId, DataBuffer]]
      key = Key.init("/a/b").tryGet()
      bytes = "some bytes".toBytes
      otherBytes = "some other bytes".toBytes

    setupAll:
      # In-memory SQLite backend: no on-disk state between runs.
      sqlStore = newSQLiteBackend[KeyId, DataBuffer](Memory).tryGet()
      # taskPool = Taskpool.new(NumThreads)
      ds = ThreadDatastore.new(sqlStore, tp = taskPool).tryGet()

    teardown:
      # Force a full collection after each test to surface lifetime bugs.
      GC_fullCollect()

    teardownAll:
      (await ds.close()).tryGet()
      # taskPool.shutdown()

    # Run the shared datastore test battery M times per suite instance
    # (see ThreadTestInnerLoops).
    for i in 1..M:
      basicStoreTests(ds, key, bytes, otherBytes)
    GC_fullCollect()
# Repeat the query-suite N times (see ThreadTestLoops); unlike the basic
# suite above, the datastore is rebuilt per test (setup, not setupAll)
# because queryTests mutates store contents.
for i in 1..N:
  suite "Test Query ThreadDatastore with SQLite " & $i:
    var
      sqlStore: SQLiteBackend[KeyId, DataBuffer]
      # taskPool: Taskpool
      ds: ThreadDatastore[SQLiteBackend[KeyId, DataBuffer]]

    setup:
      # Fresh in-memory SQLite backend for every test.
      sqlStore = newSQLiteBackend[KeyId, DataBuffer](Memory).tryGet()
      # taskPool = Taskpool.new(NumThreads)
      ds = ThreadDatastore.new(sqlStore, tp = taskPool).tryGet()

    teardown:
      # Force a full collection after each test to surface lifetime bugs,
      # then close the per-test datastore.
      GC_fullCollect()
      (await ds.close()).tryGet()
      # taskPool.shutdown()

    # Run the shared query test battery M times per suite instance
    # (see ThreadTestInnerLoops).
    for i in 1..M:
      queryTests(ds, true)
    GC_fullCollect()
# suite "Test Basic ThreadDatastore with fsds":
#   let
#     path = currentSourcePath() # get this file's name
#     basePath = "tests_data"
#     basePathAbs = path.parentDir / basePath
#     key = Key.init("/a/b").tryGet()
#     bytes = "some bytes".toBytes
#     otherBytes = "some other bytes".toBytes
#
#   var
#     fsStore: FSDatastore
#     ds: ThreadDatastore
#     taskPool: Taskpool
#
#   setupAll:
#     removeDir(basePathAbs)
#     require(not dirExists(basePathAbs))
#     createDir(basePathAbs)
#
#     fsStore = FSDatastore.new(root = basePathAbs, depth = 3).tryGet()
#     taskPool = Taskpool.new(NumThreads)
#     ds = ThreadDatastore.new(fsStore, withLocks = true, tp = taskPool).tryGet()
#
#   teardown:
#     GC_fullCollect()
#
#   teardownAll:
#     (await ds.close()).tryGet()
#     taskPool.shutdown()
#
#     removeDir(basePathAbs)
#     require(not dirExists(basePathAbs))
#
#   basicStoreTests(fsStore, key, bytes, otherBytes)
# suite "Test Query ThreadDatastore with fsds":
#   let
#     path = currentSourcePath() # get this file's name
#     basePath = "tests_data"
#     basePathAbs = path.parentDir / basePath
#
#   var
#     fsStore: FSDatastore
#     ds: ThreadDatastore
#     taskPool: Taskpool
#
#   setup:
#     removeDir(basePathAbs)
#     require(not dirExists(basePathAbs))
#     createDir(basePathAbs)
#
#     fsStore = FSDatastore.new(root = basePathAbs, depth = 5).tryGet()
#     taskPool = Taskpool.new(NumThreads)
#     ds = ThreadDatastore.new(fsStore, withLocks = true, tp = taskPool).tryGet()
#
#   teardown:
#     GC_fullCollect()
#     (await ds.close()).tryGet()
#     taskPool.shutdown()
#
#     removeDir(basePathAbs)
#     require(not dirExists(basePathAbs))
#
#   queryTests(ds, false)
# suite "Test ThreadDatastore cancelations":
#   var
#     sqlStore: SQLiteBackend[KeyId,DataBuffer]
#     ds: ThreadDatastore
#     taskPool: Taskpool
#
#   privateAccess(ThreadDatastore) # expose private fields
#   privateAccess(TaskCtx) # expose private fields
#
#   setupAll:
#     sqlStore = newSQLiteBackend[KeyId, DataBuffer](Memory).tryGet()
#     taskPool = Taskpool.new(NumThreads)
#     ds = ThreadDatastore.new(sqlStore, tp = taskPool).tryGet()
#
#   teardown:
#     GC_fullCollect() # run full collect after each test
#
#   teardownAll:
#     (await ds.close()).tryGet()
#     taskPool.shutdown()
#
#   test "Should monitor signal and cancel":
#     var
#       signal = ThreadSignalPtr.new().tryGet()
#       res = ThreadResult[void]()
#       ctx = newSharedPtr(TaskCtxObj[void](signal: signal))
#       fut = newFuture[void]("signalMonitor")
#       threadArgs = (addr ctx, addr fut)
#       thread: Thread[type threadArgs]
#
#     proc threadTask(args: type threadArgs) =
#       var (ctx, fut) = args
#       proc asyncTask() {.async.} =
#         let
#           monitor = signalMonitor(ctx, fut[])
#         await monitor
#       waitFor asyncTask()
#
#     createThread(thread, threadTask, threadArgs)
#     ctx.cancelled = true
#     check: ctx.signal.fireSync.tryGet
#
#     joinThreads(thread)
#
#     check: fut.cancelled
#     check: ctx.signal.close().isOk
#     fut = nil
#
#   test "Should monitor and not cancel":
#     var
#       signal = ThreadSignalPtr.new().tryGet()
#       res = ThreadResult[void]()
#       ctx = TaskCtx[void](
#         ds: sqlStore,
#         res: addr res,
#         signal: signal)
#       fut = newFuture[void]("signalMonitor")
#       threadArgs = (addr ctx, addr fut)
#       thread: Thread[type threadArgs]
#
#     proc threadTask(args: type threadArgs) =
#       var (ctx, fut) = args
#       proc asyncTask() {.async.} =
#         let
#           monitor = signalMonitor(ctx, fut[])
#         await monitor
#       waitFor asyncTask()
#
#     createThread(thread, threadTask, threadArgs)
#     ctx.cancelled = false
#     check: ctx.signal.fireSync.tryGet
#
#     joinThreads(thread)
#
#     check: not fut.cancelled
#     check: ctx.signal.close().isOk
#     fut = nil