Mirror of https://github.com/logos-storage/apatheia.git (synced 2026-01-04 14:03:09 +00:00)
Commit ab7881ed4c (parent 7f3590793d): moving
@@ -70,8 +70,3 @@ macro submitMacro*(tp: untyped, jobs: untyped, exp: untyped): untyped =

template submit*[T](jobs: JobQueue[T], exp: untyped): Future[T] =
  submitMacro(T, jobs, exp)

template jobWrapper*(task: untyped) =
  template `task Wrapper`*(jobResult: JobResult[float], args: varargs[untyped]) =
    let res = unpackVarargs(`task`, args)
    discard jobResult.queue.send((jobResult.id, res,))

@@ -122,7 +122,6 @@ proc identPub*(name: string): NimNode =

proc procIdentAppend*(id: NimNode, name: string): NimNode =
  if id.kind == nnkPostfix:
    echo "PROC ID: ", id.treeRepr
    result = id
    result[1] = ident(result[1].strVal & name)
  else:

@@ -3,10 +3,16 @@ import std/[macros, strutils]

import macroutils

template checkParamType(obj: object) =
import jobs
export jobs

template checkParamType*(obj: object): auto =
  for name, field in obj.fieldPairs():
    echo "field name: ", name
  obj

template checkParamType*(obj: typed): auto =
  obj

macro asyncTask*(p: untyped): untyped =

@@ -21,10 +27,6 @@ macro asyncTask*(p: untyped): untyped =

  echo "\nASYNC_TASK: "
  echo "name: ", name
  echo "hasReturnType: ", hasReturnType(params)
  echo "getReturnType: ", params.getReturnType().treeRepr
  echo "generics: ", genericParams.treeRepr
  echo "params: \n", params.treeRepr
  # echo "ASYNC_TASK: call: \n", tcall.treeRepr

  var asyncBody = newStmtList()

@@ -50,33 +52,10 @@ type

proc doHashes*(data: openArray[byte],
               opts: HashOptions) {.asyncTask.} =
  # echo "args: ", args.len()
  discard

proc doHashesRes*(data: openArray[byte],
                  opts: HashOptions): int {.asyncTask.} =
  discard
  # echo "args: ", args.len()
  result = 10


when false:
  proc doHashesTask*(args: seq[Data]) =
    discard

  proc doHashes*(args: seq[Data]) {.async.} =
    # setup signals ... etc
    # memory stuffs
    # create future
    let argsPtr = addr args[0]
    let argsLen = args.len()
    GC_ref(args)

    doHashes(toOpenArray(argsPtr, argsLen))
    GC_unref(args)


  proc processHashes*(args: seq[Data]) {.async.} =
    ## do some processing on another thread
    let res = await doHashes(args)

# proc doHashesRes*(data: openArray[byte],
#                   opts: HashOptions): int {.asyncTask.} =
#   # echo "args: ", args.len()
#   result = 10

tests/ttasks.nim (new file, 30 lines)
@@ -0,0 +1,30 @@
import std/os

import chronos
import chronos/threadsync
import chronos/unittest2/asynctests
import taskpools

import apatheia/queues
import apatheia/tasks

## todo: setup basic async + threadsignal + taskpools example here
##

proc addNums(a, b: float): float {.asyncTask.} =
  os.sleep(500)
  echo "adding: ", a, " + ", b
  return a + b

# proc addNums(jobResult: JobResult[float], a, b: float) =
#   let res = addNumsRaw(a, b)
#   discard jobResult.queue.send((jobResult.id, res,))

# suite "async tests":
#   var tp = Taskpool.new(num_threads = 2) # Default to the number of hardware threads.

#   asyncTest "test":
#     var jobs = newJobQueue[float](taskpool = tp)
#     echo "\nstart"
#     let res = await jobs.submit(addNums(1.0, 2.0,))
#     echo "result: ", res.repr
#     check true
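
For reference, a minimal sketch (not part of this commit) of how the commented-out suite above could be enabled. It reuses only names already present in the diff (Taskpool.new, newJobQueue, submit, and the asyncTask-wrapped addNums); whether it compiles against the current apatheia API as-is is an assumption.

# Sketch only: uncommented version of the suite above, under the stated assumptions.
suite "async tests":
  # the comment above notes the default is the hardware thread count; 2 keeps the test small
  var tp = Taskpool.new(num_threads = 2)

  asyncTest "submit addNums to the job queue":
    var jobs = newJobQueue[float](taskpool = tp)
    # submit schedules addNums on the taskpool and yields a Future[float]
    let res = await jobs.submit(addNums(1.0, 2.0))
    # addNums returns a + b, so the awaited result should be 3.0
    check res == 3.0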