Add push raises
parent ba6f90c807
commit 7cfe1cd8da
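In summary, the commit puts a {.push gcsafe, raises: [].} / {.pop.} pair around each module, gives public procs explicit raises annotations, wraps code that can raise (stream IOError, formatting ValueError) in try/except, replaces most strutils "%" message formatting with plain concatenation, and pins the chronos dependency to nim-chronos#head. A minimal, self-contained sketch of the push/pop pattern (illustrative only, with hypothetical proc names, not code from this commit):

# Illustrative sketch only: everything between push and pop must be
# gcsafe and must not let any exception escape.
import std/strutils

{.push gcsafe, raises: [].}

proc describe(kind: string): string =
  # '&' concatenation cannot raise, unlike the '%' formatting operator,
  # which can raise ValueError and would break the empty raises list.
  "expect int, but got '" & kind & "'"

proc parseNumber(text: string): int =
  try:
    result = parseInt(text)   # parseInt can raise ValueError
  except ValueError:
    result = 0                # handled locally, so nothing escapes

{.pop.}

when isMainModule:
  echo describe("nkFloat")
  echo parseNumber("123")
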
@@ -23,7 +23,7 @@ requires "nim >= 1.2.0",
   "chronicles",
   "https://github.com/status-im/nim-zlib",
   "unittest2",
-  "chronos"
+  "https://github.com/status-im/nim-chronos#head"

 proc test(args, path: string, shouldRun = true) =
   # Compilation language is controlled by TEST_LANG

graphql/api.nim (117 lines changed)
@@ -23,6 +23,8 @@ export
 const
   builtinSchema = staticRead("builtin/schema.ql")

+{.push gcsafe, raises: [].}
+
 proc registerBuiltinScalars(ctx: GraphqlRef)
 proc loadBuiltinSchema(ctx: GraphqlRef)
 proc registerInstrospection(ctx: GraphqlRef)
@@ -82,7 +84,7 @@ proc addVar*(ctx: GraphqlRef, name: string) =
   let node = Node(kind: nkNull, pos: Pos())
   ctx.varTable[name] = node

-proc parseVariable(q: var Parser): Node =
+proc parseVariable(q: var Parser): Node {.gcsafe, raises: [IOError].} =
   nextToken
   if currToken == tokEof:
     return
@@ -90,13 +92,16 @@ proc parseVariable(q: var Parser): Node =
   q.valueLiteral(isConst = true, result)

 proc parseVariable(ctx: GraphqlRef, name: string, input: InputStream): GraphqlResult =
-  var parser = Parser.init(input, ctx.names)
-  let node = parser.parseVariable()
-  if parser.error != errNone:
-    return err(@[parser.err])
-  let varname = ctx.names.insert(name)
-  ctx.varTable[varname] = node
-  ok()
+  try:
+    var parser = Parser.init(input, ctx.names)
+    let node = parser.parseVariable()
+    if parser.error != errNone:
+      return err(@[parser.err])
+    let varname = ctx.names.insert(name)
+    ctx.varTable[varname] = node
+    ok()
+  except IOError as exc:
+    err(@[errorError(exc.msg)])

 proc parseVar*(ctx: GraphqlRef, name: string,
                value: string): GraphqlResult {.gcsafe.} =
@@ -110,29 +115,31 @@ proc parseVar*(ctx: GraphqlRef, name: string,
   var stream = unsafeMemoryInput(value)
   ctx.parseVariable(name, stream)

-proc parseVars(ctx: GraphqlRef, input: InputStream): GraphqlResult =
-  var parser = Parser.init(input, ctx.names)
-  parser.lex.next()
-  if parser.lex.tok == tokEof:
-    return ok()
+proc parseVars(ctx: GraphqlRef, input: InputStream): GraphqlResult {.gcsafe, raises: [].} =
+  try:
+    var parser = Parser.init(input, ctx.names)
+    parser.lex.next()
+    if parser.lex.tok == tokEof:
+      return ok()

-  var values: Node
-  parser.rgReset(rgValueLiteral) # recursion guard
-  parser.valueLiteral(isConst = true, values)
-  if parser.error != errNone:
-    return err(@[parser.err])
+    var values: Node
+    parser.rgReset(rgValueLiteral) # recursion guard
+    parser.valueLiteral(isConst = true, values)
+    if parser.error != errNone:
+      return err(@[parser.err])

-  for n in values:
-    ctx.varTable[n[0].name] = n[1]
-
-  ok()
+    for n in values:
+      ctx.varTable[n[0].name] = n[1]
+    ok()
+  except IOError as exc:
+    err(@[errorError(exc.msg)])

-proc parseVars*(ctx: GraphqlRef, input: string): GraphqlResult {.gcsafe.} =
+proc parseVars*(ctx: GraphqlRef, input: string): GraphqlResult {.gcsafe, raises: [].} =
   {.gcsafe.}:
     var stream = unsafeMemoryInput(input)
     ctx.parseVars(stream)

-proc parseVars*(ctx: GraphqlRef, input: openArray[byte]): GraphqlResult {.gcsafe.} =
+proc parseVars*(ctx: GraphqlRef, input: openArray[byte]): GraphqlResult {.gcsafe, raises: [].} =
   {.gcsafe.}:
     var stream = unsafeMemoryInput(input)
     ctx.parseVars(stream)
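For reference, the exported entry points above keep their signatures, so call sites are unaffected; a hypothetical call site (the constructor and the variable-literal syntax are assumptions, not taken from this diff) could look like:

import graphql

let ctx = GraphqlRef.new()          # assumed constructor, not shown in this diff
# parseVars takes a const GraphQL value literal holding all variables
if ctx.parseVars("{skip: true, max: 10}").isErr:
  echo "failed to parse variables"
# parseVar adds a single named variable from its literal representation
if ctx.parseVar("name", "\"bob\"").isErr:
  echo "failed to parse variable"
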
@@ -169,16 +176,19 @@ proc markAsStored(ctx: GraphqlRef, root: Node) =

 template validation(ctx: GraphqlRef, parser: Parser,
                     stream: InputStream, doc, store: untyped): untyped =
-  parser.parseDocument(doc)
-  close stream
-  if parser.error != errNone:
-    return err(@[parser.err])
-  ctx.validate(doc.root)
-  if store:
-    ctx.markAsStored(doc.root)
-  if ctx.errKind != ErrNone:
-    return err(ctx.errors)
-  ok()
+  try:
+    parser.parseDocument(doc)
+    close stream
+    if parser.error != errNone:
+      return err(@[parser.err])
+    ctx.validate(doc.root)
+    if store:
+      ctx.markAsStored(doc.root)
+    if ctx.errKind != ErrNone:
+      return err(ctx.errors)
+    ok()
+  except IOError as exc:
+    err(@[errorError(exc.msg)])

 template parseSchemaImpl(schema, store, conf: untyped): untyped =
   var stream = unsafeMemoryInput(schema)
@@ -187,17 +197,17 @@ template parseSchemaImpl(schema, store, conf: untyped): untyped =
   ctx.validation(parser, stream, doc, store)

 proc parseSchema*(ctx: GraphqlRef, schema: string, store = false,
-                  conf = defaultParserConf()): GraphqlResult {.gcsafe.} =
+                  conf = defaultParserConf()): GraphqlResult {.gcsafe, raises: [].} =
   {.gcsafe.}:
     parseSchemaImpl(schema, store, conf)

 proc parseSchema*(ctx: GraphqlRef, schema: openArray[byte], store = false,
-                  conf = defaultParserConf()): GraphqlResult =
+                  conf = defaultParserConf()): GraphqlResult {.gcsafe, raises: [].} =
   {.gcsafe.}:
     parseSchemaImpl(schema, store, conf)

 proc parseSchemaFromFile*(ctx: GraphqlRef, fileName: string, store = false,
-                  conf = defaultParserConf()): GraphqlResult {.gcsafe.} =
+                  conf = defaultParserConf()): GraphqlResult {.gcsafe, raises: [].} =
   {.gcsafe.}:
     try:
       var stream = memFileInput(fileName)
@@ -208,21 +218,24 @@ proc parseSchemaFromFile*(ctx: GraphqlRef, fileName: string, store = false,
       err(@[fatalError("parseSchemaFromFile: " & e.msg)])

 proc parseSchema(ctx: GraphqlRef, stream: InputStream,
-                 root: var Node, conf: ParserConf): GraphqlResult =
-  var parser = Parser.init(stream, ctx.names, conf)
-  var doc: SchemaDocument
-  parser.parseDocument(doc)
-  close stream
-  if parser.error != errNone:
-    return err(@[parser.err])
-  if root.isNil: root = doc.root
-  else: root.sons.add doc.root.sons
-  ok()
+                 root: var Node, conf: ParserConf): GraphqlResult {.gcsafe, raises: [].} =
+  try:
+    var parser = Parser.init(stream, ctx.names, conf)
+    var doc: SchemaDocument
+    parser.parseDocument(doc)
+    close stream
+    if parser.error != errNone:
+      return err(@[parser.err])
+    if root.isNil: root = doc.root
+    else: root.sons.add doc.root.sons
+    ok()
+  except IOError as exc:
+    err(@[errorError(exc.msg)])

 proc parseSchemas*[T: string | seq[byte]](ctx: GraphqlRef,
                    files: openArray[string],
                    schemas: openArray[T], store = false,
-                   conf = defaultParserConf()): GraphqlResult {.gcsafe.} =
+                   conf = defaultParserConf()): GraphqlResult {.gcsafe, raises: [].} =
   {.gcsafe.}:
     var root: Node
     try:
@@ -254,17 +267,17 @@ template parseQueryImpl(schema, store, conf: untyped): untyped =
   ctx.validation(parser, stream, doc, store)

 proc parseQuery*(ctx: GraphqlRef, query: string, store = false,
-                 conf = defaultParserConf()): GraphqlResult {.gcsafe.} =
+                 conf = defaultParserConf()): GraphqlResult {.gcsafe, raises: [].} =
   {.gcsafe.}:
     parseQueryImpl(query, store, conf)

 proc parseQuery*(ctx: GraphqlRef, query: openArray[byte], store = false,
-                 conf = defaultParserConf()): GraphqlResult {.gcsafe.} =
+                 conf = defaultParserConf()): GraphqlResult {.gcsafe, raises: [].} =
   {.gcsafe.}:
     parseQueryImpl(query, store, conf)

 proc parseQueryFromFile*(ctx: GraphqlRef, fileName: string, store = false,
-                 conf = defaultParserConf()): GraphqlResult {.gcsafe.} =
+                 conf = defaultParserConf()): GraphqlResult {.gcsafe, raises: [].} =
   {.gcsafe.}:
     try:
       var stream = memFileInput(fileName)
@@ -345,3 +358,5 @@ proc registerInstrospection(ctx: GraphqlRef) =
   ctx.addResolvers(ctx, "__InputValue", inputValueProtos)
   ctx.addResolvers(ctx, "__EnumValue", enumValueProtos)
   ctx.addResolvers(ctx, "__Directive", directiveProtos)
+
+{.pop.}

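The recurring change in graphql/api.nim is the shape shown above: anything that reads from an InputStream is wrapped in try/except IOError so the enclosing proc can keep an empty raises list. A condensed stand-alone illustration of that shape (invented names; only the pattern mirrors the diff):

import faststreams/inputs, stew/results

{.push gcsafe, raises: [].}

proc readAll(stream: InputStream): Result[string, string] =
  # InputStream operations may raise IOError; catching it here keeps the
  # declared raises: [] contract of this module intact.
  try:
    var data: string
    while stream.readable:
      data.add stream.read.char
    close stream
    ok(data)
  except IOError as exc:
    err(exc.msg)

{.pop.}

when isMainModule:
  var stream = unsafeMemoryInput("hello")
  echo readAll(stream).get()
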
@@ -8,11 +8,13 @@
 # those terms.

 import
-  std/[strutils, parseutils],
+  std/[parseutils],
   stew/results,
   ../common/[ast],
   ../graphql

+{.push gcsafe, raises: [] .}
+
 proc validateInt32(x: string): Result[void, string] =
   var pos = 0
   var sign = if x[0] == '-':
@@ -43,7 +45,7 @@ proc scalarInt(ctx: GraphqlRef, typeNode, node: Node): NodeResult {.cdecl, gcsaf
     else:
       ok(node)
   else:
-    err("expect int, but got '$1'" % [$node.kind])
+    err("expect int, but got '" & $node.kind & "'")

 proc scalarFloat(ctx: GraphqlRef, typeNode, node: Node): NodeResult {.cdecl, gcsafe, noSideEffect.} =
   case node.kind
@@ -54,22 +56,22 @@ proc scalarFloat(ctx: GraphqlRef, typeNode, node: Node): NodeResult {.cdecl, gcs
     var number: float
     let L = parseFloat(node.floatVal, number)
     if L != node.floatVal.len or L == 0:
-      return err("'$1' is not a valid float" % [node.floatVal])
+      return err("'" & node.floatVal & "' is not a valid float")
     ok(node)
   else:
-    err("expect int or float, but got '$1'" % [$node.kind])
+    err("expect int or float, but got '" & $node.kind & "'")

 proc scalarString(ctx: GraphqlRef, typeNode, node: Node): NodeResult {.cdecl, gcsafe, noSideEffect.} =
   if node.kind == nkString:
     ok(node)
   else:
-    err("expect string, but got '$1'" % [$node.kind])
+    err("expect string, but got '" & $node.kind & "'")

 proc scalarBoolean(ctx: GraphqlRef, typeNode, node: Node): NodeResult {.cdecl, gcsafe, noSideEffect.} =
   if node.kind == nkBoolean:
     ok(node)
   else:
-    err("expect boolean, but got '$1'" % [$node.kind])
+    err("expect boolean, but got '" & $node.kind & "'")

 proc scalarID(ctx: GraphqlRef, typeNode, node: Node): NodeResult {.cdecl, gcsafe, noSideEffect.} =
   case node.kind
@@ -83,7 +85,7 @@ proc scalarID(ctx: GraphqlRef, typeNode, node: Node): NodeResult {.cdecl, gcsafe
   of nkString:
     ok(node)
   else:
-    err("expect int or string, but got '$1'" % [$node.kind])
+    err("expect int or string, but got '" & $node.kind & "'")

 const
   builtinScalars* = {
@@ -93,3 +95,5 @@ const
     "Boolean": scalarBoolean,
     "ID": scalarID
   }
+
+{.pop.}

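The string-building change repeated throughout this file: strutils "%" carries a ValueError effect (it raises on a malformed format string or a missing argument), so under raises: [] the messages are rebuilt with plain "&" concatenation, which has no exception effect. A small stand-alone comparison (illustrative only):

import std/strutils

proc withFormat(kind: string): string {.raises: [ValueError].} =
  "expect int, but got '$1'" % [kind]

{.push raises: [].}
proc withConcat(kind: string): string =
  "expect int, but got '" & kind & "'"
{.pop.}

when isMainModule:
  doAssert withFormat("nkFloat") == withConcat("nkFloat")
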
@ -8,13 +8,13 @@
|
|||
# those terms.
|
||||
|
||||
import
|
||||
std/[tables, strutils],
|
||||
std/[tables],
|
||||
stew/[results],
|
||||
../common/[ast, ast_helper, response, names],
|
||||
../graphql
|
||||
|
||||
{.push hint[XCannotRaiseY]: off.}
|
||||
{.pragma: apiPragma, cdecl, gcsafe, raises: [Defect, CatchableError].}
|
||||
{.push gcsafe, raises: [] .}
|
||||
{.pragma: apiPragma, cdecl, gcsafe, raises: [].}
|
||||
|
||||
proc findType(ctx: GraphqlRef, nameStr: string): Node =
|
||||
let name = ctx.names.insert(nameStr)
|
||||
|
@ -63,15 +63,16 @@ proc queryType(ud: RootRef, params: Args, parent: Node): RespResult {.apiPragma.
|
|||
let name = params[0].val
|
||||
let sym = ctx.findType(name.stringVal)
|
||||
if sym.isNil:
|
||||
err("'$1' not defined" % [name.stringVal])
|
||||
err("'" & name.stringVal & "' not defined")
|
||||
elif sym.sym.kind == skDirective:
|
||||
err("'$1' is a directive, not a type" % [name.stringVal])
|
||||
err("'" & name.stringVal & "' is a directive, not a type")
|
||||
else:
|
||||
ok(sym)
|
||||
|
||||
proc queryTypename(ud: RootRef, params: Args, parent: Node): RespResult {.apiPragma.} =
|
||||
if parent.kind notin resObjValidKind:
|
||||
return err("__typename expect one of $1 but got '$2'" % [$resObjValidKind, $parent.kind])
|
||||
return err("__typename expect one of " &
|
||||
$resObjValidKind & " but got '" & $parent.kind & "'")
|
||||
|
||||
ok(resp($parent))
|
||||
|
||||
|
|
|
@ -28,6 +28,8 @@ type
|
|||
doubleEscape: bool
|
||||
escapeUnicode: bool
|
||||
|
||||
{.push gcsafe, raises: [IOError] .}
|
||||
|
||||
template top(x: seq[State]): State =
|
||||
x[^1]
|
||||
|
||||
|
@ -146,7 +148,7 @@ proc writeNull*(x: JsonRespStream) =
|
|||
writeSeparator(x)
|
||||
append "null"
|
||||
|
||||
proc field*(x: JsonRespStream, v: string) =
|
||||
proc field*(x: JsonRespStream, v: string){.gcsafe, raises: [IOError].} =
|
||||
let top = x.stack.top
|
||||
if x.doubleEscape:
|
||||
case top
|
||||
|
@ -210,26 +212,28 @@ proc serialize*(resp: JsonRespStream, n: Node) =
|
|||
proc write*(x: JsonRespStream, v: openArray[byte]) =
|
||||
x.stream.write(v)
|
||||
|
||||
proc getString*(x: JsonRespStream): string =
|
||||
proc getString*(x: JsonRespStream): string {.gcsafe, raises:[].} =
|
||||
x.stream.getOutput(string)
|
||||
|
||||
proc getBytes*(x: JsonRespStream): seq[byte] =
|
||||
proc getBytes*(x: JsonRespStream): seq[byte] {.gcsafe, raises:[].} =
|
||||
x.stream.getOutput(seq[byte])
|
||||
|
||||
proc len*(x: JsonRespStream): int =
|
||||
proc len*(x: JsonRespStream): int {.gcsafe, raises:[].} =
|
||||
x.stream.pos()
|
||||
|
||||
proc init*(v: JsonRespStream,
|
||||
doubleEscape: bool = false,
|
||||
escapeUnicode: bool = false) =
|
||||
escapeUnicode: bool = false) {.gcsafe, raises:[].} =
|
||||
v.stream = memoryOutput()
|
||||
v.stack = @[StateTop]
|
||||
v.stack = @[StateTop]
|
||||
v.doubleEscape = doubleEscape
|
||||
v.escapeUnicode = escapeUnicode
|
||||
|
||||
proc new*(_: type JsonRespStream,
|
||||
doubleEscape: bool = false,
|
||||
escapeUnicode: bool = false): JsonRespStream =
|
||||
escapeUnicode: bool = false): JsonRespStream {.gcsafe, raises: [].} =
|
||||
let v = JsonRespStream()
|
||||
v.init(doubleEscape, escapeUnicode)
|
||||
v
|
||||
v
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -17,6 +17,8 @@ type
|
|||
cloneTree: bool
|
||||
node: Node
|
||||
|
||||
{.push gcsafe, raises: [] .}
|
||||
|
||||
proc serialize*(resp: NodeRespStream, n: Node) =
|
||||
if resp.cloneTree:
|
||||
resp.node = copyTree(n)
|
||||
|
@ -35,3 +37,5 @@ proc new*(_: type NodeRespStream, cloneTree: bool = false): NodeRespStream =
|
|||
let v = NodeRespStream()
|
||||
v.init(cloneTree)
|
||||
v
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -28,6 +28,8 @@ type
|
|||
stack: seq[State]
|
||||
fldName: string
|
||||
|
||||
{.push gcsafe, raises: [IOError] .}
|
||||
|
||||
template top(x: seq[State]): State =
|
||||
x[^1]
|
||||
|
||||
|
@ -202,11 +204,11 @@ proc write*(x: TomlRespStream, v: float64) =
|
|||
append $v
|
||||
writeEOL(x)
|
||||
|
||||
proc writeNull*(x: TomlRespStream) =
|
||||
proc writeNull*(x: TomlRespStream) {.gcsafe, raises:[].} =
|
||||
# really, just do nothing
|
||||
discard
|
||||
|
||||
proc field*(x: TomlRespStream, v: string) =
|
||||
proc field*(x: TomlRespStream, v: string) {.gcsafe, raises:[].} =
|
||||
x.fldName = v
|
||||
|
||||
proc serialize*(resp: TomlRespStream, n: Node) =
|
||||
|
@ -240,20 +242,22 @@ proc serialize*(resp: TomlRespStream, n: Node) =
|
|||
proc write*(x: TomlRespStream, v: openArray[byte]) =
|
||||
x.stream.write(v)
|
||||
|
||||
proc getString*(x: TomlRespStream): string =
|
||||
proc getString*(x: TomlRespStream): string {.gcsafe, raises:[].} =
|
||||
x.stream.getOutput(string)
|
||||
|
||||
proc getBytes*(x: TomlRespStream): seq[byte] =
|
||||
proc getBytes*(x: TomlRespStream): seq[byte] {.gcsafe, raises:[].} =
|
||||
x.stream.getOutput(seq[byte])
|
||||
|
||||
proc len*(x: TomlRespStream): int =
|
||||
proc len*(x: TomlRespStream): int {.gcsafe, raises:[].} =
|
||||
x.stream.pos()
|
||||
|
||||
proc init*(v: TomlRespStream) =
|
||||
proc init*(v: TomlRespStream) {.gcsafe, raises:[].} =
|
||||
v.stream = memoryOutput()
|
||||
v.stack = @[StateTop]
|
||||
|
||||
proc new*(_: type TomlRespStream): TomlRespStream =
|
||||
proc new*(_: type TomlRespStream): TomlRespStream {.gcsafe, raises:[].} =
|
||||
let v = TomlRespStream()
|
||||
v.init()
|
||||
v
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -201,6 +201,8 @@ const
|
|||
|
||||
unreachableCode* = "unreachableCode"
|
||||
|
||||
{.push gcsafe, raises: [] .}
|
||||
|
||||
template unreachable*() =
|
||||
assert(false, unreachableCode)
|
||||
|
||||
|
@ -389,4 +391,6 @@ proc setField*(sym: Symbol, name: Name, node: Node) =
|
|||
of skInterface:
|
||||
sym.interfaceFields[name] = node
|
||||
else:
|
||||
unreachable()
|
||||
unreachable()
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -43,6 +43,8 @@ type
|
|||
|
||||
Types* = Directive | InputObject | Enum | Union | Interface | Object | Scalar
|
||||
|
||||
{.push gcsafe, raises: [] .}
|
||||
|
||||
func desc*(n: Types): Node = Node(n)[0]
|
||||
func name*(n: Types): Node = Node(n)[1]
|
||||
func dirs*(n: InputObject): Dirs = Dirs(Node(n)[2])
|
||||
|
@ -137,3 +139,5 @@ proc findArg*(name: Node, args: Arguments): Node =
|
|||
if arg.name.name == name.name:
|
||||
return arg.typ
|
||||
return nil
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -25,13 +25,18 @@ type
|
|||
message*: string
|
||||
path*: Node
|
||||
|
||||
{.push gcsafe, raises: [] .}
|
||||
|
||||
proc `$`*(x: ErrorDesc): string =
|
||||
if x.path.isNil:
|
||||
"[$1, $2]: $3: $4" % [$x.pos.line,
|
||||
$x.pos.col, $x.level, x.message]
|
||||
else:
|
||||
"[$1, $2]: $3: $4: $5" % [$x.pos.line,
|
||||
$x.pos.col, $x.level, x.message, $x.path.sons]
|
||||
try:
|
||||
if x.path.isNil:
|
||||
return "[$1, $2]: $3: $4" % [$x.pos.line,
|
||||
$x.pos.col, $x.level, x.message]
|
||||
else:
|
||||
return "[$1, $2]: $3: $4: $5" % [$x.pos.line,
|
||||
$x.pos.col, $x.level, x.message, $x.path.sons]
|
||||
except ValueError as exc:
|
||||
doAssert(false, exc.msg)
|
||||
|
||||
proc fatalError*(msg: string): ErrorDesc =
|
||||
ErrorDesc(
|
||||
|
@ -39,3 +44,12 @@ proc fatalError*(msg: string): ErrorDesc =
|
|||
pos: Pos(),
|
||||
message: msg
|
||||
)
|
||||
|
||||
proc errorError*(msg: string): ErrorDesc =
|
||||
ErrorDesc(
|
||||
level: elError,
|
||||
pos: Pos(),
|
||||
message: msg
|
||||
)
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -94,6 +94,8 @@ type
|
|||
|
||||
const MaxKeyword = Keyword.high.int
|
||||
|
||||
{.push gcsafe, raises: [] .}
|
||||
|
||||
proc insert*(nc: NameCache, name: string): Name
|
||||
|
||||
proc newNameCache*(): NameCache =
|
||||
|
@ -214,3 +216,5 @@ func toKeyword*(x: Name): Keyword =
|
|||
|
||||
proc `$`*(x: Name): string =
|
||||
if x.isNil: ":nil" else: x.s
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -10,6 +10,8 @@
|
|||
import
|
||||
./ast, ./types, ./names
|
||||
|
||||
{.push gcsafe, raises: [] .}
|
||||
|
||||
proc respMap*(name: Name): Node =
|
||||
Node(kind: nkMap, pos: Pos(), typeName: name)
|
||||
|
||||
|
@ -36,3 +38,5 @@ proc resp*(x: bool): Node =
|
|||
|
||||
proc resp*(x: float64): Node =
|
||||
Node(kind: nkFloat, floatVal: $x, pos: Pos())
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -10,7 +10,7 @@
|
|||
import
|
||||
./ast
|
||||
|
||||
{.pragma: respPragma, gcsafe, raises: [Defect, CatchableError].}
|
||||
{.pragma: respPragma, gcsafe, raises: [CatchableError].}
|
||||
|
||||
type
|
||||
serializeProc = proc(x: RootRef, n: Node) {.respPragma.}
|
||||
|
@ -20,7 +20,9 @@ type
|
|||
obj: RootRef
|
||||
serializeP: serializeProc
|
||||
|
||||
proc serializeImpl[T](x: RootRef, n: Node) =
|
||||
{.push gcsafe, raises: [].}
|
||||
|
||||
proc serializeImpl[T](x: RootRef, n: Node) {.gcsafe, raises: [IOError].} =
|
||||
mixin serialize
|
||||
serialize(T(x), n)
|
||||
|
||||
|
@ -30,7 +32,7 @@ proc respStream*[T: RootRef](x: T): RespStream =
|
|||
result.obj = x
|
||||
result.serializeP = serializeImpl[T]
|
||||
|
||||
proc serialize*(x: RespStream, n: Node) =
|
||||
proc serialize*(x: RespStream, n: Node) {.gcsafe, raises: [CatchableError].} =
|
||||
x.serializeP(x.obj, n)
|
||||
|
||||
proc to*(x: RespStream, T: type): T =
|
||||
|
@ -45,3 +47,5 @@ template respList*(x, body: untyped) =
|
|||
beginList(x)
|
||||
body
|
||||
endList(x)
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -92,7 +92,9 @@ const
|
|||
tokBlockString
|
||||
}
|
||||
|
||||
proc defaultParserConf*(): ParserConf =
|
||||
{.push gcsafe, raises: [IOError] .}
|
||||
|
||||
proc defaultParserConf*(): ParserConf {.gcsafe, raises: [].} =
|
||||
result.lexerConf = defaultLexConf()
|
||||
result.maxRecursionLimit = 25
|
||||
result.maxListElems = 128
|
||||
|
@ -103,7 +105,7 @@ proc defaultParserConf*(): ParserConf =
|
|||
result.maxDefinitions = 512
|
||||
result.maxChoices = 64
|
||||
|
||||
proc toInternalConf(conf: ParserConf): ParserConfInternal =
|
||||
proc toInternalConf(conf: ParserConf): ParserConfInternal {.gcsafe, raises: [].} =
|
||||
result.maxRecursionLimit = conf.maxRecursionLimit
|
||||
result.maxListElems = LoopGuard(maxLoop: conf.maxListElems, desc: "max list elements")
|
||||
result.maxFields = LoopGuard(maxLoop: conf.maxFields, desc: "max fields")
|
||||
|
@ -113,7 +115,7 @@ proc toInternalConf(conf: ParserConf): ParserConfInternal =
|
|||
result.maxDefinitions = LoopGuard(maxLoop: conf.maxDefinitions, desc: "max definitions")
|
||||
result.maxChoices = LoopGuard(maxLoop: conf.maxChoices, desc: "max choices and concats")
|
||||
|
||||
proc init*(T: type Parser, stream: InputStream, conf = defaultParserConf()): T =
|
||||
proc init*(T: type Parser, stream: InputStream, conf = defaultParserConf()): T {.gcsafe, raises: [].} =
|
||||
let names = newNameCache()
|
||||
T(lex: Lexer.init(stream, names, conf.lexerConf),
|
||||
emptyNode: Node(kind: nkEmpty, pos: Pos()),
|
||||
|
@ -121,7 +123,10 @@ proc init*(T: type Parser, stream: InputStream, conf = defaultParserConf()): T =
|
|||
flags: conf.flags
|
||||
)
|
||||
|
||||
proc init*(T: type Parser, stream: InputStream, names: NameCache, conf = defaultParserConf()): T =
|
||||
proc init*(T: type Parser,
|
||||
stream: InputStream,
|
||||
names: NameCache,
|
||||
conf = defaultParserConf()): T {.gcsafe, raises: [].} =
|
||||
T(lex: Lexer.init(stream, names, conf.lexerConf),
|
||||
emptyNode: Node(kind: nkEmpty, pos: Pos()),
|
||||
conf: toInternalConf(conf),
|
||||
|
@ -143,7 +148,7 @@ template currToken*: TokKind =
|
|||
template currName*: Keyword =
|
||||
toKeyword(q.lex.name)
|
||||
|
||||
proc parserError*(q: var Parser, err: ParserError, args: varargs[string, `$`]) =
|
||||
proc parserError*(q: var Parser, err: ParserError, args: varargs[string, `$`]) {.gcsafe, raises: [].} =
|
||||
q.error = err
|
||||
if currToken == tokError:
|
||||
q.err = q.lex.err
|
||||
|
@ -151,24 +156,27 @@ proc parserError*(q: var Parser, err: ParserError, args: varargs[string, `$`]) =
|
|||
|
||||
q.err.pos = q.pos
|
||||
q.err.level = elError
|
||||
case err
|
||||
of errUnexpectedToken, errExpectToken:
|
||||
doAssert(args.len >= 1)
|
||||
q.err.message = "get $1, expect $2" % [$currToken, args[0]]
|
||||
of errUnexpectedName, errExpectName, errInvalidDirectiveLoc, errInvalidName:
|
||||
doAssert(args.len >= 1)
|
||||
if currToken != tokName:
|
||||
try:
|
||||
case err
|
||||
of errUnexpectedToken, errExpectToken:
|
||||
doAssert(args.len >= 1)
|
||||
q.err.message = "get $1, expect $2" % [$currToken, args[0]]
|
||||
of errUnexpectedName, errExpectName, errInvalidDirectiveLoc, errInvalidName:
|
||||
doAssert(args.len >= 1)
|
||||
if currToken != tokName:
|
||||
q.err.message = "get $1, expect $2" % [$currToken, args[0]]
|
||||
else:
|
||||
q.err.message = "get '$1', expect $2" % [q.lex.name.s, args[0]]
|
||||
of errRecursionLimit:
|
||||
doAssert(args.len >= 2)
|
||||
q.err.message = "recursion limit $1 reached for $2" % [args[0], args[1]]
|
||||
of errLoopLimit:
|
||||
doAssert(args.len >= 2)
|
||||
q.err.message = "loop limit $1 reached for $2" % [args[0], args[1]]
|
||||
else:
|
||||
q.err.message = "get '$1', expect $2" % [q.lex.name.s, args[0]]
|
||||
of errRecursionLimit:
|
||||
doAssert(args.len >= 2)
|
||||
q.err.message = "recursion limit $1 reached for $2" % [args[0], args[1]]
|
||||
of errLoopLimit:
|
||||
doAssert(args.len >= 2)
|
||||
q.err.message = "loop limit $1 reached for $2" % [args[0], args[1]]
|
||||
else:
|
||||
doAssert(false, "unimplemented parser error " & $err)
|
||||
doAssert(false, "unimplemented parser error " & $err)
|
||||
except ValueError as exc:
|
||||
doAssert(false, exc.msg)
|
||||
|
||||
template nextToken* =
|
||||
q.lex.next()
|
||||
|
@ -176,6 +184,8 @@ template nextToken* =
|
|||
template isKeyword*(x: Keyword): bool =
|
||||
q.lex.tok == tokName and currName() == x
|
||||
|
||||
{.push hint[XCannotRaiseY]: off.}
|
||||
|
||||
macro callParser*(parserInst, parser, dest: untyped): untyped =
|
||||
case parser.kind
|
||||
of nnkCall:
|
||||
|
@ -189,6 +199,8 @@ macro callParser*(parserInst, parser, dest: untyped): untyped =
|
|||
else:
|
||||
error("unsupported parser")
|
||||
|
||||
{.pop.}
|
||||
|
||||
# will inject new 'destination' symbol and
|
||||
# shadow available symbol with same name
|
||||
template `:=`*(dest, parser: untyped) =
|
||||
|
@ -328,7 +340,7 @@ template rgPop(x: RecursionGuard, v: int): untyped =
|
|||
proc valueLiteral*(q: var Parser, isConst: bool, val: var Node)
|
||||
|
||||
# reserved word(prefixed with '__') used exclusively for instrospection
|
||||
proc validName(q: var Parser, name: Name): bool =
|
||||
proc validName(q: var Parser, name: Name): bool {.gcsafe, raises: [].} =
|
||||
if pfAcceptReservedWords in q.flags:
|
||||
return true
|
||||
|
||||
|
@ -506,3 +518,5 @@ proc typeRef*(q: var Parser, resType: var Node) =
|
|||
|
||||
expectOptional tokBang:
|
||||
resType = newTree(nkNonNullType, resType)
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -8,11 +8,13 @@
|
|||
# those terms.
|
||||
|
||||
import
|
||||
std/[tables, strutils],
|
||||
std/[tables],
|
||||
stew/[results],
|
||||
./common/[names, ast, ast_helper, response, respstream],
|
||||
./common/[names, ast, ast_helper, response, respstream, errors],
|
||||
./graphql, ./validator
|
||||
|
||||
{.push gcsafe, raises: [] .}
|
||||
|
||||
template `@=`(dest: untyped, validator: untyped) =
|
||||
let dest {.inject.} = callValidator(ctx, validator)
|
||||
if ctx.errKind != ErrNone:
|
||||
|
@ -38,7 +40,7 @@ proc coerceScalar(ctx: GraphqlRef, fieldName, fieldType, resval: Node): Node =
|
|||
proc coerceEnum(ctx: GraphqlRef, fieldName, fieldType, resval: Node): Node =
|
||||
if resval.kind != nkString:
|
||||
ctx.error(ErrScalarError, fieldName, resval,
|
||||
"expect '$1' got '$2'" % [$fieldType, $resval.kind])
|
||||
"expect '" & $fieldType & "' got '" & $resval.kind & "'")
|
||||
return respNull()
|
||||
let name = ctx.names.insert(resval.stringVal)
|
||||
if fieldType.sym.enumVals.hasKey(name):
|
||||
|
@ -262,9 +264,15 @@ proc executeRequest*(ctx: GraphqlRef, resp: RespStream,
|
|||
{.gcsafe.}:
|
||||
var res = respNull()
|
||||
ctx.executeRequestImpl(res, opName)
|
||||
resp.serialize(res)
|
||||
|
||||
try:
|
||||
resp.serialize(res)
|
||||
except CatchableError as exc:
|
||||
return err(@[errorError(exc.msg)])
|
||||
|
||||
if ctx.errKind == ErrNone:
|
||||
ok()
|
||||
else:
|
||||
err(ctx.errors)
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -53,7 +53,7 @@ type
|
|||
|
||||
InstrumentResult* = Result[void, string]
|
||||
InstrumentProc* = proc(ud: InstrumentRef, flag: InstrumentFlag,
|
||||
params, node: Node): InstrumentResult {.cdecl, gcsafe, raises: [Defect, CatchableError].}
|
||||
params, node: Node): InstrumentResult {.cdecl, gcsafe, raises: [].}
|
||||
|
||||
InstrumentRef* = ref InstrumentObj
|
||||
InstrumentObj* = object of RootObj
|
||||
|
@ -80,10 +80,10 @@ type
|
|||
RespResult* = Result[Node, string]
|
||||
|
||||
CoercionProc* = proc(ctx: GraphqlRef,
|
||||
typeNode, node: Node): NodeResult {.cdecl, gcsafe, noSideEffect.}
|
||||
typeNode, node: Node): NodeResult {.cdecl, gcsafe, noSideEffect, raises:[].}
|
||||
|
||||
ResolverProc* = proc(ud: RootRef, params: Args,
|
||||
parent: Node): RespResult {.cdecl, gcsafe, raises: [Defect, CatchableError].}
|
||||
parent: Node): RespResult {.cdecl, gcsafe, raises:[].}
|
||||
|
||||
ResolverRef* = ref ResolverObj
|
||||
ResolverObj* = object
|
||||
|
@ -112,6 +112,8 @@ type
|
|||
|
||||
const resObjValidKind* = {nkString, nkSym, nkName, nkNamedType, nkMap}
|
||||
|
||||
{.push gcsafe, raises: [] .}
|
||||
|
||||
template findType*(name: Name): Node =
|
||||
ctx.typeTable.getOrDefault(name)
|
||||
|
||||
|
@ -164,31 +166,34 @@ proc nonEmptyPos(node: Node): Pos =
|
|||
|
||||
proc fatal*(ctx: GraphqlRef, err: GraphqlError, node: Node, msg: varargs[string, `$`]) =
|
||||
ctx.errKind = err
|
||||
ctx.errors.add ErrorDesc(
|
||||
level: elFatal,
|
||||
pos: node.nonEmptyPos,
|
||||
path: copyTree(ctx.path),
|
||||
message:
|
||||
case err
|
||||
of ErrNoRoot:
|
||||
"Have more than one root operation, requires operationName"
|
||||
of ErrOperationNotFound:
|
||||
"Operation not found: '$1'" % [msg[0]]
|
||||
of ErrTypeUndefined:
|
||||
"Resolver not found: '$1'" % [msg[0]]
|
||||
of ErrNoImpl:
|
||||
"Implementation not found: '$1' of '$2'" % [msg[0], msg[1]]
|
||||
of ErrValueError:
|
||||
"Field '$1' cannot be resolved: \"$2\"" % [msg[0], msg[1]]
|
||||
of ErrNotNullable:
|
||||
"Field '$1' should not return null" % [msg[0]]
|
||||
of ErrIncompatType:
|
||||
"Field '$1' expect '$2' but got '$3'" % [msg[0], msg[1], msg[2]]
|
||||
of ErrInstrument:
|
||||
"Instrument Error: " & msg[0]
|
||||
else:
|
||||
"ASSERT: UNSPECIFIED ERR KIND: " & $err
|
||||
)
|
||||
try:
|
||||
ctx.errors.add ErrorDesc(
|
||||
level: elFatal,
|
||||
pos: node.nonEmptyPos,
|
||||
path: copyTree(ctx.path),
|
||||
message:
|
||||
case err
|
||||
of ErrNoRoot:
|
||||
"Have more than one root operation, requires operationName"
|
||||
of ErrOperationNotFound:
|
||||
"Operation not found: '$1'" % [msg[0]]
|
||||
of ErrTypeUndefined:
|
||||
"Resolver not found: '$1'" % [msg[0]]
|
||||
of ErrNoImpl:
|
||||
"Implementation not found: '$1' of '$2'" % [msg[0], msg[1]]
|
||||
of ErrValueError:
|
||||
"Field '$1' cannot be resolved: \"$2\"" % [msg[0], msg[1]]
|
||||
of ErrNotNullable:
|
||||
"Field '$1' should not return null" % [msg[0]]
|
||||
of ErrIncompatType:
|
||||
"Field '$1' expect '$2' but got '$3'" % [msg[0], msg[1], msg[2]]
|
||||
of ErrInstrument:
|
||||
"Instrument Error: " & msg[0]
|
||||
else:
|
||||
"ASSERT: UNSPECIFIED ERR KIND: " & $err
|
||||
)
|
||||
except ValueError as exc:
|
||||
doAssert(false, exc.msg)
|
||||
|
||||
func getArticle(x: string): string =
|
||||
const vowels = {'a','A','i','I','e','E','o','O'}
|
||||
|
@ -200,71 +205,74 @@ func getArticle(x: string): string =
|
|||
|
||||
proc error*(ctx: GraphqlRef, err: GraphqlError, node: Node, msg: varargs[string, `$`]) =
|
||||
ctx.errKind = err
|
||||
ctx.errors.add ErrorDesc(
|
||||
pos: node.nonEmptyPos,
|
||||
level: elError,
|
||||
path: copyTree(ctx.path),
|
||||
message:
|
||||
case err
|
||||
of ErrDuplicateName:
|
||||
"duplicate name '$1'" % [$node]
|
||||
of ErrDirNoRepeat:
|
||||
"directive is non repeatable '$1'" % [$node]
|
||||
of ErrOnlyOne:
|
||||
"only one '$1' allowed" % [msg[0]]
|
||||
of ErrTypeUndefined:
|
||||
"type not defined '$1'" % [$node]
|
||||
of ErrTypeMismatch:
|
||||
let typ = msg[0]
|
||||
let an = getArticle(typ)
|
||||
"'$1' is $2 '$3', expect '$4'" % [$node, an, typ, msg[1]]
|
||||
of ErrCyclicReference:
|
||||
"cyclic reference detected for '$1'" % [$node]
|
||||
of ErrDirectiveMisLoc:
|
||||
"directive '$1' doesn't specify '$2' location" % [$node, msg[0]]
|
||||
of ErrNoImpl:
|
||||
"no '$2' '$1' implementation found" % [$node, msg[0]]
|
||||
of ErrValueError:
|
||||
"the value of '$1' can't be '$2'" % [$node, msg[0]]
|
||||
of ErrNoArg:
|
||||
"field '$1' need argument '$2'" % [$node, msg[0]]
|
||||
of ErrArgTypeMismatch:
|
||||
"arg '$1' of field '$2' type mismatch with '$3'" % [$node, msg[0], msg[1]]
|
||||
of ErrIncompatType:
|
||||
"'$1' has incompatible type with '$2'" % [$node, msg[0]]
|
||||
of ErrNotUsed:
|
||||
"'$1' is not used" % [$node]
|
||||
of ErrNotPartOf:
|
||||
"'$1' is not part of '$2'" % [$node, msg[0]]
|
||||
of ErrFieldIsRequired:
|
||||
"field '$1' is required in '$2', see '$3'" % [$node, msg[0], msg[1]]
|
||||
of ErrNoRoot:
|
||||
"no root operation '$1' available" % [msg[0]]
|
||||
of ErrFieldArgUndefined:
|
||||
"arg '$1' not defined in field '$2' of '$3'" % [$node, msg[0], msg[1]]
|
||||
of ErrFieldNotinArg:
|
||||
"field '$1' of arg '$2' should not empty" % [msg[0], $node]
|
||||
of ErrNotNullable:
|
||||
"$1 '$2' of '$3' should not nullable" % [msg[1], $node, msg[0]]
|
||||
of ErrRequireSelection:
|
||||
"field '$1' return type is '$2' requires selection set" % [$node, msg[0]]
|
||||
of ErrDirArgUndefined:
|
||||
"arg '$1' not defined in directive '$2'" % [$node, msg[0]]
|
||||
of ErrMergeConflict:
|
||||
"field '$1' have merge conflict: $2" % [$node, msg[0]]
|
||||
of ErrScalarError:
|
||||
"'$1' got '$2': $3" % [$node, msg[0], msg[1]]
|
||||
of ErrEnumError:
|
||||
"'$1' got '$2'('$3'), expect '$4'" % [$node, msg[0], msg[1], msg[2]]
|
||||
of ErrDirNotAllowed:
|
||||
"directive '$1' is not allowed at subscription root field" % [$node]
|
||||
of ErrInvalidArgDeprecation:
|
||||
"argument '$2' of '$1' can't be deprecated: non null or no default value" % [$node, msg[0]]
|
||||
of ErrInvalidFieldDeprecation:
|
||||
"field '$2' of '$1' can't be deprecated: non null or no default value" % [$node, msg[0]]
|
||||
else:
|
||||
"ASSERT: UNSPECIFIED ERR KIND: " & $err
|
||||
)
|
||||
try:
|
||||
ctx.errors.add ErrorDesc(
|
||||
pos: node.nonEmptyPos,
|
||||
level: elError,
|
||||
path: copyTree(ctx.path),
|
||||
message:
|
||||
case err
|
||||
of ErrDuplicateName:
|
||||
"duplicate name '$1'" % [$node]
|
||||
of ErrDirNoRepeat:
|
||||
"directive is non repeatable '$1'" % [$node]
|
||||
of ErrOnlyOne:
|
||||
"only one '$1' allowed" % [msg[0]]
|
||||
of ErrTypeUndefined:
|
||||
"type not defined '$1'" % [$node]
|
||||
of ErrTypeMismatch:
|
||||
let typ = msg[0]
|
||||
let an = getArticle(typ)
|
||||
"'$1' is $2 '$3', expect '$4'" % [$node, an, typ, msg[1]]
|
||||
of ErrCyclicReference:
|
||||
"cyclic reference detected for '$1'" % [$node]
|
||||
of ErrDirectiveMisLoc:
|
||||
"directive '$1' doesn't specify '$2' location" % [$node, msg[0]]
|
||||
of ErrNoImpl:
|
||||
"no '$2' '$1' implementation found" % [$node, msg[0]]
|
||||
of ErrValueError:
|
||||
"the value of '$1' can't be '$2'" % [$node, msg[0]]
|
||||
of ErrNoArg:
|
||||
"field '$1' need argument '$2'" % [$node, msg[0]]
|
||||
of ErrArgTypeMismatch:
|
||||
"arg '$1' of field '$2' type mismatch with '$3'" % [$node, msg[0], msg[1]]
|
||||
of ErrIncompatType:
|
||||
"'$1' has incompatible type with '$2'" % [$node, msg[0]]
|
||||
of ErrNotUsed:
|
||||
"'$1' is not used" % [$node]
|
||||
of ErrNotPartOf:
|
||||
"'$1' is not part of '$2'" % [$node, msg[0]]
|
||||
of ErrFieldIsRequired:
|
||||
"field '$1' is required in '$2', see '$3'" % [$node, msg[0], msg[1]]
|
||||
of ErrNoRoot:
|
||||
"no root operation '$1' available" % [msg[0]]
|
||||
of ErrFieldArgUndefined:
|
||||
"arg '$1' not defined in field '$2' of '$3'" % [$node, msg[0], msg[1]]
|
||||
of ErrFieldNotinArg:
|
||||
"field '$1' of arg '$2' should not empty" % [msg[0], $node]
|
||||
of ErrNotNullable:
|
||||
"$1 '$2' of '$3' should not nullable" % [msg[1], $node, msg[0]]
|
||||
of ErrRequireSelection:
|
||||
"field '$1' return type is '$2' requires selection set" % [$node, msg[0]]
|
||||
of ErrDirArgUndefined:
|
||||
"arg '$1' not defined in directive '$2'" % [$node, msg[0]]
|
||||
of ErrMergeConflict:
|
||||
"field '$1' have merge conflict: $2" % [$node, msg[0]]
|
||||
of ErrScalarError:
|
||||
"'$1' got '$2': $3" % [$node, msg[0], msg[1]]
|
||||
of ErrEnumError:
|
||||
"'$1' got '$2'('$3'), expect '$4'" % [$node, msg[0], msg[1], msg[2]]
|
||||
of ErrDirNotAllowed:
|
||||
"directive '$1' is not allowed at subscription root field" % [$node]
|
||||
of ErrInvalidArgDeprecation:
|
||||
"argument '$2' of '$1' can't be deprecated: non null or no default value" % [$node, msg[0]]
|
||||
of ErrInvalidFieldDeprecation:
|
||||
"field '$2' of '$1' can't be deprecated: non null or no default value" % [$node, msg[0]]
|
||||
else:
|
||||
"ASSERT: UNSPECIFIED ERR KIND: " & $err
|
||||
)
|
||||
except ValueError as exc:
|
||||
doAssert(false, exc.msg)
|
||||
|
||||
proc getScalar*(ctx: GraphqlRef, locType: Node): CoercionProc =
|
||||
if locType.sym.scalar.isNil:
|
||||
|
@ -314,3 +322,5 @@ template execInstrument*(flag: InstrumentFlag, params, node: Node) =
|
|||
if res.isErr:
|
||||
ctx.fatal(ErrInstrument, node, res.error)
|
||||
return
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -47,6 +47,8 @@ type
|
|||
const
|
||||
GraphQLPath* = "/graphql"
|
||||
|
||||
{.push gcsafe, raises: [].}
|
||||
|
||||
proc createSession(secure: bool,
|
||||
maxRedirections = HttpMaxRedirections,
|
||||
connectTimeout = HttpConnectTimeout,
|
||||
|
@ -204,29 +206,31 @@ proc addEnumVar*(ctx: GraphqlHttpClientRef, name: string, val: string) =
|
|||
value: Node(kind: nkEnum, pos: Pos(), name: ctx.names.insert(val))
|
||||
)
|
||||
|
||||
proc parseVars(ctx: GraphqlHttpClientRef, input: InputStream): ParseResult =
|
||||
var parser = Parser.init(input, ctx.names)
|
||||
parser.lex.next()
|
||||
if parser.lex.tok == tokEof:
|
||||
return ok()
|
||||
proc parseVars(ctx: GraphqlHttpClientRef, input: InputStream): ParseResult =
|
||||
try:
|
||||
var parser = Parser.init(input, ctx.names)
|
||||
parser.lex.next()
|
||||
if parser.lex.tok == tokEof:
|
||||
return ok()
|
||||
|
||||
var values: Node
|
||||
parser.rgReset(rgValueLiteral) # recursion guard
|
||||
parser.valueLiteral(isConst = true, values)
|
||||
if parser.error != errNone:
|
||||
return err(parser.err)
|
||||
var values: Node
|
||||
parser.rgReset(rgValueLiteral) # recursion guard
|
||||
parser.valueLiteral(isConst = true, values)
|
||||
if parser.error != errNone:
|
||||
return err(parser.err)
|
||||
|
||||
for n in values:
|
||||
ctx.varTable.add VarPair(name: $n[0].name, value: n[1])
|
||||
for n in values:
|
||||
ctx.varTable.add VarPair(name: $n[0].name, value: n[1])
|
||||
ok()
|
||||
except IOError as exc:
|
||||
err(errorError(exc.msg))
|
||||
|
||||
ok()
|
||||
|
||||
proc parseVars*(ctx: GraphqlHttpClientRef, input: string): ParseResult {.gcsafe.} =
|
||||
proc parseVars*(ctx: GraphqlHttpClientRef, input: string): ParseResult {.gcsafe.} =
|
||||
{.gcsafe.}:
|
||||
var stream = unsafeMemoryInput(input)
|
||||
ctx.parseVars(stream)
|
||||
|
||||
proc parseVars*(ctx: GraphqlHttpClientRef, input: openArray[byte]): ParseResult {.gcsafe.} =
|
||||
proc parseVars*(ctx: GraphqlHttpClientRef, input: openArray[byte]): ParseResult {.gcsafe.} =
|
||||
{.gcsafe.}:
|
||||
var stream = unsafeMemoryInput(input)
|
||||
ctx.parseVars(stream)
|
||||
|
@ -298,3 +302,5 @@ proc sendRequest*(ctx: GraphqlHttpClientRef, query: string,
|
|||
proc closeWait*(ctx: GraphqlHttpClientRef) {.async.} =
|
||||
if ctx.session.isNil.not:
|
||||
await ctx.session.closeWait()
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -45,11 +45,16 @@ type
|
|||
|
||||
GraphqlHttpServerRef* = ref GraphqlHttpServer
|
||||
|
||||
{.push gcsafe, raises: [] .}
|
||||
|
||||
template exec(executor: untyped) =
|
||||
let res = callValidator(ctx, executor)
|
||||
if res.isErr:
|
||||
return (Http400, jsonErrorResp(res.error))
|
||||
|
||||
proc errorResp(msg: string): string =
|
||||
"""{"errors":[{"message":""" & escapeJson(msg) & "}]}"
|
||||
|
||||
proc execRequest(server: GraphqlHttpServerRef, ro: RequestObject): (HttpCode, string) {.gcsafe.} =
|
||||
let ctx = server.graphql
|
||||
|
||||
|
@ -59,18 +64,21 @@ proc execRequest(server: GraphqlHttpServerRef, ro: RequestObject): (HttpCode, st
|
|||
|
||||
ctx.addVariables(ro.variables)
|
||||
|
||||
let query = toString(ro.query)
|
||||
exec parseQuery(query)
|
||||
let resp = JsonRespStream.new()
|
||||
let opName = if ro.operationName.kind == nkNull:
|
||||
""
|
||||
else:
|
||||
toString(ro.operationName)
|
||||
let res = ctx.executeRequest(respStream(resp), opName)
|
||||
if res.isErr:
|
||||
(Http400, jsonErrorResp(res.error, resp.getBytes()))
|
||||
else:
|
||||
(Http200, jsonOkResp(resp.getBytes()))
|
||||
try:
|
||||
let query = toString(ro.query)
|
||||
exec parseQuery(query)
|
||||
let resp = JsonRespStream.new()
|
||||
let opName = if ro.operationName.kind == nkNull:
|
||||
""
|
||||
else:
|
||||
toString(ro.operationName)
|
||||
let res = ctx.executeRequest(respStream(resp), opName)
|
||||
if res.isErr:
|
||||
(Http400, jsonErrorResp(res.error, resp.getBytes()))
|
||||
else:
|
||||
(Http200, jsonOkResp(resp.getBytes()))
|
||||
except IOError as exc:
|
||||
(Http400, errorResp(exc.msg))
|
||||
|
||||
proc getContentTypes(request: HttpRequestRef): set[ContentType] =
|
||||
let conType = request.headers.getList("content-type")
|
||||
|
@ -174,7 +182,7 @@ proc processUIRequest(server: GraphqlHttpServerRef, request: HttpRequestRef): Fu
|
|||
|
||||
proc routingRequest(server: GraphqlHttpServerRef, r: RequestFence): Future[HttpResponseRef] {.gcsafe, async.} =
|
||||
if r.isErr():
|
||||
return dumbResponse()
|
||||
return defaultResponse()
|
||||
|
||||
let request = r.get()
|
||||
|
||||
|
@@ -215,8 +223,12 @@ proc new*(t: typedesc[GraphqlHttpServerRef],
     authHooks: authHooks
   )

-  proc processCallback(rf: RequestFence): Future[HttpResponseRef] =
-    routingRequest(server, rf)
+  proc processCallback(rf: RequestFence): Future[HttpResponseRef] {.
+       async: (raises: [CancelledError]).} =
+    try:
+      return await routingRequest(server, rf)
+    except CatchableError:
+      return defaultResponse()

   let sres = HttpServerRef.new(address, processCallback, serverFlags,
                                socketFlags, serverUri, serverIdent,
@@ -253,8 +265,12 @@ proc new*(t: typedesc[GraphqlHttpServerRef],
     authHooks: authHooks
   )

-  proc processCallback(rf: RequestFence): Future[HttpResponseRef] =
-    routingRequest(server, rf)
+  proc processCallback(rf: RequestFence): Future[HttpResponseRef] {.
+       async: (raises: [CancelledError]).} =
+    try:
+      return await routingRequest(server, rf)
+    except CatchableError:
+      return defaultResponse()

   let sres = SecureHttpServerRef.new(address, processCallback,
                                      tlsPrivateKey, tlsCertificate, serverFlags,
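The callback change above follows the typed-raises convention of newer chronos releases: the HTTP callback is declared async: (raises: [CancelledError]), so any other exception must be turned into a response inside the callback instead of escaping. A minimal sketch of that shape (hypothetical handler names; assumes a chronos version with typed async raises support):

import chronos

proc fetch(): Future[string] {.async.} =
  raise newException(ValueError, "backend failed")

proc handler(): Future[string] {.async: (raises: [CancelledError]).} =
  # only CancelledError may escape; everything else becomes a value
  try:
    return await fetch()
  except CatchableError:
    return "fallback response"

when isMainModule:
  echo waitFor handler()
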
@ -302,3 +318,5 @@ proc closeWait*(rs: GraphqlHttpServerRef) {.async.} =
|
|||
proc join*(rs: GraphqlHttpServerRef): Future[void] =
|
||||
## Wait until GraphQL server will not be closed.
|
||||
rs.server.join()
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -19,6 +19,8 @@ type
|
|||
maxComplexity: int
|
||||
calculator: ComplexityCalculator
|
||||
|
||||
{.push gcsafe, raises: [] .}
|
||||
|
||||
proc traverse(qc: QueryComplexity, fieldSet: FieldSet): int =
|
||||
for field in fieldSet:
|
||||
inc(result, qc.calculator(qc, field))
|
||||
|
@ -46,3 +48,5 @@ proc new*(_: type QueryComplexity, calc: ComplexityCalculator,
|
|||
var qc = QueryComplexity()
|
||||
qc.init(calc, maxComplexity)
|
||||
qc
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -81,17 +81,22 @@ type
|
|||
conf : LexConfInternal
|
||||
flags* : set[LexerFlag]
|
||||
|
||||
proc defaultLexConf*(): LexConf =
|
||||
{.push gcsafe, raises: [IOError].}
|
||||
|
||||
proc defaultLexConf*(): LexConf {.gcsafe, raises: [].} =
|
||||
result.maxIdentChars = 128
|
||||
result.maxDigits = 128
|
||||
result.maxStringChars = 2048
|
||||
|
||||
proc toInternalConf(conf: LexConf): LexConfInternal =
|
||||
proc toInternalConf(conf: LexConf): LexConfInternal {.gcsafe, raises: [].} =
|
||||
result.maxIdentChars = LoopGuard(maxLoop: conf.maxIdentChars , desc: "max chars in ident")
|
||||
result.maxDigits = LoopGuard(maxLoop: conf.maxDigits , desc: "max digits in number")
|
||||
result.maxStringChars = LoopGuard(maxLoop: conf.maxStringChars, desc: "max chars in string")
|
||||
|
||||
proc init*(T: type Lexer, stream: InputStream, names: NameCache, conf = defaultLexConf()): T =
|
||||
proc init*(T: type Lexer,
|
||||
stream: InputStream,
|
||||
names: NameCache,
|
||||
conf = defaultLexConf()): T {.gcsafe, raises: [].} =
|
||||
result = Lexer(
|
||||
stream: stream,
|
||||
names: names,
|
||||
|
@ -106,19 +111,19 @@ template peek(s: InputStream): char =
|
|||
template read(s: InputStream): char =
|
||||
char inputs.read(s)
|
||||
|
||||
func peekSpecial(lex: Lexer): string =
|
||||
func peekSpecial(lex: Lexer): string {.gcsafe, raises: [].} =
|
||||
"\\" & $int(lex.stream.peek)
|
||||
|
||||
proc col*(lex: Lexer): int =
|
||||
proc col*(lex: Lexer): int {.gcsafe, raises: [].} =
|
||||
lex.stream.pos - lex.lineStart
|
||||
|
||||
proc tokenStartCol*(lex: Lexer): int =
|
||||
proc tokenStartCol*(lex: Lexer): int {.gcsafe, raises: [].} =
|
||||
1 + lex.tokenStart - lex.lineStart
|
||||
|
||||
func pos*(lex: Lexer): Pos =
|
||||
func pos*(lex: Lexer): Pos {.gcsafe, raises: [].} =
|
||||
Pos(line: lex.line.uint16, col: lex.tokenStartCol.uint16)
|
||||
|
||||
proc lexerError(lex: var Lexer, errKind: LexerError, args: varargs[string, `$`]) =
|
||||
proc lexerError(lex: var Lexer, errKind: LexerError, args: varargs[string, `$`]) {.gcsafe, raises: [].} =
|
||||
lex.error = errKind
|
||||
lex.tok = tokError
|
||||
lex.tokenStart = lex.stream.pos
|
||||
|
@ -129,13 +134,16 @@ proc lexerError(lex: var Lexer, errKind: LexerError, args: varargs[string, `$`])
|
|||
lex.err.level = elError
|
||||
lex.err.message = $errKind
|
||||
|
||||
case errKind
|
||||
of errInvalidEscape, errInvalidUnicode, errInvalidChar, errOrphanSurrogate:
|
||||
lex.err.message = $errKind % [args[0]]
|
||||
of errLoopLimit:
|
||||
lex.err.message = $errKind % [args[0], args[1]]
|
||||
else:
|
||||
lex.err.message = $errKind
|
||||
try:
|
||||
case errKind
|
||||
of errInvalidEscape, errInvalidUnicode, errInvalidChar, errOrphanSurrogate:
|
||||
lex.err.message = $errKind % [args[0]]
|
||||
of errLoopLimit:
|
||||
lex.err.message = $errKind % [args[0], args[1]]
|
||||
else:
|
||||
lex.err.message = $errKind
|
||||
except ValueError as exc:
|
||||
doAssert(false, exc.msg)
|
||||
|
||||
template safePeek(lex: Lexer, x: char): bool =
|
||||
lex.stream.readable and lex.stream.peek == x
|
||||
|
@ -146,7 +154,7 @@ template safePeek(lex: Lexer, x: set[char]): bool =
|
|||
template safePeekNotIn(lex: Lexer, x: set[char]): bool =
|
||||
lex.stream.readable and lex.stream.peek notin x
|
||||
|
||||
proc skipBOM*(lex: var Lexer): bool =
|
||||
proc skipBOM*(lex: var Lexer): bool {.gcsafe, raises: [].} =
|
||||
if lex.stream.peek == char(0xFE):
|
||||
advance lex.stream
|
||||
if lex.stream.peek == char(0xFF):
|
||||
|
@ -158,7 +166,7 @@ proc skipBOM*(lex: var Lexer): bool =
|
|||
else:
|
||||
return true
|
||||
|
||||
proc handleLF(lex: var Lexer) {.inline.} =
|
||||
proc handleLF(lex: var Lexer) {.gcsafe, raises: [].} =
|
||||
advance lex.stream
|
||||
lex.line += 1
|
||||
lex.lineStart = lex.stream.pos
|
||||
|
@ -278,7 +286,7 @@ proc scanNumber(lex: var Lexer) =
|
|||
lex.lexerError(errNoDotOrName)
|
||||
return
|
||||
|
||||
func charTo(T: type, c: char): T {.inline.} =
|
||||
func charTo(T: type, c: char): T {.gcsafe, raises: [].} =
|
||||
case c
|
||||
of {'0'..'9'}: result = T(c) - T('0')
|
||||
of {'a'..'f'}: result = T(c) - T('a') + T(10)
|
||||
|
@ -560,3 +568,5 @@ proc next*(lex: var Lexer) =
|
|||
else:
|
||||
lex.lexerError(errInvalidChar, lex.peekSpecial)
|
||||
advance lex.stream
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -20,6 +20,8 @@ export
|
|||
common_parser.defaultParserConf,
|
||||
ast
|
||||
|
||||
{.push gcsafe, raises: [IOError] .}
|
||||
|
||||
proc definition(q: var Parser, def: var Node) =
|
||||
case currToken
|
||||
of tokLCurly:
|
||||
|
@ -52,3 +54,5 @@ proc parseDocument*(q: var Parser, doc: var FullDocument) =
|
|||
repeatUntil(q.conf.maxDefinitions, tokEof):
|
||||
def := definition
|
||||
doc.root <- def
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -61,6 +61,8 @@ const
|
|||
12,36,12,12,12,12,12,12,12,12,12,12
|
||||
]
|
||||
|
||||
{.push gcsafe, raises: [] .}
|
||||
|
||||
proc validate*[T: byte | char](_: type Utf8, text: openArray[T]): bool =
|
||||
var state = 0
|
||||
for c in text:
|
||||
|
@ -331,3 +333,5 @@ proc toPair*(_: type Utf16, cp: int): Utf16Pair =
|
|||
hi: uint16((c shr Utf16Shift) + highBegin),
|
||||
lo: uint16((c and Utf16Mask) + lowBegin)
|
||||
)
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -19,6 +19,8 @@ export
|
|||
common_parser.defaultParserConf,
|
||||
ast
|
||||
|
||||
{.push gcsafe, raises: [IOError].}
|
||||
|
||||
# forward declaration
|
||||
proc selectionSet(q: var Parser, sels: var Node)
|
||||
|
||||
|
@ -171,3 +173,5 @@ proc parseDocument*(q: var Parser, doc: var QueryDocument) =
|
|||
repeatUntil(q.conf.maxDefinitions, tokEof):
|
||||
def := definition
|
||||
doc.root <- def
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -19,6 +19,8 @@ export
|
|||
common_parser.defaultParserConf,
|
||||
ast
|
||||
|
||||
{.push gcsafe, raises: [IOError].}
|
||||
|
||||
proc operationKind*(q: var Parser, opKind: var Node) =
|
||||
if currToken == tokName and currName in OperationNames:
|
||||
opKind ::= name
|
||||
|
@ -283,3 +285,5 @@ proc parseDocument*(q: var Parser, doc: var SchemaDocument) =
|
|||
repeatUntil(q.conf.maxDefinitions, tokEof):
|
||||
def := definition
|
||||
doc.root <- def
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -21,6 +21,8 @@ type
|
|||
operationName*: Node
|
||||
variables*: Node
|
||||
|
||||
{.push gcsafe, raises: [IOError].}
|
||||
|
||||
proc errorResp*(r: JsonRespStream, msg: string) =
|
||||
respMap(r):
|
||||
r.field("errors")
|
||||
|
@ -85,7 +87,7 @@ proc jsonOkResp*(data: openArray[byte]): string =
|
|||
okResp(resp, data)
|
||||
resp.getString()
|
||||
|
||||
proc addVariables*(ctx: GraphqlRef, vars: Node) =
|
||||
proc addVariables*(ctx: GraphqlRef, vars: Node) {.gcsafe, raises: [].} =
|
||||
if vars.kind != nkInput:
|
||||
return
|
||||
for n in vars:
|
||||
|
@ -134,16 +136,16 @@ proc decodeRequest*(ctx: GraphqlRef, ro: var RequestObject, k, v: string): Parse
|
|||
of "variables": ro.variables = res.get()
|
||||
else: discard
|
||||
|
||||
proc toQueryNode*(data: string): Node =
|
||||
proc toQueryNode*(data: string): Node {.gcsafe, raises: [].} =
|
||||
Node(kind: nkString, stringVal: data, pos: Pos())
|
||||
|
||||
proc init*(_: type RequestObject): RequestObject =
|
||||
proc init*(_: type RequestObject): RequestObject {.gcsafe, raises: [].} =
|
||||
let empty = Node(kind: nkEmpty, pos: Pos())
|
||||
result.query = empty
|
||||
result.operationName = empty
|
||||
result.variables = empty
|
||||
|
||||
proc requestNodeToObject*(node: Node, ro: var RequestObject) =
|
||||
proc requestNodeToObject*(node: Node, ro: var RequestObject) {.gcsafe, raises: [].} =
|
||||
for n in node:
|
||||
if n.len != 2: continue
|
||||
case $n[0]
|
||||
|
@ -151,7 +153,7 @@ proc requestNodeToObject*(node: Node, ro: var RequestObject) =
|
|||
of "operationName": ro.operationName = n[1]
|
||||
of "variables": ro.variables = n[1]
|
||||
|
||||
proc toString*(node: Node): string =
|
||||
proc toString*(node: Node): string {.gcsafe, raises: [].} =
|
||||
if node.isNil:
|
||||
return ""
|
||||
case node.kind
|
||||
|
@ -161,3 +163,5 @@ proc toString*(node: Node): string =
|
|||
return node.stringVal
|
||||
else:
|
||||
$node
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -30,6 +30,8 @@ type
|
|||
fail: int
|
||||
ok: int
|
||||
|
||||
{.push gcsafe, raises: [].}
|
||||
|
||||
proc removeWhitespaces(x: string): string =
|
||||
const whites = {' ', '\t', '\r', '\n'}
|
||||
type
|
||||
|
@ -70,7 +72,7 @@ proc checkErrors(ctx: GraphqlRef, errors: openArray[ErrorDesc],
|
|||
for i in 0..<min(errors.len, unit.errors.len):
|
||||
check $errors[i] == unit.errors[i]
|
||||
|
||||
proc runExecutor(ctx: GraphqlRef, unit: Unit, testStatusIMPL: var TestStatus) =
|
||||
proc runExecutor(ctx: GraphqlRef, unit: Unit, testStatusIMPL: var TestStatus) {.gcsafe, raises: [IOError].} =
|
||||
var stream = unsafeMemoryInput(unit.code)
|
||||
var parser = Parser.init(stream, ctx.names)
|
||||
parser.flags.incl pfExperimentalFragmentVariables
|
||||
|
@ -109,7 +111,11 @@ proc runExecutor(ctx: GraphqlRef, unit: Unit, testStatusIMPL: var TestStatus) =
|
|||
let execRes = removeWhitespaces(js.getString)
|
||||
check unitRes == execRes
|
||||
|
||||
proc runSuite(ctx: GraphqlRef, savePoint: NameCounter, fileName: string, counter: Counter, purgeSchema: bool) =
|
||||
proc runSuite(ctx: GraphqlRef,
|
||||
savePoint: NameCounter,
|
||||
fileName: string,
|
||||
counter: Counter,
|
||||
purgeSchema: bool) {.gcsafe, raises: [SerializationError, IOError].} =
|
||||
let parts = splitFile(fileName)
|
||||
let cases = Toml.loadFile(fileName, TestCase)
|
||||
suite parts.name:
|
||||
|
@ -134,7 +140,9 @@ proc runSuite(ctx: GraphqlRef, savePoint: NameCounter, fileName: string, counter
|
|||
else:
|
||||
inc counter.fail
|
||||
|
||||
proc executeCases*(ctx: GraphqlRef, caseFolder: string, purgeSchema: bool) =
|
||||
proc executeCases*(ctx: GraphqlRef,
|
||||
caseFolder: string,
|
||||
purgeSchema: bool) {.gcsafe, raises: [SerializationError, OSError, IOError].} =
|
||||
let savePoint = ctx.getNameCounter()
|
||||
var counter = Counter()
|
||||
var fileNames: seq[string]
|
||||
|
@ -148,7 +156,9 @@ proc executeCases*(ctx: GraphqlRef, caseFolder: string, purgeSchema: bool) =
|
|||
|
||||
debugEcho counter[]
|
||||
|
||||
proc main*(ctx: GraphqlRef, caseFolder: string, purgeSchema: bool) =
|
||||
proc main*(ctx: GraphqlRef,
|
||||
caseFolder: string,
|
||||
purgeSchema: bool) {.gcsafe, raises: [SerializationError, OSError, IOError].} =
|
||||
let conf = getConfiguration()
|
||||
if conf.testFile.len == 0:
|
||||
executeCases(ctx, caseFolder, purgeSchema)
|
||||
|
@ -185,3 +195,5 @@ proc processArguments*() =
|
|||
if len(message) > 0:
|
||||
echo message
|
||||
quit(QuitSuccess)
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -25,6 +25,8 @@ type
|
|||
unit*: string
|
||||
convertPath*: string
|
||||
|
||||
{.push gcsafe, raises: [].}
|
||||
|
||||
var testConfig {.threadvar.}: Configuration
|
||||
|
||||
proc initConfiguration(): Configuration =
|
||||
|
@ -56,3 +58,5 @@ proc processArguments*(msg: var string): ConfigStatus =
|
|||
break
|
||||
of cmdEnd:
|
||||
doAssert(false)
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -16,6 +16,8 @@ import
|
|||
const
|
||||
introsKeywords = {introsSchema, introsType, introsTypeName}
|
||||
|
||||
{.push gcsafe, raises: [] .}
|
||||
|
||||
proc `$$`(node: Node): string =
|
||||
case node.kind
|
||||
of nkNonNullType:
|
||||
|
@ -1510,3 +1512,5 @@ proc getOperation*(ctx: GraphqlRef, opName: string): Node =
|
|||
|
||||
execInstrument(iExecBegin, ctx.emptyNode, op)
|
||||
op
|
||||
|
||||
{.pop.}
|
||||
|
|
|
@ -30,6 +30,7 @@ type
|
|||
blocks: Node
|
||||
accounts: Node
|
||||
|
||||
{.push gcsafe, raises: [] .}
|
||||
{.pragma: apiRaises, raises: [].}
|
||||
{.pragma: apiPragma, cdecl, gcsafe, apiRaises.}
|
||||
{.push hint[XDeclaredButNotUsed]: off.}
|
||||
|
@ -38,7 +39,8 @@ proc validateHex(x: Node, minLen = 0): NodeResult =
|
|||
if x.stringVal.len < 2:
|
||||
return err("hex is too short")
|
||||
if x.stringVal.len > 2 + minLen * 2 and minLen != 0:
|
||||
return err("expect hex with len '$1', got '$2'" % [$(2 * minLen + 2), $x.stringVal.len])
|
||||
return err("expect hex with len '" &
|
||||
$(2 * minLen + 2) & "', got '" & $x.stringVal.len & "'")
|
||||
if x.stringVal.len mod 2 != 0:
|
||||
return err("hex must have even number of nibbles")
|
||||
if x.stringVal[0] != '0' or x.stringVal[1] notin {'x', 'X'}:
|
||||
|
@@ -48,33 +50,33 @@ proc validateHex(x: Node, minLen = 0): NodeResult =
      return err("invalid chars in hex")
  ok(x)

proc scalarBytes32(ctx: GraphqlRef, typeNode, node: Node): NodeResult {.cdecl, gcsafe, noSideEffect.} =
proc scalarBytes32(ctx: GraphqlRef, typeNode, node: Node): NodeResult {.cdecl, gcsafe, noSideEffect, raises:[].} =
  ## Bytes32 is a 32 byte binary string,
  ## represented as 0x-prefixed hexadecimal.
  if node.kind != nkString:
    return err("expect hex string, but got '$1'" % [$node.kind])
    return err("expect hex string, but got '" & $node.kind & "'")
  validateHex(node, 32)

proc scalarAddress(ctx: GraphqlRef, typeNode, node: Node): NodeResult {.cdecl, gcsafe, noSideEffect.} =
proc scalarAddress(ctx: GraphqlRef, typeNode, node: Node): NodeResult {.cdecl, gcsafe, noSideEffect, raises:[].} =
  ## Address is a 20 byte Ethereum address,
  ## represented as 0x-prefixed hexadecimal.
  if node.kind != nkString:
    return err("expect hex string, but got '$1'" % [$node.kind])
    return err("expect hex string, but got '" & $node.kind & "'")
  validateHex(node, 20)

proc scalarBytes(ctx: GraphqlRef, typeNode, node: Node): NodeResult {.cdecl, gcsafe, noSideEffect.} =
proc scalarBytes(ctx: GraphqlRef, typeNode, node: Node): NodeResult {.cdecl, gcsafe, noSideEffect, raises:[].} =
  ## Bytes is an arbitrary length binary string,
  ## represented as 0x-prefixed hexadecimal.
  ## An empty byte string is represented as '0x'.
  ## Byte strings must have an even number of hexadecimal nybbles.
  if node.kind != nkString:
    return err("expect hex string, but got '$1'" % [$node.kind])
    return err("expect hex string, but got '" & $node.kind & "'")
  validateHex(node)

proc validateInt(node: Node): NodeResult =
  for c in node.stringVal:
    if c notin Digits:
      return err("invalid char in int: '$1'" % [$c])
      return err("invalid char in int: '" & $c & "'")

  node.stringVal = "0x" & convertBase(node.stringVal, 10, 16)
  if node.stringVal.len > 66:
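A pattern repeated through these scalar hunks is replacing strutils interpolation with plain concatenation. The reason is the exception effect: the % operator is declared as possibly raising ValueError, so it cannot be called from a proc constrained to raises: [], while & on strings raises nothing catchable. A standalone sketch of the difference (not code from this patch):

# Standalone sketch, not part of this patch.
import std/strutils   # only needed for the rejected `%` variant below

{.push raises: [].}

proc errMsgOk(kind: string): string =
  # accepted: string `&` has an empty raises list
  "expect hex string, but got '" & kind & "'"

# proc errMsgRejected(kind: string): string =
#   # the effect checker refuses this under `raises: []`, because
#   # strutils.`%` can raise ValueError on a malformed format string
#   "expect hex string, but got '$1'" % [kind]

{.pop.}

echo errMsgOk("nkInt")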
@@ -102,7 +104,7 @@ proc scalarBigInt(ctx: GraphqlRef, typeNode, node: Node): NodeResult {.cdecl, gc
    # convert it into hex nkString node
    validateInt(node)
  else:
    return err("expect hex/dec string or int, but got '$1'" % [$node.kind])
    return err("expect hex/dec string or int, but got '" & $node.kind & "'")

proc scalarLong(ctx: GraphqlRef, typeNode, node: Node): NodeResult {.cdecl, gcsafe, noSideEffect.} =
  ## Long is a 64 bit unsigned integer.
@@ -111,7 +113,7 @@ proc scalarLong(ctx: GraphqlRef, typeNode, node: Node): NodeResult {.cdecl, gcsa
    # convert it into nkString node
    ok(node)
  else:
    err("expect int, but got '$1'" % [$node.kind])
    err("expect int, but got '" & $node.kind & "'")

proc findKey(node: Node, key: string): RespResult =
  for n in node:
@@ -644,12 +646,16 @@ proc initEthApi*(ctx: GraphqlRef) =
    let name = ctx.createName($n)
    ud.names[n] = name

  let res = ctx.parseLiteralFromFile("playground" / "data" / "ConstantinopleFixTransition.json", {pfJsonCompatibility})
  if res.isErr:
    debugEcho res.error
    quit(QuitFailure)
  try:
    let res = ctx.parseLiteralFromFile("playground" / "data" / "ConstantinopleFixTransition.json", {pfJsonCompatibility})
    if res.isErr:
      debugEcho res.error
      quit(QuitFailure)

  extractData(ud, res.get())
    extractData(ud, res.get())
  except IOError as exc:
    debugEcho exc.msg
    quit(QuitFailure)

  ctx.addResolvers(ud, ud.names[ethAccount ], accountProcs)
  ctx.addResolvers(ud, ud.names[ethLog ], logProcs)
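The hunk above is the recipe used wherever file reading meets a raises: [] boundary: the IOError that the parse helper may raise is caught locally and turned into a diagnostic plus quit, so the enclosing proc's effect list stays empty. A standalone sketch of the same pattern with hypothetical names (loadFixture and the path literal are made up):

# Standalone sketch with hypothetical names, not part of this patch.
import std/os

{.push raises: [].}

proc loadFixture(path: string): string =
  # readFile is declared to raise IOError; catching it here keeps
  # loadFixture itself at raises: []
  try:
    result = readFile(path)
  except IOError as exc:
    debugEcho "cannot load ", path, ": ", exc.msg
    quit(QuitFailure)

{.pop.}

when isMainModule:
  discard loadFixture("playground" / "data" / "example.json")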
@@ -659,3 +665,5 @@ proc initEthApi*(ctx: GraphqlRef) =
  ctx.addResolvers(ud, ud.names[ethSyncState ], syncStateProcs)
  ctx.addResolvers(ud, ud.names[ethPending ], pendingProcs)
  ctx.addResolvers(ud, ud.names[ethQuery ], queryProcs)

{.pop.}
@@ -150,6 +150,9 @@ let starships = [
  )
]

{.push gcsafe, raises: [] .}
{.pragma: ccRaises, raises: [].}

proc charToResp(ctx: Starwars, c: Character): Node =
  var resp = respMap(ctx.names[c.kind])
  resp["id"] = resp(c.id)
@@ -496,16 +499,17 @@ const starshipProcs = {
  "coordinates": starshipCoordinates
}

proc coerceEnum(ctx: GraphqlRef, typeNode, node: Node): NodeResult {.cdecl, gcsafe, noSideEffect.} =
proc coerceEnum(ctx: GraphqlRef, typeNode, node: Node): NodeResult {.cdecl, gcsafe, noSideEffect, raises:[].} =
  case node.kind
  of nkString:
    ok(Node(kind: nkEnum, name: ctx.createName(node.stringVal), pos: node.pos))
  of nkEnum:
    ok(node)
  else:
    err("cannot coerce '$1' to $2" % [$node.kind, $typeNode])
    err("cannot coerce '" & $node.kind & "' to " & $typeNode)

{.pop.}
{.pop.}

proc initStarWarsApi*(ctx: GraphqlRef) =
  var ud = Starwars(characters: @characters, ships: @starships)
@@ -26,7 +26,6 @@ proc loadSchema(ctx: GraphqlRef, schema: config.Schema): GraphqlResult =
  ctx.parseSchemaFromFile("tests" / "schemas" / "star_wars_schema.ql", conf = conf)

proc main() =
  var message: string
  ## Processing command line arguments
  let r = processArguments()
@@ -192,7 +192,7 @@ proc testHooks() =

  waitFor server.closeWait()

template mainModule() =
template mainModule() {.used.} =
  proc main() =
    let conf = getConfiguration()
    if conf.testFile.len == 0:
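mainModule above, and the test helpers further down, gain a {.used.} marker. Because these symbols are only instantiated from a when branch, the compiler would otherwise report XDeclaredButNotUsed whenever that branch is not taken; the pragma states the symbol is intentionally kept. A standalone sketch (not code from this patch):

# Standalone sketch, not part of this patch.
proc selfTest(): bool {.used.} =
  # referenced only when -d:selfTest is passed, but never warned about
  true

template mainBody() {.used.} =
  echo "running"

when defined(selfTest):
  doAssert selfTest()
  mainBody()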
@@ -8,7 +8,7 @@
# those terms.

import
  std/[json, strutils],
  std/[json],
  json_serialization,
  ../graphql
@@ -35,7 +35,7 @@ proc decodeResponse*(input: string): ServerResponse =
    debugEcho e.formatMsg("")

{.push hint[XCannotRaiseY]: off.}
{.pragma: apiPragma, cdecl, gcsafe, raises: [Defect, CatchableError].}
{.pragma: apiPragma, cdecl, gcsafe, raises: [].}

proc queryNameImpl(ud: RootRef, params: Args, parent: Node): RespResult {.apiPragma.} =
  ok(resp("superman"))
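The resolver retyping above goes through a single user-defined pragma alias: apiPragma bundles the calling convention, gcsafe and the raises list, so dropping [Defect, CatchableError] to [] retypes every resolver at once. A standalone sketch of the mechanism with hypothetical resolver names (not code from this patch):

# Standalone sketch with hypothetical names, not part of this patch.
{.pragma: cbPragma, cdecl, gcsafe, raises: [].}

proc resolveName(id: int): string {.cbPragma.} =
  "object-" & $id

proc resolveColor(id: int): string {.cbPragma.} =
  if id mod 2 == 0: "blue" else: "red"

# every proc marked cbPragma now carries cdecl + gcsafe + raises: [];
# widening or narrowing the raises list is a one-line edit to the alias
echo resolveName(3), " is ", resolveColor(3)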
@@ -206,11 +206,11 @@ const droidProtos = {
  "color": colorImpl
}

proc coerceEnum(ctx: GraphqlRef, typeNode, node: Node): NodeResult {.cdecl, gcsafe, noSideEffect.} =
proc coerceEnum(ctx: GraphqlRef, typeNode, node: Node): NodeResult {.cdecl, gcsafe, noSideEffect, raises:[].} =
  if node.kind == nkString:
    ok(Node(kind: nkEnum, name: ctx.createName(node.stringVal), pos: node.pos))
  else:
    err("cannot coerce '$1' to $2" % [$node.kind, $typeNode.sym.name])
    err("cannot coerce '" & $node.kind & "' to " & $typeNode.sym.name)

proc objectIDImpl(ud: RootRef, params: Args, parent: Node): RespResult {.apiPragma.} =
  if parent.kind == nkNull:
@@ -34,11 +34,14 @@ type
const
  caseFolder = "tests" / "validation"

proc scalarMyScalar(ctx: GraphqlRef, typeNode, node: Node): NodeResult {.cdecl, gcsafe, noSideEffect.} =
proc scalarMyScalar(ctx: GraphqlRef,
                    typeNode,
                    node: Node): NodeResult
                    {.cdecl, gcsafe, noSideEffect, raises:[].} =
  if node.kind == nkString:
    ok(node)
  else:
    err("expect string, but got '$1'" % [$node.kind])
    err("expect string, but got '" & $node.kind & "'")

proc setupContext(): GraphqlRef =
  var ctx {.inject.} = new(GraphqlRef)
@@ -118,7 +121,7 @@ proc runConverter(ctx: GraphqlRef, savePoint: NameCounter, path, output: string)

  f.close()

proc convertCases(output: string) =
proc convertCases(output: string) {.used.} =
  var ctx = setupContext()
  let savePoint = ctx.getNameCounter()
  for fileName in walkDirRec(caseFolder):
@@ -187,7 +190,7 @@ proc runValidator(ctx: GraphqlRef, fileName: string, testStatusIMPL: var TestSta
    debugEcho $ctx.errors
    return

proc validateSchemas() =
proc validateSchemas() {.used.} =
  suite "validate schemas":
    var ctx = new(GraphqlRef)
    var savePoint = ctx.getNameCounter()