add store flag to parseSchema and parseQuery family

With this additional flag, both queries and schemas can be marked as stored, and purging becomes selective: `purgeQueries` can leave stored queries untouched while removing the rest.
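A rough usage sketch of the intended call pattern. The `graphql` import name, the already-initialized `ctx`, and the input strings are placeholders and not part of this commit; only the changed signatures below are taken as given.

```nim
# Sketch only: assumes `ctx` is an initialized GraphqlRef and the strings
# hold valid GraphQL documents. Not a tested program.
import graphql

proc demoStoredQueries(ctx: GraphqlRef,
                       schemaText, persistedQuery, adhocQuery: string) =
  discard ctx.parseSchema(schemaText, store = true)    # schema symbols marked as stored
  discard ctx.parseQuery(persistedQuery, store = true) # marked, so it can survive a purge
  discard ctx.parseQuery(adhocQuery)                   # store defaults to false

  # Selective purge: drop only the queries that were not stored.
  ctx.purgeQueries(includeVariables = true, includeStored = false)

  # Full purge: clear stored queries as well.
  ctx.purgeQueries(includeVariables = true, includeStored = true)
```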
parent 7b4bc109d7
commit ae811ec2ea
docs/api.md
@@ -35,13 +35,13 @@ If you find anything not listed or not exported in this list, please submit an i
 - `createName(ctx: GraphqlRef, name: string): Name`. `respMap` will need a name from the system using this proc.
 - `executeRequest(ctx: GraphqlRef, resp: RespStream, opName = ""): ParseResult`. This is your main entrance to the execution engine.
 - `validate(ctx: GraphqlRef, root: Node)`. Usually you don't need to call this directly.
-- `parseSchema(ctx: GraphqlRef, schema: string | openArray[byte]): ParseResult`. Parse a schema from a text string.
-- `parseSchemaFromFile(ctx: GraphqlRef, fileName: string): ParseResult`.
-- `parseSchemas[T: string | seq[byte]](ctx: GraphqlRef, files: openArray[string], schemas: openArray[T], conf = defaultParserConf()): GraphqlResult`.
+- `parseSchema(ctx: GraphqlRef, schema: string | openArray[byte], store = false, conf = defaultParserConf()): ParseResult`. Parse a schema from a text string.
+- `parseSchemaFromFile(ctx: GraphqlRef, fileName: string, store = false, conf = defaultParserConf()): ParseResult`.
+- `parseSchemas[T: string | seq[byte]](ctx: GraphqlRef, files: openArray[string], schemas: openArray[T], store = false, conf = defaultParserConf()): GraphqlResult`.
   Parses multiple files and multiple string/seq[byte] schema definitions at once.
-- `parseQuery(ctx: GraphqlRef, query: string | openArray[byte]): ParseResult`. Parse queries from a text string.
-- `parseQueryFromFile(ctx: GraphqlRef, fileName: string): ParseResult`.
-- `purgeQueries(ctx: GraphqlRef, includeVariables: bool)`. A server will often call this to remove unused queries.
+- `parseQuery(ctx: GraphqlRef, query: string | openArray[byte], store = false): ParseResult`. Parse queries from a text string.
+- `parseQueryFromFile(ctx: GraphqlRef, fileName: string, store = false): ParseResult`.
+- `purgeQueries(ctx: GraphqlRef, includeVariables: bool, includeStored = true)`. A server will often call this to remove unused queries.
 - `purgeSchema(ctx: GraphqlRef, includeScalars, includeResolvers: bool)`. You probably don't need to call this often.
 - `getNameCounter(ctx: GraphqlRef): NameCounter`. Use this proc to create a savepoint for `purgeNames`.
 - `purgeNames(ctx: GraphqlRef, savePoint: NameCounter)`. You need to call this after you call `purgeQueries` or `purgeSchema`.
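For context, a sketch of how the savepoint procs listed above combine with the selective purge on a server. The `graphql` import name, `ctx`, and `queryText` are placeholders, and the request handling around `executeRequest` is elided.

```nim
# Sketch: take a name savepoint, parse a throw-away (non-stored) query,
# then purge and roll the name table back. Stored queries and names
# created before the savepoint are untouched.
import graphql

proc handleOneRequest(ctx: GraphqlRef, queryText: string) =
  let savePoint = ctx.getNameCounter()
  discard ctx.parseQuery(queryText)          # store defaults to false
  # ... executeRequest would run the selected operation here ...
  ctx.purgeQueries(includeVariables = true, includeStored = false)
  ctx.purgeNames(savePoint)                  # must come after purgeQueries
```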
@@ -156,41 +156,50 @@ proc addResolvers*(ctx: GraphqlRef, ud: RootRef, typeName: string,
 proc createName*(ctx: GraphqlRef, name: string): Name =
   ctx.names.insert(name)
 
+proc markAsStored(root: Node) =
+  for n in root:
+    if n.kind != nkSym:
+      continue
+
+    n.sym.flags.incl sfBuiltin
+
 template validation(ctx: GraphqlRef, parser: Parser,
-                    stream: InputStream, doc: untyped): untyped =
+                    stream: InputStream, doc, store: untyped): untyped =
   parser.parseDocument(doc)
   close stream
   if parser.error != errNone:
     return err(@[parser.err])
   ctx.validate(doc.root)
+  if store:
+    markAsStored(doc.root)
   if ctx.errKind != ErrNone:
     return err(ctx.errors)
   ok()
 
-template parseSchemaImpl(schema, conf: untyped): untyped =
+template parseSchemaImpl(schema, store, conf: untyped): untyped =
   var stream = unsafeMemoryInput(schema)
   var parser = Parser.init(stream, ctx.names, conf)
   var doc: SchemaDocument
-  ctx.validation(parser, stream, doc)
+  ctx.validation(parser, stream, doc, store)
 
-proc parseSchema*(ctx: GraphqlRef, schema: string,
+proc parseSchema*(ctx: GraphqlRef, schema: string, store = false,
                   conf = defaultParserConf()): GraphqlResult {.gcsafe.} =
   {.gcsafe.}:
-    parseSchemaImpl(schema, conf)
+    parseSchemaImpl(schema, store, conf)
 
-proc parseSchema*(ctx: GraphqlRef, schema: openArray[byte],
+proc parseSchema*(ctx: GraphqlRef, schema: openArray[byte], store = false,
                   conf = defaultParserConf()): GraphqlResult =
   {.gcsafe.}:
-    parseSchemaImpl(schema, conf)
+    parseSchemaImpl(schema, store, conf)
 
-proc parseSchemaFromFile*(ctx: GraphqlRef, fileName: string,
+proc parseSchemaFromFile*(ctx: GraphqlRef, fileName: string, store = false,
                           conf = defaultParserConf()): GraphqlResult {.gcsafe.} =
   {.gcsafe.}:
     try:
       var stream = memFileInput(fileName)
       var parser = Parser.init(stream, ctx.names, conf)
       var doc: SchemaDocument
-      ctx.validation(parser, stream, doc)
+      ctx.validation(parser, stream, doc, store)
     except CatchableError as e:
       err(@[fatalError("parseSchemaFromFile: " & e.msg)])
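The new `markAsStored` pass above walks the parsed document and sets the `sfBuiltin` flag on each top-level symbol it finds, which is what later lets the purge procs tell stored entries apart. A self-contained model of that shape, using stand-in types rather than the library's real `Node`/`Symbol` definitions:

```nim
# Stand-in types: a tiny tree whose symbol children get tagged as "stored",
# mirroring the single-level `for n in root` loop in markAsStored above.
type
  SimKind = enum simSym, simOther
  SimSym = ref object
    stored: bool
  SimNode = ref object
    kind: SimKind
    sym: SimSym
    sons: seq[SimNode]

proc markStored(root: SimNode) =
  for n in root.sons:        # only direct children are visited
    if n.kind != simSym:
      continue
    n.sym.stored = true

let doc = SimNode(kind: simOther, sons: @[
  SimNode(kind: simSym, sym: SimSym()),
  SimNode(kind: simOther)
])
markStored(doc)
assert doc.sons[0].sym.stored
```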
@@ -206,8 +215,10 @@ proc parseSchema(ctx: GraphqlRef, stream: InputStream,
     else: root.sons.add doc.root.sons
   ok()
 
-proc parseSchemas*[T: string | seq[byte]](ctx: GraphqlRef, files: openArray[string],
-                   schemas: openArray[T], conf = defaultParserConf()): GraphqlResult {.gcsafe.} =
+proc parseSchemas*[T: string | seq[byte]](ctx: GraphqlRef,
+                   files: openArray[string],
+                   schemas: openArray[T], store = false,
+                   conf = defaultParserConf()): GraphqlResult {.gcsafe.} =
   {.gcsafe.}:
     var root: Node
     try:
@@ -226,48 +237,59 @@ proc parseSchemas*[T: string | seq[byte]](ctx: GraphqlRef, files: openArray[stri
         return res
 
   ctx.validate(root)
+  if store:
+    markAsStored(root)
   if ctx.errKind != ErrNone:
     return err(ctx.errors)
   ok()
 
-template parseQueryImpl(schema, conf: untyped): untyped =
+template parseQueryImpl(schema, store, conf: untyped): untyped =
   var stream = unsafeMemoryInput(query)
   var parser = Parser.init(stream, ctx.names, conf)
   var doc: QueryDocument
-  ctx.validation(parser, stream, doc)
+  ctx.validation(parser, stream, doc, store)
 
-proc parseQuery*(ctx: GraphqlRef, query: string,
+proc parseQuery*(ctx: GraphqlRef, query: string, store = false,
                  conf = defaultParserConf()): GraphqlResult {.gcsafe.} =
   {.gcsafe.}:
-    parseQueryImpl(query, conf)
+    parseQueryImpl(query, store, conf)
 
-proc parseQuery*(ctx: GraphqlRef, query: openArray[byte],
+proc parseQuery*(ctx: GraphqlRef, query: openArray[byte], store = false,
                  conf = defaultParserConf()): GraphqlResult {.gcsafe.} =
   {.gcsafe.}:
-    parseQueryImpl(query, conf)
+    parseQueryImpl(query, store, conf)
 
-proc parseQueryFromFile*(ctx: GraphqlRef, fileName: string,
+proc parseQueryFromFile*(ctx: GraphqlRef, fileName: string, store = false,
                          conf = defaultParserConf()): GraphqlResult {.gcsafe.} =
   {.gcsafe.}:
     try:
       var stream = memFileInput(fileName)
       var parser = Parser.init(stream, ctx.names, conf)
       var doc: QueryDocument
-      ctx.validation(parser, stream, doc)
+      ctx.validation(parser, stream, doc, store)
     except CatchableError as e:
       err(@[fatalError("parseQueryFromFile: " & e.msg)])
 
-proc purgeQueries*(ctx: GraphqlRef, includeVariables: bool = true) =
-  ctx.opTable.clear()
+proc purgeQueries*(ctx: GraphqlRef, includeVariables = true, includeStored = false) =
+  if includeStored:
+    ctx.opTable.clear()
+  else:
+    var names = newSeqOfCap[Name](ctx.opTable.len)
+    for n, v in ctx.opTable:
+      if sfBuiltin notin v.flags:
+        names.add n
+    for n in names:
+      ctx.opTable.del(n)
+
   if includeVariables:
     ctx.varTable.clear()
 
 proc purgeSchema*(ctx: GraphqlRef, includeScalars = true,
                   includeResolvers = true, includeCoercion = true) =
-  var names = initHashSet[Name]()
+  var names = newSeqOfCap[Name](ctx.typeTable.len)
   for n, v in ctx.typeTable:
     if sfBuiltin notin v.flags:
-      names.incl n
+      names.add n
 
   ctx.rootQuery = nil
   ctx.rootMutation = nil
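The two-pass shape of the new `purgeQueries` (collect the names first, delete afterwards) is the usual way to avoid mutating a table while iterating over it. A self-contained illustration of the same pattern with `std/tables` (plain strings and booleans here, not the library's own `Name` and flag types):

```nim
# Collect-then-delete: removing entries from a Table while iterating it is
# not safe, so the keys to drop are gathered first and deleted afterwards.
import std/tables

var ops = {"storedOp": true, "adhocA": false, "adhocB": false}.toTable

var doomed = newSeqOfCap[string](ops.len)
for name, stored in ops:
  if not stored:          # keep entries marked as stored
    doomed.add name
for name in doomed:
  ops.del name

assert ops.len == 1 and ops.hasKey("storedOp")
```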
@@ -25,10 +25,10 @@ proc loadSchema(ctx: GraphqlRef, schema: Schema): GraphqlResult =
 
   if schema == ethereum:
     ctx.initEthApi()
-    ctx.parseSchemaFromFile("tests" / "schemas" / "ethereum_1.0.ql", conf)
+    ctx.parseSchemaFromFile("tests" / "schemas" / "ethereum_1.0.ql", conf = conf)
   else:
     ctx.initStarWarsApi()
-    ctx.parseSchemaFromFile("tests" / "schemas" / "star_wars_schema.ql", conf)
+    ctx.parseSchemaFromFile("tests" / "schemas" / "star_wars_schema.ql", conf = conf)
 
 const
   address = initTAddress("127.0.0.1:8547")
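The test-side change to `conf = conf` is needed because `store` now sits between the file name and the parser config, so a positional second argument would bind to `store` instead of `conf`. A minimal illustration of that Nim rule with placeholder types and a placeholder proc, not the real API:

```nim
# With a new defaulted parameter inserted before `conf`, a positional second
# argument would bind to `store` (and fail to type-check against a config
# object), so the config must be passed by name.
type DemoConf = object
  maxDepth: int

proc parseFromFile(fileName: string, store = false, conf = DemoConf()) =
  echo fileName, " store=", store, " depth=", conf.maxDepth

let myConf = DemoConf(maxDepth: 3)
# parseFromFile("schema.ql", myConf)      # no longer compiles: DemoConf is not a bool
parseFromFile("schema.ql", conf = myConf) # keyword argument keeps store at its default
```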
@@ -43,7 +43,7 @@ proc suite1() =
     if r2.isErr:
       debugEcho r2.error
       return
 
   test "parseVariable":
     var res = ctx.parseVar("boolVar", "true")
     check res.isOk
@@ -60,15 +60,15 @@ proc suite1() =
     check res.isOk
     let resp1 = resp.getString()
     check resp1 == """{"name":"hello"}"""
 
     ctx.addVar("mm", "sweet banana")
     resp = JsonRespStream.new()
     res = ctx.executeRequest(respStream(resp), "banana")
     check res.isOk
     let resp2 = resp.getString()
     check resp2 == """{"name":"sweet banana"}"""
 
   test "parseSchemas":
     const
       schemaFile = "tests" / "schemas" / "example_schema.ql"
@@ -78,5 +78,10 @@ proc suite1() =
     let res = ctx.parseSchemas([schemaFile], [extend])
     check res.isErr
     check $res.error == "@[[2, 3]: Error: duplicate name 'name']"
 
+  test "parse stored query":
+    let res = ctx.parseQuery(query, store = true)
+    check res.isOk
+    ctx.purgeQueries(includeStored = true)
+
 suite1()