Mirror of https://github.com/status-im/NimYAML.git

Commit 48d601d959 (parent e2f8e6419e): parser tests working, other tests compiling
@@ -80,16 +80,16 @@ suite "DOM":
       input = initYamlDoc(newYamlNode([a, b, newYamlNode("c"), a, b]))
     var result = serialize(input, initExtendedTagLibrary())
     ensure(result, startDocEvent(), startSeqEvent(),
-           scalarEvent("a", anchor=0.AnchorId),
+           scalarEvent("a", anchor="a".Anchor),
-           scalarEvent("b", anchor=1.AnchorId), scalarEvent("c"),
+           scalarEvent("b", anchor="b".Anchor), scalarEvent("c"),
-           aliasEvent(0.AnchorId), aliasEvent(1.AnchorId), endSeqEvent(),
+           aliasEvent("a".Anchor), aliasEvent("b".Anchor), endSeqEvent(),
            endDocEvent())
   test "Serializing with all anchors":
     let
       a = newYamlNode("a")
       input = initYamlDoc(newYamlNode([a, newYamlNode("b"), a]))
     var result = serialize(input, initExtendedTagLibrary(), asAlways)
-    ensure(result, startDocEvent(), startSeqEvent(anchor=0.AnchorId),
+    ensure(result, startDocEvent(), startSeqEvent(anchor="a".Anchor),
-           scalarEvent("a", anchor=1.AnchorId),
+           scalarEvent("a", anchor = "b".Anchor),
-           scalarEvent("b", anchor=2.AnchorId), aliasEvent(1.AnchorId),
+           scalarEvent("b", anchor="c".Anchor), aliasEvent("b".Anchor),
            endSeqEvent(), endDocEvent())
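The hunk above tracks the switch from numeric anchor ids (0.AnchorId) to the string-based Anchor distinct type, so the test expectations now name anchors after their YAML anchor text. A minimal sketch of the new style, assuming the ensure/serialize helpers used in this suite:

    let
      a = newYamlNode("a")
      doc = initYamlDoc(newYamlNode([a, a]))
    var events = serialize(doc, initExtendedTagLibrary())
    ensure(events, startDocEvent(), startSeqEvent(),
           scalarEvent("a", anchor = "a".Anchor),  # anchor named after its content
           aliasEvent("a".Anchor), endSeqEvent(), endDocEvent())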
@@ -8,13 +8,10 @@ import "../yaml"
 import unittest, json

-proc wc(line, column: int, lineContent: string, message: string) =
-  echo "Warning (", line, ",", column, "): ", message, "\n", lineContent
-
 proc ensureEqual(yamlIn, jsonIn: string) =
   try:
     var
-      parser = newYamlParser(initCoreTagLibrary(), wc)
+      parser = initYamlParser(initCoreTagLibrary(), true)
       s = parser.parse(yamlIn)
       yamlResult = constructJson(s)
       jsonResult = parseJson(jsonIn)

@@ -24,7 +21,7 @@ proc ensureEqual(yamlIn, jsonIn: string) =
   except YamlStreamError:
     let e = (ref YamlParserError)(getCurrentException().parent)
     echo "error occurred: " & e.msg
-    echo "line: ", e.line, ", column: ", e.column
+    echo "line: ", e.mark.line, ", column: ", e.mark.column
     echo e.lineContent
     raise e
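Two API changes show up here: the warning callback proc is replaced by a plain issueWarnings bool on initYamlParser, and parser errors now carry their position in a mark field. A hedged sketch of the resulting error handling, mirroring the test code above (the input string is only an illustrative broken document):

    var parser = initYamlParser(initCoreTagLibrary(), issueWarnings = true)
    try:
      discard constructJson(parser.parse("foo: [1, 2"))
    except YamlStreamError:
      let e = (ref YamlParserError)(getCurrentException().parent)
      # position now lives in e.mark rather than directly on the error
      echo "line: ", e.mark.line, ", column: ", e.mark.column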
@@ -161,7 +161,7 @@ suite "Lexer":
   test "Block Scalars":
     assertEquals("one : >2-\l foo\l bar\ltwo: |+\l bar\l baz", i(0),
-        pl("one"), mv(), fs(" foo bar"), i(0), pl("two"), mv(),
+        pl("one"), mv(), fs(" foo\nbar"), i(0), pl("two"), mv(),
         ls("bar\l baz"), e())

   test "Flow indicators":
@@ -75,14 +75,6 @@ macro genTests(): untyped =
   let errorTests = toHashSet(staticExec("cd " & (absolutePath / "tags" / "error") &
                              " && ls -1d *").splitLines())
   var ignored = toHashSet([".git", "name", "tags", "meta"])
-  #-----------------------------------------------------------------------------
-  # THE FOLLOWING TESTS WOULD FAIL FOR THE DOCUMENTED REASONS
-  ignored.incl("W5VH")
-  # YAML allows the colon as part of an anchor or alias name.
-  # For aliases, this leads to confusion becaues `*a:` looks like an implicit
-  # mapping key (but is not).
-  # Therefore, NimYAML disallows colons in anchor names.
-  #-----------------------------------------------------------------------------

   result = newStmtList()
   # walkDir for some crude reason does not work with travis build
@@ -119,8 +119,8 @@ template expectConstructionError(li, co: int, message: string, body: typed) =
     fail()
   except YamlConstructionError:
     let e = (ref YamlConstructionError)(getCurrentException())
-    doAssert li == e.line, "Expected error line " & $li & ", was " & $e.line
+    doAssert li == e.mark.line, "Expected error line " & $li & ", was " & $e.mark.line
-    doAssert co == e.column, "Expected error column " & $co & ", was " & $e.column
+    doAssert co == e.mark.column, "Expected error column " & $co & ", was " & $e.mark.column
     doAssert message == e.msg, "Expected error message \n" & escape(message) &
         ", got \n" & escape(e.msg)
@@ -307,8 +307,8 @@ suite "Serialization":
   test "Dump OrderedTable[tuple[int32, int32], string]":
     var input = initOrderedTable[tuple[a, b: int32], string]()
-    input.add((a: 23'i32, b: 42'i32), "dreiundzwanzigzweiundvierzig")
+    input[(a: 23'i32, b: 42'i32)] = "dreiundzwanzigzweiundvierzig"
-    input.add((a: 13'i32, b: 47'i32), "dreizehnsiebenundvierzig")
+    input[(a: 13'i32, b: 47'i32)] = "dreizehnsiebenundvierzig"
     var output = dump(input, tsRootOnly, asTidy, blockOnly)
     assertStringEqual(yamlDirs &
         """!n!tables:OrderedTable(tag:nimyaml.org;2016:tuple(tag:nimyaml.org;2016:system:int32;tag:nimyaml.org;2016:system:int32);tag:yaml.org;2002:str)
@@ -610,7 +610,7 @@ next:
   try: load(input, result)
   except YamlConstructionError:
     let ex = (ref YamlConstructionError)(getCurrentException())
-    echo "line ", ex.line, ", column ", ex.column, ": ", ex.msg
+    echo "line ", ex.mark.line, ", column ", ex.mark.column, ": ", ex.msg
     echo ex.lineContent
     raise ex
@@ -28,7 +28,7 @@ type
     ssAny, ssPlain, ssSingleQuoted, ssDoubleQuoted, ssLiteral, ssFolded

   CollectionStyle* = enum
-    csBlock, csFlow
+    csAny, csBlock, csFlow, csPair

   EventKind* = enum
     ## Kinds of YAML events that may occur in an ``YamlStream``. Event kinds
@@ -156,6 +156,12 @@ proc collectionStyle*(event: Event): CollectionStyle =
   of yamlStartSeq: result = event.seqStyle
   else: raise (ref FieldDefect)(msg: "Event " & $event.kind & " has no collectionStyle")

+proc startStreamEvent*(): Event =
+  return Event(startPos: defaultMark, endPos: defaultMark, kind: yamlStartStream)
+
+proc endStreamEvent*(): Event =
+  return Event(startPos: defaultMark, endPos: defaultMark, kind: yamlEndStream)
+
 proc startDocEvent*(explicit: bool = false, version: string = "", startPos, endPos: Mark = defaultMark): Event
     {.inline, raises: [].} =
   ## creates a new event that marks the start of a YAML document
@@ -176,10 +182,10 @@ proc startMapEvent*(style: CollectionStyle, props: Properties,
       kind: yamlStartMap, mapProperties: props,
       mapStyle: style)

-proc startMapEvent*(style: CollectionStyle,
+proc startMapEvent*(style: CollectionStyle = csAny,
                     tag: TagId = yTagQuestionMark,
                     anchor: Anchor = yAnchorNone,
-                    startPos, endPos: Mark): Event {.inline.} =
+                    startPos, endPos: Mark = defaultMark): Event {.inline.} =
   return startMapEvent(style, (anchor, tag), startPos, endPos)

 proc endMapEvent*(startPos, endPos: Mark = defaultMark): Event {.inline, raises: [].} =
@@ -194,7 +200,7 @@ proc startSeqEvent*(style: CollectionStyle,
       kind: yamlStartSeq, seqProperties: props,
       seqStyle: style)

-proc startSeqEvent*(style: CollectionStyle,
+proc startSeqEvent*(style: CollectionStyle = csAny,
                     tag: TagId = yTagQuestionMark,
                     anchor: Anchor = yAnchorNone,
                     startPos, endPos: Mark = defaultMark): Event {.inline.} =
@@ -222,12 +228,12 @@ proc aliasEvent*(target: Anchor, startPos, endPos: Mark = defaultMark): Event {.
   ## creates a new event that represents a YAML alias
   result = Event(startPos: startPos, endPos: endPos, kind: yamlAlias, aliasTarget: target)

-proc `==`*(left, right: Anchor): bool {.borrow.}
+proc `==`*(left, right: Anchor): bool {.borrow, locks: 0.}
-proc `$`*(id: Anchor): string {.borrow.}
+proc `$`*(id: Anchor): string {.borrow, locks: 0.}
-proc hash*(id: Anchor): Hash {.borrow.}
+proc hash*(id: Anchor): Hash {.borrow, locks: 0.}

-proc `==`*(left, right: TagId): bool {.borrow.}
+proc `==`*(left, right: TagId): bool {.borrow, locks: 0.}
-proc hash*(id: TagId): Hash {.borrow.}
+proc hash*(id: TagId): Hash {.borrow, locks: 0.}

 proc `$`*(id: TagId): string {.raises: [].} =
   case id
|
@ -195,7 +195,7 @@ proc compose*(s: var YamlStream, tagLib: TagLibrary): YamlDocument
|
||||||
yAssert n.kind == yamlEndDoc
|
yAssert n.kind == yamlEndDoc
|
||||||
|
|
||||||
proc loadDom*(s: Stream | string): YamlDocument
|
proc loadDom*(s: Stream | string): YamlDocument
|
||||||
{.raises: [IOError, YamlParserError, YamlConstructionError].} =
|
{.raises: [IOError, OSError, YamlParserError, YamlConstructionError].} =
|
||||||
var
|
var
|
||||||
tagLib = initExtendedTagLibrary()
|
tagLib = initExtendedTagLibrary()
|
||||||
parser: YamlParser
|
parser: YamlParser
|
||||||
|
|
yaml/parser.nim: 237 lines changed
|
@ -27,7 +27,7 @@ type
|
||||||
tagLib: TagLibrary
|
tagLib: TagLibrary
|
||||||
issueWarnings: bool
|
issueWarnings: bool
|
||||||
|
|
||||||
State = proc(c: Context, e: var Event): bool {.locks: 0, gcSafe.}
|
State = proc(c: Context, e: var Event): bool {.gcSafe.}
|
||||||
|
|
||||||
Level = object
|
Level = object
|
||||||
state: State
|
state: State
|
||||||
|
@@ -38,6 +38,9 @@ type
     issueWarnings: bool
     lex: Lexer
     levels: seq[Level]
+    keyCache: seq[Event]
+    keyCachePos: int
+    caching: bool

     headerProps, inlineProps: Properties
     headerStart, inlineStart: Mark
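The new keyCache, keyCachePos and caching fields buffer events while the parser reads a flow collection that may turn out to be the implicit key of a block mapping (or of a single pair inside a flow sequence); the emitCollectionKey state added further down replays them. A hedged sketch of the kind of input this enables, assuming the loadDom API shown above:

    # a flow mapping used as an implicit block mapping key: the parser cannot
    # know it is a key until it sees the ':' after the closing '}', so the
    # events for {a: b} are cached and then re-emitted
    let doc = loadDom("{a: b}: value")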
@@ -87,7 +90,7 @@ const defaultProperties = (yAnchorNone, yTagQuestionMark)

 # parser states

-{.push gcSafe, locks: 0.}
+{.push gcSafe, .}
 proc atStreamStart(c: Context, e: var Event): bool
 proc atStreamEnd(c: Context, e : var Event): bool
 proc beforeDoc(c: Context, e: var Event): bool
@@ -97,14 +100,14 @@ proc beforeImplicitRoot(c: Context, e: var Event): bool
 proc atBlockIndentation(c: Context, e: var Event): bool
 proc beforeBlockIndentation(c: Context, e: var Event): bool
 proc beforeNodeProperties(c: Context, e: var Event): bool
-proc requireImplicitMapStart(c: Context, e: var Event): bool
 proc afterCompactParent(c: Context, e: var Event): bool
 proc afterCompactParentProps(c: Context, e: var Event): bool
-proc requireInlineBlockItem(c: Context, e: var Event): bool
+proc mergePropsOnNewline(c: Context, e: var Event): bool
 proc beforeFlowItemProps(c: Context, e: var Event): bool
 proc inBlockSeq(c: Context, e: var Event): bool
 proc beforeBlockMapValue(c: Context, e: var Event): bool
 proc atBlockIndentationProps(c: Context, e: var Event): bool
+proc beforeFlowItem(c: Context, e: var Event): bool
 proc afterFlowSeqSep(c: Context, e: var Event): bool
 proc afterFlowMapSep(c: Context, e: var Event): bool
 proc atBlockMapKeyProps(c: Context, e: var Event): bool
@@ -118,6 +121,7 @@ proc afterFlowMapValue(c: Context, e: var Event): bool
 proc afterFlowSeqSepProps(c: Context, e: var Event): bool
 proc afterFlowSeqItem(c: Context, e: var Event): bool
 proc afterPairValue(c: Context, e: var Event): bool
+proc emitCollectionKey(c: Context, e: var Event): bool
 {.pop.}

 template debug(message: string) {.dirty.} =
@@ -162,15 +166,23 @@ proc init[T](c: Context, p: YamlParser, source: T) {.inline.} =
   c.tagLib = p.tagLib
   c.issueWarnings = p.issueWarnings
   c.lex.init(source)
+  c.keyCachePos = 0
+  c.caching = false

 # interface

 proc init*(p: var YamlParser, tagLib: TagLibrary = initExtendedTagLibrary(),
            issueWarnings: bool = false) =
-  ## Creates a YAML parser.
+  ## Initializes a YAML parser.
   p.tagLib = tagLib
   p.issueWarnings = issueWarnings

+proc initYamlParser*(tagLib: TagLibrary = initExtendedTagLibrary(),
+                     issueWarnings: bool = false): YamlParser =
+  ## Creates an initializes YAML parser and returns it
+  result.tagLib = tagLib
+  result.issueWarnings = issueWarnings
+
 proc parse*(p: YamlParser, s: Stream): YamlStream =
   let c = new(Context)
   c.init(p, s)
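Callers can now either initialize an existing parser in place or obtain a fresh one from the new initYamlParser. A hedged usage sketch (the parse-from-string call mirrors how the JSON test above uses it):

    var p1: YamlParser
    p1.init()                                      # initialize in place
    var p2 = initYamlParser(issueWarnings = true)  # or create and return one
    let events = p2.parse("key: value")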
@@ -188,7 +200,7 @@ proc isEmpty(props: Properties): bool =
   props.tag == yTagQuestionMark

 proc generateError(c: Context, message: string):
-    ref YamlParserError {.raises: [].} =
+    ref YamlParserError {.raises: [], .} =
   result = (ref YamlParserError)(
     msg: message, parent: nil, mark: c.lex.curStartPos,
     lineContent: c.lex.currentLine())
@@ -216,6 +228,18 @@ proc toStyle(t: Token): ScalarStyle =
     of Folded: ssFolded
     else: ssAny)

+proc mergeProps(c: Context, src, target: var Properties) =
+  if src.tag != yTagQuestionMark:
+    if target.tag != yTagQuestionMark:
+      raise c.generateError("Only one tag allowed per node")
+    target.tag = src.tag
+    src.tag = yTagQuestionMark
+  if src.anchor != yAnchorNone:
+    if target.anchor != yAnchorNone:
+      raise c.generateError("Only one anchor allowed per node")
+    target.anchor = src.anchor
+    src.anchor = yAnchorNone
+
 proc autoScalarTag(props: Properties, t: Token): Properties =
   result = props
   if t in {Token.SingleQuoted, Token.DoubleQuoted} and
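The new mergeProps helper factors out the tag/anchor transfer code that previously lived inline in requireInlineBlockItem (see the mergePropsOnNewline hunk further down): it moves a tag or anchor from src to target and rejects a second one on the same node. A hedged illustration of the error it guards against, assuming loadDom and an input of my own invention:

    # two tag properties on one node should be rejected with
    # "Only one tag allowed per node" (sketch, not from the commit)
    try:
      discard loadDom("!!str !!int scalar")
    except YamlParserError as e:
      echo e.msg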
@ -278,7 +302,7 @@ proc beforeDoc(c: Context, e: var Event): bool =
|
||||||
c.lex.next()
|
c.lex.next()
|
||||||
if c.lex.cur != Token.Suffix:
|
if c.lex.cur != Token.Suffix:
|
||||||
raise c.generateError("Invalid token (expected tag URI): " & $c.lex.cur)
|
raise c.generateError("Invalid token (expected tag URI): " & $c.lex.cur)
|
||||||
discard c.tagLib.registerHandle(c.lex.fullLexeme(), tagHandle)
|
discard c.tagLib.registerHandle(c.lex.evaluated, tagHandle)
|
||||||
c.lex.next()
|
c.lex.next()
|
||||||
of UnknownDirective:
|
of UnknownDirective:
|
||||||
seenDirectives = true
|
seenDirectives = true
|
||||||
|
@ -291,7 +315,7 @@ proc beforeDoc(c: Context, e: var Event): bool =
|
||||||
|
|
||||||
proc afterDirectivesEnd(c: Context, e: var Event): bool =
|
proc afterDirectivesEnd(c: Context, e: var Event): bool =
|
||||||
case c.lex.cur
|
case c.lex.cur
|
||||||
of TagHandle, VerbatimTag, Token.Anchor:
|
of nodePropertyKind:
|
||||||
c.inlineStart = c.lex.curStartPos
|
c.inlineStart = c.lex.curStartPos
|
||||||
c.pushLevel(beforeNodeProperties)
|
c.pushLevel(beforeNodeProperties)
|
||||||
return false
|
return false
|
||||||
|
@ -304,10 +328,9 @@ proc afterDirectivesEnd(c: Context, e: var Event): bool =
|
||||||
e = scalarEvent("", c.inlineProps, ssPlain, c.lex.curStartPos, c.lex.curEndPos)
|
e = scalarEvent("", c.inlineProps, ssPlain, c.lex.curStartPos, c.lex.curEndPos)
|
||||||
c.popLevel()
|
c.popLevel()
|
||||||
return true
|
return true
|
||||||
of Folded, Literal:
|
of scalarTokenKind:
|
||||||
e = scalarEvent(c.lex.evaluated, c.inlineProps,
|
e = scalarEvent(c.lex.evaluated, autoScalarTag(c.inlineProps, c.lex.cur),
|
||||||
if c.lex.cur == Token.Folded: ssFolded else: ssLiteral,
|
toStyle(c.lex.cur), c.lex.curStartPos, c.lex.curEndPos)
|
||||||
c.lex.curStartPos, c.lex.curEndPos)
|
|
||||||
c.popLevel()
|
c.popLevel()
|
||||||
c.lex.next()
|
c.lex.next()
|
||||||
return true
|
return true
|
||||||
|
@ -324,67 +347,15 @@ proc beforeImplicitRoot(c: Context, e: var Event): bool =
|
||||||
of SeqItemInd, MapKeyInd, MapValueInd:
|
of SeqItemInd, MapKeyInd, MapValueInd:
|
||||||
c.transition(afterCompactParent)
|
c.transition(afterCompactParent)
|
||||||
return false
|
return false
|
||||||
of scalarTokenKind:
|
of scalarTokenKind, MapStart, SeqStart:
|
||||||
c.transition(requireImplicitMapStart)
|
c.transition(atBlockIndentationProps)
|
||||||
return false
|
return false
|
||||||
of nodePropertyKind:
|
of nodePropertyKind:
|
||||||
c.transition(requireImplicitMapStart)
|
c.transition(atBlockIndentationProps)
|
||||||
c.pushLevel(beforeNodeProperties)
|
c.pushLevel(beforeNodeProperties)
|
||||||
of MapStart, SeqStart:
|
|
||||||
c.transition(afterCompactParentProps)
|
|
||||||
return false
|
|
||||||
else:
|
else:
|
||||||
raise c.generateError("Unexpected token (expected collection start): " & $c.lex.cur)
|
raise c.generateError("Unexpected token (expected collection start): " & $c.lex.cur)
|
||||||
|
|
||||||
proc requireImplicitMapStart(c: Context, e: var Event): bool =
|
|
||||||
c.updateIndentation(c.lex.recentIndentation())
|
|
||||||
case c.lex.cur
|
|
||||||
of Alias:
|
|
||||||
e = aliasEvent(c.lex.shortLexeme().Anchor, c.inlineStart, c.lex.curEndPos)
|
|
||||||
let headerEnd = c.lex.curStartPos
|
|
||||||
c.lex.next()
|
|
||||||
if c.lex.cur == Token.MapValueInd:
|
|
||||||
c.peek = e
|
|
||||||
e = startMapEvent(csBlock, c.headerProps, c.headerStart, headerEnd)
|
|
||||||
c.headerProps = defaultProperties
|
|
||||||
c.transition(afterImplicitKey)
|
|
||||||
else:
|
|
||||||
if not isEmpty(c.headerProps):
|
|
||||||
raise c.generateError("Alias may not have properties")
|
|
||||||
c.popLevel()
|
|
||||||
return true
|
|
||||||
of Plain, SingleQuoted, DoubleQuoted:
|
|
||||||
e = scalarEvent(c.lex.evaluated, autoScalarTag(c.inlineProps, c.lex.cur),
|
|
||||||
toStyle(c.lex.cur), c.inlineStart, c.lex.curEndPos)
|
|
||||||
c.inlineProps = defaultProperties
|
|
||||||
let headerEnd = c.lex.curStartPos
|
|
||||||
c.lex.next()
|
|
||||||
case c.lex.cur
|
|
||||||
of Token.MapValueInd:
|
|
||||||
if c.lex.lastScalarWasMultiline():
|
|
||||||
raise c.generateError("Implicit mapping key may not be multiline")
|
|
||||||
c.peek = move(e)
|
|
||||||
e = startMapEvent(csBlock, c.headerProps,
|
|
||||||
c.headerStart, headerEnd)
|
|
||||||
c.headerProps = defaultProperties
|
|
||||||
c.transition(afterImplicitKey)
|
|
||||||
else: c.popLevel()
|
|
||||||
return true
|
|
||||||
of Literal, Folded:
|
|
||||||
e = scalarEvent(c.lex.evaluated, c.inlineProps, toStyle(c.lex.cur),
|
|
||||||
c.inlineStart, c.lex.curEndPos)
|
|
||||||
c.inlineProps = defaultProperties
|
|
||||||
c.lex.next()
|
|
||||||
c.popLevel()
|
|
||||||
return true
|
|
||||||
of MapStart, SeqStart:
|
|
||||||
c.transition(beforeFlowItemProps)
|
|
||||||
return false
|
|
||||||
of Indentation:
|
|
||||||
raise c.generateError("Standalone node properties not allowed on non-header line")
|
|
||||||
else:
|
|
||||||
raise c.generateError("Unexpected token (expected implicit mapping key): " & $c.lex.cur)
|
|
||||||
|
|
||||||
proc atBlockIndentation(c: Context, e: var Event): bool =
|
proc atBlockIndentation(c: Context, e: var Event): bool =
|
||||||
if c.blockIndentation == c.levels[^1].indentation and
|
if c.blockIndentation == c.levels[^1].indentation and
|
||||||
(c.lex.cur != Token.SeqItemInd or
|
(c.lex.cur != Token.SeqItemInd or
|
||||||
|
@ -400,9 +371,9 @@ proc atBlockIndentation(c: Context, e: var Event): bool =
|
||||||
case c.lex.cur
|
case c.lex.cur
|
||||||
of nodePropertyKind:
|
of nodePropertyKind:
|
||||||
if isEmpty(c.headerProps):
|
if isEmpty(c.headerProps):
|
||||||
c.transition(requireInlineBlockItem)
|
c.transition(mergePropsOnNewline)
|
||||||
else:
|
else:
|
||||||
c.transition(requireImplicitMapStart)
|
c.transition(atBlockIndentationProps)
|
||||||
c.pushLevel(beforeNodeProperties)
|
c.pushLevel(beforeNodeProperties)
|
||||||
return false
|
return false
|
||||||
of SeqItemInd:
|
of SeqItemInd:
|
||||||
|
@ -486,20 +457,41 @@ proc atBlockIndentationProps(c: Context, e: var Event): bool =
|
||||||
c.headerProps = defaultProperties
|
c.headerProps = defaultProperties
|
||||||
c.transition(afterImplicitKey)
|
c.transition(afterImplicitKey)
|
||||||
else:
|
else:
|
||||||
|
c.mergeProps(c.headerProps, e.scalarProperties)
|
||||||
c.popLevel()
|
c.popLevel()
|
||||||
return true
|
return true
|
||||||
of MapStart:
|
of MapStart, SeqStart:
|
||||||
e = startMapEvent(csFlow, c.headerProps, c.headerStart, c.lex.curEndPos)
|
let
|
||||||
|
startPos = c.lex.curStartPos
|
||||||
|
indent = c.levels[^1].indentation
|
||||||
|
c.transition(beforeFlowItemProps)
|
||||||
|
c.caching = true
|
||||||
|
while c.lex.flowDepth > 0:
|
||||||
|
c.keyCache.add(c.next())
|
||||||
|
c.keyCache.add(c.next())
|
||||||
|
c.caching = false
|
||||||
|
if c.lex.cur == Token.MapValueInd:
|
||||||
|
c.pushLevel(afterImplicitKey, indent)
|
||||||
|
c.pushLevel(emitCollectionKey)
|
||||||
|
if c.lex.curStartPos.line != startPos.line:
|
||||||
|
raise c.generateError("Implicit mapping key may not be multiline")
|
||||||
|
e = startMapEvent(csBlock, c.headerProps, c.headerStart, startPos)
|
||||||
c.headerProps = defaultProperties
|
c.headerProps = defaultProperties
|
||||||
c.transition(afterFlowMapSep)
|
|
||||||
c.lex.next()
|
|
||||||
return true
|
return true
|
||||||
of SeqStart:
|
else:
|
||||||
e = startSeqEvent(csFlow, c.headerProps, c.headerStart, c.lex.curEndPos)
|
c.pushLevel(emitCollectionKey)
|
||||||
|
return false
|
||||||
|
of Literal, Folded:
|
||||||
|
c.mergeProps(c.inlineProps, c.headerProps)
|
||||||
|
e = scalarEvent(c.lex.evaluated, c.headerProps, toStyle(c.lex.cur),
|
||||||
|
c.inlineStart, c.lex.curEndPos)
|
||||||
c.headerProps = defaultProperties
|
c.headerProps = defaultProperties
|
||||||
c.transition(afterFlowSeqSep)
|
|
||||||
c.lex.next()
|
c.lex.next()
|
||||||
|
c.popLevel()
|
||||||
return true
|
return true
|
||||||
|
of Indentation:
|
||||||
|
c.lex.next()
|
||||||
|
return false
|
||||||
else:
|
else:
|
||||||
raise c.generateError("Unexpected token (expected block content): " & $c.lex.cur)
|
raise c.generateError("Unexpected token (expected block content): " & $c.lex.cur)
|
||||||
|
|
||||||
|
@ -521,8 +513,7 @@ proc beforeNodeProperties(c: Context, e: var Event): bool =
|
||||||
raise c.generateError("Only one anchor allowed per node")
|
raise c.generateError("Only one anchor allowed per node")
|
||||||
c.inlineProps.anchor = c.lex.shortLexeme().Anchor
|
c.inlineProps.anchor = c.lex.shortLexeme().Anchor
|
||||||
of Indentation:
|
of Indentation:
|
||||||
c.headerProps = c.inlineProps
|
c.mergeProps(c.inlineProps, c.headerProps)
|
||||||
c.inlineProps = defaultProperties
|
|
||||||
c.popLevel()
|
c.popLevel()
|
||||||
return false
|
return false
|
||||||
of Alias:
|
of Alias:
|
||||||
|
@ -656,19 +647,10 @@ proc afterBlockParentProps(c: Context, e: var Event): bool =
|
||||||
c.transition(afterCompactParentProps)
|
c.transition(afterCompactParentProps)
|
||||||
return false
|
return false
|
||||||
|
|
||||||
proc requireInlineBlockItem(c: Context, e: var Event): bool =
|
proc mergePropsOnNewline(c: Context, e: var Event): bool =
|
||||||
c.updateIndentation(c.lex.recentIndentation())
|
c.updateIndentation(c.lex.recentIndentation())
|
||||||
if c.lex.cur == Token.Indentation:
|
if c.lex.cur == Token.Indentation:
|
||||||
if c.inlineProps.tag != yTagQuestionMark:
|
c.mergeProps(c.inlineProps, c.headerProps)
|
||||||
if c.headerProps.tag != yTagQuestionMark:
|
|
||||||
raise c.generateError("Only one tag allowed per node")
|
|
||||||
c.headerProps.tag = c.inlineProps.tag
|
|
||||||
c.inlineProps.tag = yTagQuestionMark
|
|
||||||
if c.inlineProps.anchor != yAnchorNone:
|
|
||||||
if c.headerProps.anchor != yAnchorNone:
|
|
||||||
raise c.generateError("Only one anchor allowed per node")
|
|
||||||
c.headerProps.anchor = c.inlineProps.anchor
|
|
||||||
c.inlineProps.anchor = yAnchorNone
|
|
||||||
c.transition(afterCompactParentProps)
|
c.transition(afterCompactParentProps)
|
||||||
return false
|
return false
|
||||||
|
|
||||||
|
@ -826,7 +808,6 @@ proc beforeBlockIndentation(c: Context, e: var Event): bool =
|
||||||
raise c.generateError("Unexpected content after node in block context (expected newline): " & $c.lex.cur)
|
raise c.generateError("Unexpected content after node in block context (expected newline): " & $c.lex.cur)
|
||||||
|
|
||||||
proc beforeFlowItem(c: Context, e: var Event): bool =
|
proc beforeFlowItem(c: Context, e: var Event): bool =
|
||||||
debug("parse: beforeFlowItem")
|
|
||||||
c.inlineStart = c.lex.curStartPos
|
c.inlineStart = c.lex.curStartPos
|
||||||
case c.lex.cur
|
case c.lex.cur
|
||||||
of nodePropertyKind:
|
of nodePropertyKind:
|
||||||
|
@ -931,7 +912,7 @@ proc afterFlowMapSep(c: Context, e: var Event): bool =
|
||||||
c.pushLevel(beforeFlowItem)
|
c.pushLevel(beforeFlowItem)
|
||||||
return false
|
return false
|
||||||
|
|
||||||
proc possibleNextSequenceItem(c: Context, e: var Event, endToken: Token, afterProps, afterItem: State): bool =
|
proc afterFlowSeqSep(c: Context, e: var Event): bool =
|
||||||
c.inlineStart = c.lex.curStartPos
|
c.inlineStart = c.lex.curStartPos
|
||||||
case c.lex.cur
|
case c.lex.cur
|
||||||
of SeqSep:
|
of SeqSep:
|
||||||
|
@ -939,41 +920,42 @@ proc possibleNextSequenceItem(c: Context, e: var Event, endToken: Token, afterPr
|
||||||
c.lex.next()
|
c.lex.next()
|
||||||
return true
|
return true
|
||||||
of nodePropertyKind:
|
of nodePropertyKind:
|
||||||
c.transition(afterProps)
|
c.transition(afterFlowSeqSepProps)
|
||||||
c.pushLevel(beforeNodeProperties)
|
c.pushLevel(beforeNodeProperties)
|
||||||
return false
|
return false
|
||||||
of Plain, SingleQuoted, DoubleQuoted:
|
of Plain, SingleQuoted, DoubleQuoted, MapStart, SeqStart:
|
||||||
c.transition(afterProps)
|
c.transition(afterFlowSeqSepProps)
|
||||||
return false
|
return false
|
||||||
of MapKeyInd:
|
of MapKeyInd:
|
||||||
c.transition(afterItem)
|
c.transition(afterFlowSeqSepProps)
|
||||||
e = startMapEvent(csFlow, defaultProperties, c.lex.curStartPos, c.lex.curEndPos)
|
e = startMapEvent(csFlow, defaultProperties, c.lex.curStartPos, c.lex.curEndPos)
|
||||||
c.lex.next()
|
c.lex.next()
|
||||||
|
c.transition(afterFlowSeqItem)
|
||||||
c.pushLevel(beforePairValue)
|
c.pushLevel(beforePairValue)
|
||||||
c.pushLevel(beforeFlowItem)
|
c.pushLevel(beforeFlowItem)
|
||||||
return true
|
return true
|
||||||
of MapValueInd:
|
of MapValueInd:
|
||||||
c.transition(afterItem)
|
c.transition(afterFlowSeqItem)
|
||||||
e = startMapEvent(csFlow, defaultProperties, c.lex.curStartPos, c.lex.curEndPos)
|
e = startMapEvent(csFlow, defaultProperties, c.lex.curStartPos, c.lex.curEndPos)
|
||||||
c.pushLevel(atEmptyPairKey)
|
c.pushLevel(atEmptyPairKey)
|
||||||
return true
|
return true
|
||||||
else:
|
of SeqEnd:
|
||||||
if c.lex.cur == endToken:
|
|
||||||
e = endSeqEvent(c.lex.curStartPos, c.lex.curEndPos)
|
e = endSeqEvent(c.lex.curStartPos, c.lex.curEndPos)
|
||||||
c.lex.next()
|
c.lex.next()
|
||||||
c.popLevel()
|
c.popLevel()
|
||||||
return true
|
return true
|
||||||
else:
|
else:
|
||||||
c.transition(afterItem)
|
c.transition(afterFlowSeqItem)
|
||||||
c.pushLevel(beforeFlowItem)
|
c.pushLevel(beforeFlowItem)
|
||||||
return false
|
return false
|
||||||
|
|
||||||
proc afterFlowSeqSep(c: Context, e: var Event): bool =
|
proc afterFlowSeqSepProps(c: Context, e: var Event): bool =
|
||||||
return possibleNextSequenceItem(c, e, Token.SeqEnd, afterFlowSeqSepProps, afterFlowSeqItem)
|
# here we handle potential implicit single pairs within flow sequences.
|
||||||
|
c.transition(afterFlowSeqItem)
|
||||||
proc forcedNextSequenceItem(c: Context, e: var Event): bool =
|
case c.lex.cur
|
||||||
if c.lex.cur in {Token.Plain, Token.SingleQuoted, Token.DoubleQuoted}:
|
of Plain, SingleQuoted, DoubleQuoted:
|
||||||
e = scalarEvent(c.lex.evaluated, c.inlineProps, toStyle(c.lex.cur), c.inlineStart, c.lex.curEndPos)
|
e = scalarEvent(c.lex.evaluated, autoScalarTag(c.inlineProps, c.lex.cur),
|
||||||
|
toStyle(c.lex.cur), c.inlineStart, c.lex.curEndPos)
|
||||||
c.inlineProps = defaultProperties
|
c.inlineProps = defaultProperties
|
||||||
c.lex.next()
|
c.lex.next()
|
||||||
if c.lex.cur == Token.MapValueInd:
|
if c.lex.cur == Token.MapValueInd:
|
||||||
|
@ -981,14 +963,40 @@ proc forcedNextSequenceItem(c: Context, e: var Event): bool =
|
||||||
e = startMapEvent(csFlow, defaultProperties, c.lex.curStartPos, c.lex.curStartPos)
|
e = startMapEvent(csFlow, defaultProperties, c.lex.curStartPos, c.lex.curStartPos)
|
||||||
c.pushLevel(afterImplicitPairStart)
|
c.pushLevel(afterImplicitPairStart)
|
||||||
return true
|
return true
|
||||||
|
of MapStart, SeqStart:
|
||||||
|
let
|
||||||
|
startPos = c.lex.curStartPos
|
||||||
|
indent = c.levels[^1].indentation
|
||||||
|
cacheStart = c.keyCache.len
|
||||||
|
targetFlowDepth = c.lex.flowDepth - 1
|
||||||
|
alreadyCaching = c.caching
|
||||||
|
c.pushLevel(beforeFlowItemProps)
|
||||||
|
c.caching = true
|
||||||
|
while c.lex.flowDepth > targetFlowDepth:
|
||||||
|
c.keyCache.add(c.next())
|
||||||
|
c.keyCache.add(c.next())
|
||||||
|
c.caching = alreadyCaching
|
||||||
|
if c.lex.cur == Token.MapValueInd:
|
||||||
|
c.pushLevel(afterImplicitPairStart, indent)
|
||||||
|
if c.lex.curStartPos.line != startPos.line:
|
||||||
|
raise c.generateError("Implicit mapping key may not be multiline")
|
||||||
|
if not alreadyCaching:
|
||||||
|
c.pushLevel(emitCollectionKey)
|
||||||
|
e = startMapEvent(csPair, defaultProperties, startPos, startPos)
|
||||||
|
return true
|
||||||
|
else:
|
||||||
|
# we are already filling a cache.
|
||||||
|
# so we just squeeze the map start in.
|
||||||
|
c.keyCache.insert(startMapEvent(csPair, defaultProperties, startPos, startPos), cacheStart)
|
||||||
|
return false
|
||||||
|
else:
|
||||||
|
if not alreadyCaching:
|
||||||
|
c.pushLevel(emitCollectionKey)
|
||||||
|
return false
|
||||||
else:
|
else:
|
||||||
c.pushLevel(beforeFlowItem)
|
c.pushLevel(beforeFlowItem)
|
||||||
return false
|
return false
|
||||||
|
|
||||||
proc afterFlowSeqSepProps(c: Context, e: var Event): bool =
|
|
||||||
c.transition(afterFlowSeqItem)
|
|
||||||
return forcedNextSequenceItem(c, e)
|
|
||||||
|
|
||||||
proc atEmptyPairKey(c: Context, e: var Event): bool =
|
proc atEmptyPairKey(c: Context, e: var Event): bool =
|
||||||
c.transition(beforePairValue)
|
c.transition(beforePairValue)
|
||||||
e = scalarEvent("", defaultProperties, ssPlain, c.lex.curStartPos, c.lex.curStartPos)
|
e = scalarEvent("", defaultProperties, ssPlain, c.lex.curStartPos, c.lex.curStartPos)
|
||||||
|
@ -1017,6 +1025,17 @@ proc afterPairValue(c: Context, e: var Event): bool =
|
||||||
c.popLevel()
|
c.popLevel()
|
||||||
return true
|
return true
|
||||||
|
|
||||||
|
proc emitCollectionKey(c: Context, e: var Event): bool =
|
||||||
|
debug("emitCollection key: pos = " & $c.keyCachePos & ", len = " & $c.keyCache.len)
|
||||||
|
yAssert(c.keyCachePos < c.keyCache.len)
|
||||||
|
e = move(c.keyCache[c.keyCachePos])
|
||||||
|
inc(c.keyCachePos)
|
||||||
|
if c.keyCachePos == len(c.keyCache):
|
||||||
|
c.keyCache.setLen(0)
|
||||||
|
c.keyCachePos = 0
|
||||||
|
c.popLevel()
|
||||||
|
return true
|
||||||
|
|
||||||
proc display*(p: YamlParser, event: Event): string =
|
proc display*(p: YamlParser, event: Event): string =
|
||||||
## Generate a representation of the given event with proper visualization of
|
## Generate a representation of the given event with proper visualization of
|
||||||
## anchor and tag (if any). The generated representation is conformant to the
|
## anchor and tag (if any). The generated representation is conformant to the
|
||||||
|
|
|
@ -457,15 +457,15 @@ proc doPresent(s: var YamlStream, target: PresenterTarget,
|
||||||
of ov1_2: target.append("%YAML 1.2" & newline)
|
of ov1_2: target.append("%YAML 1.2" & newline)
|
||||||
of ov1_1: target.append("%YAML 1.1" & newLine)
|
of ov1_1: target.append("%YAML 1.1" & newLine)
|
||||||
of ovNone: discard
|
of ovNone: discard
|
||||||
for prefix, handle in tagLib.handles():
|
for prefix, uri in tagLib.handles():
|
||||||
if handle == "!":
|
if prefix == "!":
|
||||||
if prefix != "!":
|
if uri != "!":
|
||||||
target.append("%TAG ! " & prefix & newline)
|
target.append("%TAG ! " & uri & newline)
|
||||||
elif handle == "!!":
|
elif prefix == "!!":
|
||||||
if prefix != yamlTagRepositoryPrefix:
|
if uri != yamlTagRepositoryPrefix:
|
||||||
target.append("%TAG !! " & prefix & newline)
|
target.append("%TAG !! " & uri & newline)
|
||||||
else:
|
else:
|
||||||
target.append("%TAG " & handle & ' ' & prefix & newline)
|
target.append("%TAG " & prefix & ' ' & uri & newline)
|
||||||
target.append("--- ")
|
target.append("--- ")
|
||||||
except:
|
except:
|
||||||
var e = newException(YamlPresenterOutputError, "")
|
var e = newException(YamlPresenterOutputError, "")
|
||||||
|
|
|
@ -14,13 +14,13 @@ type
|
||||||
Lexer* = object
|
Lexer* = object
|
||||||
cur*: Token
|
cur*: Token
|
||||||
curStartPos*, curEndPos*: Mark
|
curStartPos*, curEndPos*: Mark
|
||||||
|
flowDepth*: int
|
||||||
# recently read scalar or URI, if any
|
# recently read scalar or URI, if any
|
||||||
evaluated*: string
|
evaluated*: string
|
||||||
# internals
|
# internals
|
||||||
indentation: int
|
indentation: int
|
||||||
source: BaseLexer
|
source: BaseLexer
|
||||||
tokenStart: int
|
tokenStart: int
|
||||||
flowDepth: int
|
|
||||||
state, lineStartState, jsonEnablingState: State
|
state, lineStartState, jsonEnablingState: State
|
||||||
c: char
|
c: char
|
||||||
seenMultiline: bool
|
seenMultiline: bool
|
||||||
|
@ -90,10 +90,10 @@ const
|
||||||
|
|
||||||
UnknownIndentation* = int.low
|
UnknownIndentation* = int.low
|
||||||
|
|
||||||
proc currentIndentation*(lex: Lexer): int =
|
proc currentIndentation*(lex: Lexer): int {.locks: 0.} =
|
||||||
return lex.source.getColNumber(lex.source.bufpos) - 1
|
return lex.source.getColNumber(lex.source.bufpos) - 1
|
||||||
|
|
||||||
proc recentIndentation*(lex: Lexer): int =
|
proc recentIndentation*(lex: Lexer): int {.locks: 0.} =
|
||||||
return lex.indentation
|
return lex.indentation
|
||||||
|
|
||||||
# lexer source handling
|
# lexer source handling
|
||||||
|
@ -163,7 +163,7 @@ proc afterJsonEnablingToken(lex: var Lexer): bool {.raises: LexerError.}
|
||||||
proc lineIndentation(lex: var Lexer): bool {.raises: [].}
|
proc lineIndentation(lex: var Lexer): bool {.raises: [].}
|
||||||
proc lineDirEnd(lex: var Lexer): bool {.raises: [].}
|
proc lineDirEnd(lex: var Lexer): bool {.raises: [].}
|
||||||
proc lineDocEnd(lex: var Lexer): bool {.raises: [].}
|
proc lineDocEnd(lex: var Lexer): bool {.raises: [].}
|
||||||
proc atSuffix(lex: var Lexer): bool {.raises: [].}
|
proc atSuffix(lex: var Lexer): bool {.raises: [LexerError].}
|
||||||
proc streamEnd(lex: var Lexer): bool {.raises: [].}
|
proc streamEnd(lex: var Lexer): bool {.raises: [].}
|
||||||
{.pop.}
|
{.pop.}
|
||||||
|
|
||||||
|
@ -333,7 +333,7 @@ proc readPlainScalar(lex: var Lexer) =
|
||||||
while true:
|
while true:
|
||||||
lex.advance()
|
lex.advance()
|
||||||
case lex.c
|
case lex.c
|
||||||
of ' ':
|
of space:
|
||||||
lex.endToken()
|
lex.endToken()
|
||||||
let spaceStart = lex.source.bufpos - 2
|
let spaceStart = lex.source.bufpos - 2
|
||||||
block spaceLoop:
|
block spaceLoop:
|
||||||
|
@ -363,7 +363,7 @@ proc readPlainScalar(lex: var Lexer) =
|
||||||
lex.state = insideLine
|
lex.state = insideLine
|
||||||
break multilineLoop
|
break multilineLoop
|
||||||
break spaceLoop
|
break spaceLoop
|
||||||
of ' ': discard
|
of space: discard
|
||||||
else: break spaceLoop
|
else: break spaceLoop
|
||||||
of ':':
|
of ':':
|
||||||
if not lex.isPlainSafe():
|
if not lex.isPlainSafe():
|
||||||
|
@ -412,7 +412,7 @@ proc readPlainScalar(lex: var Lexer) =
|
||||||
break multilineLoop
|
break multilineLoop
|
||||||
of lsNewline: lex.endLine()
|
of lsNewline: lex.endLine()
|
||||||
newlines += 1
|
newlines += 1
|
||||||
while lex.c == ' ': lex.advance()
|
while lex.c in space: lex.advance()
|
||||||
if (lex.c == ':' and not lex.isPlainSafe()) or
|
if (lex.c == ':' and not lex.isPlainSafe()) or
|
||||||
lex.c == '#' or (lex.c in flowIndicators and
|
lex.c == '#' or (lex.c in flowIndicators and
|
||||||
lex.flowDepth > 0):
|
lex.flowDepth > 0):
|
||||||
|
@ -478,7 +478,9 @@ proc readBlockScalar(lex: var Lexer) =
|
||||||
|
|
||||||
block body:
|
block body:
|
||||||
# determining indentation and leading empty lines
|
# determining indentation and leading empty lines
|
||||||
var maxLeadingSpaces = 0
|
var
|
||||||
|
maxLeadingSpaces = 0
|
||||||
|
moreIndented = false
|
||||||
while true:
|
while true:
|
||||||
if indent == 0:
|
if indent == 0:
|
||||||
while lex.c == ' ': lex.advance()
|
while lex.c == ' ': lex.advance()
|
||||||
|
@ -506,16 +508,18 @@ proc readBlockScalar(lex: var Lexer) =
|
||||||
elif indent < maxLeadingSpaces:
|
elif indent < maxLeadingSpaces:
|
||||||
raise lex.generateError("Leading all-spaces line contains too many spaces")
|
raise lex.generateError("Leading all-spaces line contains too many spaces")
|
||||||
elif lex.currentIndentation() < indent: break body
|
elif lex.currentIndentation() < indent: break body
|
||||||
|
if lex.cur == Token.Folded and lex.c in space:
|
||||||
|
moreIndented = true
|
||||||
break
|
break
|
||||||
for i in countup(0, separationLines - 1):
|
for i in countup(0, separationLines - 1):
|
||||||
lex.evaluated.add('\l')
|
lex.evaluated.add('\l')
|
||||||
|
separationLines = if moreIndented: 1 else: 0
|
||||||
|
|
||||||
block content:
|
block content:
|
||||||
while true:
|
while true:
|
||||||
contentStart = lex.source.bufpos - 1
|
contentStart = lex.source.bufpos - 1
|
||||||
while lex.c notin lineEnd: lex.advance()
|
while lex.c notin lineEnd: lex.advance()
|
||||||
lex.evaluated.add(lex.source.buf[contentStart .. lex.source.bufpos - 2])
|
lex.evaluated.add(lex.source.buf[contentStart .. lex.source.bufpos - 2])
|
||||||
separationLines = 0
|
|
||||||
if lex.c == EndOfFile:
|
if lex.c == EndOfFile:
|
||||||
lex.state = streamEnd
|
lex.state = streamEnd
|
||||||
lex.streamEndAfterBlock()
|
lex.streamEndAfterBlock()
|
||||||
|
@ -524,7 +528,9 @@ proc readBlockScalar(lex: var Lexer) =
|
||||||
lex.endToken()
|
lex.endToken()
|
||||||
lex.endLine()
|
lex.endLine()
|
||||||
|
|
||||||
|
let oldMoreIndented = moreIndented
|
||||||
# empty lines and indentation of next line
|
# empty lines and indentation of next line
|
||||||
|
moreIndented = false
|
||||||
while true:
|
while true:
|
||||||
while lex.c == ' ' and lex.currentIndentation() < indent:
|
while lex.c == ' ' and lex.currentIndentation() < indent:
|
||||||
lex.advance()
|
lex.advance()
|
||||||
|
@ -541,7 +547,11 @@ proc readBlockScalar(lex: var Lexer) =
|
||||||
if lex.currentIndentation() < indent or
|
if lex.currentIndentation() < indent or
|
||||||
(indent == 0 and lex.dirEndFollows() or lex.docEndFollows()):
|
(indent == 0 and lex.dirEndFollows() or lex.docEndFollows()):
|
||||||
break content
|
break content
|
||||||
else: break
|
if lex.cur == Token.Folded and lex.c in space:
|
||||||
|
moreIndented = true
|
||||||
|
if not oldMoreIndented:
|
||||||
|
separationLines += 1
|
||||||
|
break
|
||||||
|
|
||||||
# line folding
|
# line folding
|
||||||
if lex.cur == Token.Literal:
|
if lex.cur == Token.Literal:
|
||||||
|
@ -552,6 +562,7 @@ proc readBlockScalar(lex: var Lexer) =
|
||||||
else:
|
else:
|
||||||
for i in countup(0, separationLines - 2):
|
for i in countup(0, separationLines - 2):
|
||||||
lex.evaluated.add('\l')
|
lex.evaluated.add('\l')
|
||||||
|
separationLines = if moreIndented: 1 else: 0
|
||||||
|
|
||||||
let markerFollows = lex.currentIndentation() == 0 and
|
let markerFollows = lex.currentIndentation() == 0 and
|
||||||
(lex.dirEndFollows() or lex.docEndFollows())
|
(lex.dirEndFollows() or lex.docEndFollows())
|
||||||
|
@ -718,16 +729,16 @@ proc basicInit(lex: var Lexer) =
|
||||||
|
|
||||||
# interface
|
# interface
|
||||||
|
|
||||||
proc lastScalarWasMultiline*(lex: Lexer): bool =
|
proc lastScalarWasMultiline*(lex: Lexer): bool {.locks: 0.} =
|
||||||
result = lex.seenMultiline
|
result = lex.seenMultiline
|
||||||
|
|
||||||
proc shortLexeme*(lex: Lexer): string =
|
proc shortLexeme*(lex: Lexer): string {.locks: 0.} =
|
||||||
return lex.source.buf[lex.tokenStart..lex.source.bufpos-2]
|
return lex.source.buf[lex.tokenStart..lex.source.bufpos-2]
|
||||||
|
|
||||||
proc fullLexeme*(lex: Lexer): string =
|
proc fullLexeme*(lex: Lexer): string {.locks: 0.} =
|
||||||
return lex.source.buf[lex.tokenStart - 1..lex.source.bufpos-2]
|
return lex.source.buf[lex.tokenStart - 1..lex.source.bufpos-2]
|
||||||
|
|
||||||
proc currentLine*(lex: Lexer): string =
|
proc currentLine*(lex: Lexer): string {.locks: 0.} =
|
||||||
return lex.source.getCurrentLine(false)
|
return lex.source.getCurrentLine(false)
|
||||||
|
|
||||||
proc next*(lex: var Lexer) =
|
proc next*(lex: var Lexer) =
|
||||||
|
@ -900,6 +911,7 @@ proc flowLineStart(lex: var Lexer): bool =
|
||||||
let lineStart = lex.source.bufpos
|
let lineStart = lex.source.bufpos
|
||||||
while lex.c == ' ': lex.advance()
|
while lex.c == ' ': lex.advance()
|
||||||
indent = lex.source.bufpos - lineStart
|
indent = lex.source.bufpos - lineStart
|
||||||
|
while lex.c in space: lex.advance()
|
||||||
if indent <= lex.indentation:
|
if indent <= lex.indentation:
|
||||||
raise lex.generateError("Too few indentation spaces (must surpass surrounding block level)")
|
raise lex.generateError("Too few indentation spaces (must surpass surrounding block level)")
|
||||||
lex.state = insideLine
|
lex.state = insideLine
|
||||||
|
@ -980,10 +992,8 @@ proc readAnchorName(lex: var Lexer) =
|
||||||
lex.startToken()
|
lex.startToken()
|
||||||
while true:
|
while true:
|
||||||
lex.advance()
|
lex.advance()
|
||||||
if lex.c notin tagShorthandChars + {'_'}: break
|
if lex.c in spaceOrLineEnd + flowIndicators: break
|
||||||
if lex.c notin spaceOrLineEnd + flowIndicators:
|
if lex.source.bufpos == lex.tokenStart + 1:
|
||||||
raise lex.generateError("Illegal character in anchor: " & escape("" & lex.c))
|
|
||||||
elif lex.source.bufpos == lex.tokenStart + 1:
|
|
||||||
raise lex.generateError("Anchor name must not be empty")
|
raise lex.generateError("Anchor name must not be empty")
|
||||||
lex.state = afterToken
|
lex.state = afterToken
|
||||||
|
|
||||||
|
@ -1052,7 +1062,7 @@ proc indentationSettingToken(lex: var Lexer): bool =
|
||||||
lex.indentation = cachedIntentation
|
lex.indentation = cachedIntentation
|
||||||
|
|
||||||
proc afterToken(lex: var Lexer): bool =
|
proc afterToken(lex: var Lexer): bool =
|
||||||
while lex.c == ' ': lex.advance()
|
while lex.c in space: lex.advance()
|
||||||
if lex.c in commentOrLineEnd:
|
if lex.c in commentOrLineEnd:
|
||||||
lex.endLine()
|
lex.endLine()
|
||||||
else:
|
else:
|
||||||
|
@ -1115,8 +1125,20 @@ proc lineDocEnd(lex: var Lexer): bool =
|
||||||
|
|
||||||
proc atSuffix(lex: var Lexer): bool =
|
proc atSuffix(lex: var Lexer): bool =
|
||||||
lex.startToken()
|
lex.startToken()
|
||||||
while lex.c in suffixChars: lex.advance()
|
lex.evaluated.setLen(0)
|
||||||
lex.evaluated = lex.fullLexeme()
|
var curStart = lex.tokenStart - 1
|
||||||
|
while true:
|
||||||
|
case lex.c
|
||||||
|
of suffixChars: lex.advance()
|
||||||
|
of '%':
|
||||||
|
if curStart <= lex.source.bufpos - 2:
|
||||||
|
lex.evaluated.add(lex.source.buf[curStart..lex.source.bufpos - 2])
|
||||||
|
lex.readHexSequence(2)
|
||||||
|
curStart = lex.source.bufpos
|
||||||
|
lex.advance()
|
||||||
|
else: break
|
||||||
|
if curStart <= lex.source.bufpos - 2:
|
||||||
|
lex.evaluated.add(lex.source.buf[curStart..lex.source.bufpos - 2])
|
||||||
lex.endToken()
|
lex.endToken()
|
||||||
lex.cur = Token.Suffix
|
lex.cur = Token.Suffix
|
||||||
lex.state = afterToken
|
lex.state = afterToken
|
||||||
|
|
|
@ -141,11 +141,11 @@ template constructScalarItem*(s: var YamlStream, i: untyped,
|
||||||
bind constructionError
|
bind constructionError
|
||||||
let i = s.next()
|
let i = s.next()
|
||||||
if i.kind != yamlScalar:
|
if i.kind != yamlScalar:
|
||||||
raise s.constructionError(i.startPos, "Expected scalar")
|
raise constructionError(s, i.startPos, "Expected scalar")
|
||||||
try: content
|
try: content
|
||||||
except YamlConstructionError as e: raise e
|
except YamlConstructionError as e: raise e
|
||||||
except Exception:
|
except Exception:
|
||||||
var e = s.constructionError(i.startPos,
|
var e = constructionError(s, i.startPos,
|
||||||
"Cannot construct to " & name(t) & ": " & item.scalarContent &
|
"Cannot construct to " & name(t) & ": " & item.scalarContent &
|
||||||
"; error: " & getCurrentExceptionMsg())
|
"; error: " & getCurrentExceptionMsg())
|
||||||
e.parent = getCurrentException()
|
e.parent = getCurrentException()
|
||||||
|
@ -447,7 +447,7 @@ proc representObject*[T](value: seq[T]|set[T], ts: TagStyle,
|
||||||
c: SerializationContext, tag: TagId) =
|
c: SerializationContext, tag: TagId) =
|
||||||
## represents a Nim seq as YAML sequence
|
## represents a Nim seq as YAML sequence
|
||||||
let childTagStyle = if ts == tsRootOnly: tsNone else: ts
|
let childTagStyle = if ts == tsRootOnly: tsNone else: ts
|
||||||
c.put(startSeqEvent(csBlock, tag))
|
c.put(startSeqEvent(tag = tag))
|
||||||
for item in value:
|
for item in value:
|
||||||
representChild(item, childTagStyle, c)
|
representChild(item, childTagStyle, c)
|
||||||
c.put(endSeqEvent())
|
c.put(endSeqEvent())
|
||||||
|
@ -478,7 +478,7 @@ proc representObject*[I, T](value: array[I, T], ts: TagStyle,
|
||||||
c: SerializationContext, tag: TagId) =
|
c: SerializationContext, tag: TagId) =
|
||||||
## represents a Nim array as YAML sequence
|
## represents a Nim array as YAML sequence
|
||||||
let childTagStyle = if ts == tsRootOnly: tsNone else: ts
|
let childTagStyle = if ts == tsRootOnly: tsNone else: ts
|
||||||
c.put(startSeqEvent(tag))
|
c.put(startSeqEvent(tag = tag))
|
||||||
for item in value:
|
for item in value:
|
||||||
representChild(item, childTagStyle, c)
|
representChild(item, childTagStyle, c)
|
||||||
c.put(endSeqEvent())
|
c.put(endSeqEvent())
|
||||||
|
@ -515,7 +515,7 @@ proc representObject*[K, V](value: Table[K, V], ts: TagStyle,
|
||||||
c: SerializationContext, tag: TagId) =
|
c: SerializationContext, tag: TagId) =
|
||||||
## represents a Nim Table as YAML mapping
|
## represents a Nim Table as YAML mapping
|
||||||
let childTagStyle = if ts == tsRootOnly: tsNone else: ts
|
let childTagStyle = if ts == tsRootOnly: tsNone else: ts
|
||||||
c.put(startMapEvent(tag))
|
c.put(startMapEvent(tag = tag))
|
||||||
for key, value in value.pairs:
|
for key, value in value.pairs:
|
||||||
representChild(key, childTagStyle, c)
|
representChild(key, childTagStyle, c)
|
||||||
representChild(value, childTagStyle, c)
|
representChild(value, childTagStyle, c)
|
||||||
|
@ -559,7 +559,7 @@ proc constructObject*[K, V](s: var YamlStream, c: ConstructionContext,
|
||||||
proc representObject*[K, V](value: OrderedTable[K, V], ts: TagStyle,
|
proc representObject*[K, V](value: OrderedTable[K, V], ts: TagStyle,
|
||||||
c: SerializationContext, tag: TagId) =
|
c: SerializationContext, tag: TagId) =
|
||||||
let childTagStyle = if ts == tsRootOnly: tsNone else: ts
|
let childTagStyle = if ts == tsRootOnly: tsNone else: ts
|
||||||
c.put(startSeqEvent(tag))
|
c.put(startSeqEvent(tag = tag))
|
||||||
for key, value in value.pairs:
|
for key, value in value.pairs:
|
||||||
c.put(startMapEvent())
|
c.put(startMapEvent())
|
||||||
representChild(key, childTagStyle, c)
|
representChild(key, childTagStyle, c)
|
||||||
|
@ -692,13 +692,13 @@ proc markAsFound(i: int, matched: NimNode): NimNode {.compileTime.} =
|
||||||
|
|
||||||
proc ifNotTransient(o, field: NimNode,
|
proc ifNotTransient(o, field: NimNode,
|
||||||
content: openarray[NimNode],
|
content: openarray[NimNode],
|
||||||
elseError: bool, s: NimNode, tName, fName: string = ""):
|
elseError: bool, s: NimNode, m: NimNode, tName, fName: string = ""):
|
||||||
NimNode {.compileTime.} =
|
NimNode {.compileTime.} =
|
||||||
var stmts = newStmtList(content)
|
var stmts = newStmtList(content)
|
||||||
if elseError:
|
if elseError:
|
||||||
result = quote do:
|
result = quote do:
|
||||||
 when `o`.`field`.hasCustomPragma(transient):
-raise constructionError(`s`, "While constructing " & `tName` &
+raise constructionError(`s`, `m`, "While constructing " & `tName` &
 ": Field \"" & `fName` & "\" is transient and may not occur in input")
 else:
 `stmts`

@@ -804,12 +804,12 @@ macro constructFieldValue(t: typedesc, stream: untyped,
 var ifStmt = newIfStmt((cond: discTest, body: newStmtList(
 newCall("constructChild", stream, context, field))))
 ifStmt.add(newNimNode(nnkElse).add(newNimNode(nnkRaiseStmt).add(
-newCall(bindSym("constructionError"), stream,
+newCall(bindSym("constructionError"), stream, m,
 infix(newStrLitNode("Field " & $item & " not allowed for " &
 $child[0] & " == "), "&", prefix(discriminant, "$"))))))
 ob.add(ifNotTransient(o, item,
 [checkDuplicate(stream, tName, $item, fieldIndex, matched, m),
-ifStmt, markAsFound(fieldIndex, matched)], true, stream, tName,
+ifStmt, markAsFound(fieldIndex, matched)], true, stream, m, tName,
 $item))
 caseStmt.add(ob)
 else:

@@ -819,7 +819,7 @@ macro constructFieldValue(t: typedesc, stream: untyped,
 ob.add(ifNotTransient(o, child,
 [checkDuplicate(stream, tName, $child, fieldIndex, matched, m),
 newCall("constructChild", stream, context, field),
-markAsFound(fieldIndex, matched)], true, stream, tName, $child))
+markAsFound(fieldIndex, matched)], true, stream, m, tName, $child))
 caseStmt.add(ob)
 inc(fieldIndex)
 caseStmt.add(newNimNode(nnkElse).add(newNimNode(nnkWhenStmt).add(

@@ -942,9 +942,9 @@ macro genRepresentObject(t: typedesc, value, childTagStyle: typed) =
 fieldName = $child[0]
 fieldAccessor = newDotExpr(value, newIdentNode(fieldName))
 result.add(quote do:
-c.put(startMapEvent(yTagQuestionMark, yAnchorNone))
+c.put(startMapEvent())
-c.put(scalarEvent(`fieldName`, if `childTagStyle` == tsNone:
+c.put(scalarEvent(`fieldName`, tag = if `childTagStyle` == tsNone:
-yTagQuestionMark else: yTagNimField, yAnchorNone))
+yTagQuestionMark else: yTagNimField))
 representChild(`fieldAccessor`, `childTagStyle`, c)
 c.put(endMapEvent())
 )

@@ -973,9 +973,9 @@ macro genRepresentObject(t: typedesc, value, childTagStyle: typed) =
 itemAccessor = newDotExpr(value, newIdentNode(name))
 curStmtList.add(quote do:
 when not `itemAccessor`.hasCustomPragma(transient):
-c.put(startMapEvent(yTagQuestionMark, yAnchorNone))
+c.put(startMapEvent())
-c.put(scalarEvent(`name`, if `childTagStyle` == tsNone:
+c.put(scalarEvent(`name`, tag = if `childTagStyle` == tsNone:
-yTagQuestionMark else: yTagNimField, yAnchorNone))
+yTagQuestionMark else: yTagNimField))
 representChild(`itemAccessor`, `childTagStyle`, c)
 c.put(endMapEvent())
 )

@@ -990,7 +990,7 @@ macro genRepresentObject(t: typedesc, value, childTagStyle: typed) =
 childAccessor = newDotExpr(value, newIdentNode(name))
 result.add(quote do:
 when not `childAccessor`.hasCustomPragma(transient):
-when bool(`isVO`): c.put(startMapEvent(yTagQuestionMark, yAnchorNone))
+when bool(`isVO`): c.put(startMapEvent())
 c.put(scalarEvent(`name`, if `childTagStyle` == tsNone:
 yTagQuestionMark else: yTagNimField, yAnchorNone))
 representChild(`childAccessor`, `childTagStyle`, c)

@@ -1002,8 +1002,8 @@ proc representObject*[O: object](value: O, ts: TagStyle,
 c: SerializationContext, tag: TagId) =
 ## represents a Nim object or tuple as YAML mapping
 let childTagStyle = if ts == tsRootOnly: tsNone else: ts
-when isVariantObject(getType(O)): c.put(startSeqEvent(csBlock, (yAnchorNone, tag)))
+when isVariantObject(getType(O)): c.put(startSeqEvent(tag = tag))
-else: c.put(startMapEvent(csBlock, (yAnchorNone, tag)))
+else: c.put(startMapEvent(tag = tag))
 genRepresentObject(O, value, childTagStyle)
 when isVariantObject(getType(O)): c.put(endSeqEvent())
 else: c.put(endMapEvent())

@@ -1012,10 +1012,10 @@ proc representObject*[O: tuple](value: O, ts: TagStyle,
 c: SerializationContext, tag: TagId) =
 let childTagStyle = if ts == tsRootOnly: tsNone else: ts
 var fieldIndex = 0'i16
-c.put(startMapEvent(tag, yAnchorNone))
+c.put(startMapEvent(tag = tag))
 for name, fvalue in fieldPairs(value):
-c.put(scalarEvent(name, if childTagStyle == tsNone:
+c.put(scalarEvent(name, tag = if childTagStyle == tsNone:
-yTagQuestionMark else: yTagNimField, yAnchorNone))
+yTagQuestionMark else: yTagNimField))
 representChild(fvalue, childTagStyle, c)
 inc(fieldIndex)
 c.put(endMapEvent())
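The representation hunks above replace positional (tag, anchor) arguments with event constructors that take named, defaulted parameters, so yTagQuestionMark and yAnchorNone no longer have to be spelled out at every call site. A minimal sketch of the resulting call style, assuming "import yaml" exposes the constructors and BufferYamlStream used in this diff (illustrative only, not part of the commit):

  import yaml

  var bys = newBufferYamlStream()
  bys.put(startMapEvent())                          # tag and anchor now default
  bys.put(scalarEvent("name", tag = yTagNimField))  # named parameter instead of positional
  bys.put(scalarEvent("NimYAML"))                   # plain scalar, default tag
  bys.put(endMapEvent())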
@@ -1041,7 +1041,7 @@ proc representObject*[O: enum](value: O, ts: TagStyle,

 proc yamlTag*[O](T: typedesc[ref O]): TagId {.inline, raises: [].} = yamlTag(O)

-macro constructImplicitVariantObject(s, c, r, possibleTagIds: untyped,
+macro constructImplicitVariantObject(s, m, c, r, possibleTagIds: untyped,
 t: typedesc) =
 let tDesc = getType(getType(t)[1])
 yAssert tDesc.kind == nnkObjectTy

@@ -1071,7 +1071,7 @@ macro constructImplicitVariantObject(s, c, r, possibleTagIds: untyped,
 branch.add(branchContent)
 result.add(branch)
 let raiseStmt = newNimNode(nnkRaiseStmt).add(
-newCall(bindSym("constructionError"), s,
+newCall(bindSym("constructionError"), s, m,
 infix(newStrLitNode("This value type does not map to any field in " &
 getTypeImpl(t)[1].repr & ": "), "&",
 newCall("uri", newIdentNode("serializationTagLibrary"),
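Both constructFieldValue and constructImplicitVariantObject now thread a mark (m) through to constructionError, so construction failures can report where in the input they occurred. A hedged sketch of the internal call shape after this change, using only names visible in the diff (constructionError may not be public API, and the exact mark type is an assumption):

  import yaml

  proc expectMapping(s: var YamlStream) =
    # Illustrative: raise a construction error carrying the event's position.
    let event = s.peek()
    if event.kind != yamlStartMap:
      raise s.constructionError(event.startPos,
          "expected a mapping, got " & $event.kind)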
@@ -1114,7 +1114,7 @@ proc constructChild*[T](s: var YamlStream, c: ConstructionContext,
 var possibleTagIds = newSeq[TagId]()
 case item.kind
 of yamlScalar:
-case item.scalarTag
+case item.scalarProperties.tag
 of yTagQuestionMark:
 case guessType(item.scalarContent)
 of yTypeInteger:

@@ -1137,19 +1137,19 @@ proc constructChild*[T](s: var YamlStream, c: ConstructionContext,
 of yTagExclamationMark:
 possibleTagIds.add(yamlTag(string))
 else:
-possibleTagIds.add(item.scalarTag)
+possibleTagIds.add(item.scalarProperties.tag)
 of yamlStartMap:
-if item.mapTag in [yTagQuestionMark, yTagExclamationMark]:
+if item.mapProperties.tag in [yTagQuestionMark, yTagExclamationMark]:
 raise s.constructionError(item.startPos,
 "Complex value of implicit variant object type must have a tag.")
-possibleTagIds.add(item.mapTag)
+possibleTagIds.add(item.mapProperties.tag)
 of yamlStartSeq:
-if item.seqTag in [yTagQuestionMark, yTagExclamationMark]:
+if item.seqProperties.tag in [yTagQuestionMark, yTagExclamationMark]:
 raise s.constructionError(item.startPos,
 "Complex value of implicit variant object type must have a tag.")
-possibleTagIds.add(item.seqTag)
+possibleTagIds.add(item.seqProperties.tag)
 else: internalError("Unexpected item kind: " & $item.kind)
-constructImplicitVariantObject(s, c, result, possibleTagIds, T)
+constructImplicitVariantObject(s, item.startPos, c, result, possibleTagIds, T)
 else:
 case item.kind
 of yamlScalar:
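The pattern in these hunks is mechanical: the per-kind scalarTag/mapTag/seqTag and anchor fields are folded into scalarProperties, mapProperties and seqProperties, each carrying the node's tag and anchor. A small hedged sketch of reading a tag under the new layout (field names taken from the diff; assumes "import yaml"):

  import yaml

  proc nodeTag(e: Event): TagId =
    # Tag of the node this event starts; non-node events carry no tag.
    case e.kind
    of yamlScalar:   result = e.scalarProperties.tag
    of yamlStartMap: result = e.mapProperties.tag
    of yamlStartSeq: result = e.seqProperties.tag
    else:            result = yTagQuestionMark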
@@ -1197,7 +1197,7 @@ proc constructChild*[T](s: var YamlStream, c: ConstructionContext,
 ## constructs an optional value. A value with a !!null tag will be loaded
 ## an empty value.
 let event = s.peek()
-if event.kind == yamlScalar and event.scalarTag == yTagNull:
+if event.kind == yamlScalar and event.scalarProperties.tag == yTagNull:
 result = none(T)
 discard s.next()
 else:

@@ -1250,9 +1250,9 @@ proc constructChild*[O](s: var YamlStream, c: ConstructionContext,
 anchor = yAnchorNone

 case e.kind
-of yamlScalar: removeAnchor(e.scalarAnchor)
+of yamlScalar: removeAnchor(e.scalarProperties.anchor)
-of yamlStartMap: removeAnchor(e.mapAnchor)
+of yamlStartMap: removeAnchor(e.mapProperties.anchor)
-of yamlStartSeq: removeAnchor(e.seqAnchor)
+of yamlStartSeq: removeAnchor(e.seqProperties.anchor)
 else: internalError("Unexpected event kind: " & $e.kind)
 s.peek = e
 try: constructChild(s, c, result[])

@@ -1297,17 +1297,17 @@ proc representChild*[O](value: ref O, ts: TagStyle, c: SerializationContext) =
 if c.refs.hasKey(p):
 val = c.refs.getOrDefault(p)
 if val == yAnchorNone:
-val = c.nextAnchorId
+val = c.nextAnchorId.Anchor
 c.refs[p] = val
-nextAnchor(c, len(c.nextAnchorId) - 1)
+nextAnchor(c.nextAnchorId, len(c.nextAnchorId) - 1)
 c.put(aliasEvent(val))
 return
 if c.style == asAlways:
-val = c.nextAnchorId
+val = c.nextAnchorId.Anchor
 when defined(JS):
 {.emit: [c, ".refs.set(", p, ", ", val, ");"].}
 else: c.refs[p] = val
-nextAnchor(c, len(c.nextAnchorId) - 1)
+nextAnchor(c.nextAnchorId, len(c.nextAnchorId) - 1)
 else: c.refs[p] = yAnchorNone
 let
 a = if c.style == asAlways: val else: cast[Anchor](p)

@@ -1317,15 +1317,15 @@ proc representChild*[O](value: ref O, ts: TagStyle, c: SerializationContext) =
 var ex = e
 case ex.kind
 of yamlStartMap:
-ex.mapAnchor = a
+ex.mapProperties.anchor = a
-if ts == tsNone: ex.mapTag = yTagQuestionMark
+if ts == tsNone: ex.mapProperties.tag = yTagQuestionMark
 of yamlStartSeq:
-ex.seqAnchor = a
+ex.seqProperties.anchor = a
-if ts == tsNone: ex.seqTag = yTagQuestionMark
+if ts == tsNone: ex.seqProperties.tag = yTagQuestionMark
 of yamlScalar:
-ex.scalarAnchor = a
+ex.scalarProperties.anchor = a
 if ts == tsNone and guessType(ex.scalarContent) != yTypeNull:
-ex.scalarTag = yTagQuestionMark
+ex.scalarProperties.tag = yTagQuestionMark
 else: discard
 c.put = origPut
 c.put(ex)

@@ -1400,14 +1400,16 @@ proc loadMultiDoc*[K](input: Stream | string, target: var seq[K]) =
 var parser: YamlParser
 parser.init(serializationTagLibrary)
 var events = parser.parse(input)
+discard events.next() # stream start
 try:
-while not events.finished():
+while events.peek().kind == yamlStartDoc:
 var item: K
 construct(events, item)
 target.add(item)
+discard events.next() # stream end
 except YamlConstructionError:
 var e = (ref YamlConstructionError)(getCurrentException())
-discard events.getLastTokenContext(e.line, e.column, e.lineContent)
+discard events.getLastTokenContext(e.lineContent)
 raise e
 except YamlStreamError:
 let e = (ref YamlStreamError)(getCurrentException())
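loadMultiDoc now drives the event stream explicitly: it discards the stream-start event, constructs one value per yamlStartDoc, and finally discards the stream-end event. Call sites stay the same; a hedged usage sketch (assuming "import yaml" re-exports loadMultiDoc as shown above):

  import yaml

  let text = "--- 1\n--- 2\n--- 3\n"
  var numbers: seq[int]
  loadMultiDoc(text, numbers)   # one element per YAML document
  assert numbers == @[1, 2, 3]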
@@ -1430,9 +1432,11 @@ proc represent*[T](value: T, ts: TagStyle = tsRootOnly,
 var context = newSerializationContext(a, proc(e: Event) =
 bys.put(e)
 )
+bys.put(startStreamEvent())
 bys.put(startDocEvent())
 representChild(value, ts, context)
 bys.put(endDocEvent())
+bys.put(endStreamEvent())
 if a == asTidy:
 for item in bys.mitems():
 case item.kind
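represent now brackets its output with explicit stream-start and stream-end events, mirroring what the parser emits. A hedged sketch of the envelope a consumer of the buffered stream should expect (event names as in this diff; a multi-document stream would simply repeat the startDoc..endDoc pair inside one stream):

  import yaml

  var bys = newBufferYamlStream()
  bys.put(startStreamEvent())    # new: explicit stream start
  bys.put(startDocEvent())
  bys.put(scalarEvent("hello"))
  bys.put(endDocEvent())
  bys.put(endStreamEvent())      # new: explicit stream end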
@@ -31,7 +31,7 @@ type
 ## and is not required to check for it. The procs in this module will
 ## always yield a well-formed ``YamlStream`` and expect it to be
 ## well-formed if they take it as input parameter.
-nextImpl*: proc(s: YamlStream, e: var Event): bool
+nextImpl*: proc(s: YamlStream, e: var Event): bool {.gcSafe.}
 lastTokenContextImpl*:
 proc(s: YamlStream, lineContent: var string): bool {.raises: [].}
 peeked: bool

@@ -55,15 +55,15 @@ proc basicInit*(s: YamlStream, lastTokenContextImpl:

 when not defined(JS):
 type IteratorYamlStream = ref object of YamlStream
-backend: iterator(): Event
+backend: iterator(): Event {.gcSafe.}

-proc initYamlStream*(backend: iterator(): Event): YamlStream
+proc initYamlStream*(backend: iterator(): Event {.gcSafe.}): YamlStream
 {.raises: [].} =
 ## Creates a new ``YamlStream`` that uses the given iterator as backend.
 result = new(IteratorYamlStream)
 result.basicInit()
 IteratorYamlStream(result).backend = backend
-result.nextImpl = proc(s: YamlStream, e: var Event): bool =
+result.nextImpl = proc(s: YamlStream, e: var Event): bool {.gcSafe.} =
 e = IteratorYamlStream(s).backend()
 result = true
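YamlStream backends are now declared gcSafe, both in the nextImpl field and in the iterator accepted by initYamlStream. A hedged sketch of a conforming backend (anonymous closure-iterator syntax and the finished() helper used earlier in this diff are assumptions):

  import yaml

  let backend = iterator(): Event {.gcSafe.} =
    # Replays a fixed sequence of events; a real backend would produce them lazily.
    yield startStreamEvent()
    yield startDocEvent()
    yield scalarEvent("generated")
    yield endDocEvent()
    yield endStreamEvent()

  var s = initYamlStream(backend)
  while not s.finished():
    echo s.next().kind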
@@ -86,7 +86,7 @@ proc newBufferYamlStream*(): BufferYamlStream not nil =
 proc put*(bys: BufferYamlStream, e: Event) {.raises: [].} =
 bys.buf.add(e)

-proc next*(s: YamlStream): Event {.raises: [YamlStreamError].} =
+proc next*(s: YamlStream): Event {.raises: [YamlStreamError], gcSafe.} =
 ## Get the next item of the stream. Requires ``finished(s) == true``.
 ## If the backend yields an exception, that exception will be encapsulated
 ## into a ``YamlStreamError``, which will be raised.