mirror of https://github.com/status-im/NimYAML.git
fastparse: made it pass the parsing tests

* Scan for implicit map key as soon as a property is encountered
* Proper handling of node properties throughout parser
* Fixed an issue where plain scalars sometimes contained garbage when yielded
* Catch reserved chars @ and `
* Fixed some test cases
* Made parser test cases use fastparse
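The last two bullet points are what reshapes test/parsing.nim below: the local startDoc/scalar/startMap helper procs are deleted, the expected streams are written with the event constructors (startDocEvent, scalarEvent, startSeqEvent, ...), and the ensure template feeds its input through fastparse instead of newParser/parse. A minimal, hypothetical sketch of that usage pattern in Nim, using only names visible in the diff plus an assumed tag-library constructor:

import streams
import "../yaml"

# Hypothetical driver mirroring the reworked ensure template: run a snippet
# through the fastparse backend and walk the events it yields. fastparse,
# TagLibrary and the event kind field appear in the diff; the tag-library
# constructor below is an assumption, not taken from this commit.
proc dumpEvents(tagLib: TagLibrary, input: string) =
  var events = fastparse(tagLib, newStringStream(input))
  for event in events():              # iterated the same way ensure() does
    echo event.kind

when isMainModule:
  var tagLib = initCoreTagLibrary()   # assumed constructor name
  dumpEvents(tagLib, "- a\n- b")      # doc/seq start, two scalars, seq/doc end

Note also that the expected anchor ids change in places (for example 0.AnchorId becomes 2.AnchorId for the repeated &a in "Anchors in sequence"), presumably because every anchor occurrence now takes the next value of nextAnchorId.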
This commit is contained in:
parent 7f8afef3d4
commit 971218314e
@@ -63,10 +63,13 @@ template yieldLevelEnd() {.dirty.} =
     anchor = yAnchorNone
     yield endMapEvent()
   of fplScalar:
-    applyObjectProperties()
     yield scalarEvent(content, tag, anchor)
+    tag = yTagQuestionMark
+    anchor = yAnchorNone
   of fplUnknown:
-    yield scalarEvent("")
+    yield scalarEvent("", tag, anchor)
+    tag = yTagQuestionMark
+    anchor = yAnchorNone

 template handleLineEnd(insideDocument: bool) {.dirty.} =
   case lexer.buf[lexer.bufpos]
@@ -99,31 +102,24 @@ template handleObjectEnd(nextState: FastParseState) {.dirty.} =
   of fplUnknown, fplScalar:
     assert(false)

-template handleObjectStart(k: YamlStreamEventKind, isFlow: bool) {.dirty.} =
+template handleObjectStart(k: YamlStreamEventKind) {.dirty.} =
   assert(level.kind == fplUnknown)
   when k == yamlStartMap:
-    when isFlow:
-      yield startMapEvent(tag, anchor)
-    else:
-      yield startMapEvent(objectTag, objectAnchor)
-    debug("started map at " & $indentation)
+    yield startMapEvent(tag, anchor)
+    debug("started map at " & (if level.indentation == -1: $indentation else:
+          $level.indentation))
     level.kind = fplMapKey
   else:
-    when isFlow:
-      yield startSeqEvent(tag, anchor)
-    else:
-      yield startSeqEvent(objectTag, objectAnchor)
-    debug("started sequence at " & $indentation)
+    yield startSeqEvent(tag, anchor)
+    debug("started sequence at " & (if level.indentation == -1: $indentation else:
+          $level.indentation))
     level.kind = fplSequence
-  when isFlow:
-    tag = yTagQuestionmark
-    anchor = yAnchorNone
-  else:
-    objectTag = yTagQuestionmark
-    objectAnchor = yAnchorNone
-  level.indentation = indentation
+  tag = yTagQuestionmark
+  anchor = yAnchorNone
+  if level.indentation == -1:
+    level.indentation = indentation
   ancestry.add(level)
-  level.kind = fplUnknown
+  level = FastParseLevel(kind: fplUnknown, indentation: -1)

 template closeMoreIndentedLevels() {.dirty.} =
   while ancestry.len > 0:
@@ -145,23 +141,22 @@ template closeEverything() {.dirty.} =
 template handleBlockSequenceIndicator() {.dirty.} =
   case level.kind
   of fplUnknown:
-    handleObjectStart(yamlStartSequence, false)
+    handleObjectStart(yamlStartSequence)
   of fplSequence:
     if level.indentation != indentation:
       raiseError("Invalid indentation of block sequence indicator",
                  lexer.bufpos)
     ancestry.add(level)
-    level.kind = fplUnknown
+    level = FastParseLevel(kind: fplUnknown, indentation: -1)
   else:
     raiseError("Illegal sequence item in map")
   lexer.skipWhitespace()
   indentation = lexer.getColNumber(lexer.bufpos)
-  level.indentation = indentation

 template handleMapKeyIndicator() {.dirty.} =
   case level.kind
   of fplUnknown:
-    handleObjectStart(yamlStartMap, false)
+    handleObjectStart(yamlStartMap)
   of fplMapValue:
     if level.indentation != indentation:
       raiseError("Invalid indentation of map key indicator",
@@ -169,26 +164,31 @@ template handleMapKeyIndicator() {.dirty.} =
       yield scalarEvent("", yTagQuestionmark, yAnchorNone)
     level.kind = fplMapKey
     ancestry.add(level)
-    level.kind = fplUnknown
+    level = FastParseLevel(kind: fplUnknown, indentation: -1)
   of fplMapKey:
     if level.indentation != indentation:
       raiseError("Invalid indentation of map key indicator",
                  lexer.bufpos)
     ancestry.add(level)
-    level.kind = fplUnknown
+    level = FastParseLevel(kind: fplUnknown, indentation: -1)
   of fplSequence:
     raiseError("Unexpected map key indicator (expected '- ')")
   of fplScalar:
     raiseError("Unexpected map key indicator (expected multiline scalar end)")
   lexer.skipWhitespace()
   indentation = lexer.getColNumber(lexer.bufpos)
-  level.indentation = indentation

 template handleMapValueIndicator() {.dirty.} =
   case level.kind
   of fplUnknown:
-    handleObjectStart(yamlStartMap, false)
-    yield scalarEvent("", yTagQuestionmark, yAnchorNone)
+    if level.indentation == -1:
+      handleObjectStart(yamlStartMap)
+      yield scalarEvent("", yTagQuestionmark, yAnchorNone)
+    else:
+      yield scalarEvent("", tag, anchor)
+      tag = yTagQuestionmark
+      anchor = yAnchorNone
+    ancestry[ancestry.high].kind = fplMapValue
   of fplMapKey:
     if level.indentation != indentation:
       raiseError("Invalid indentation of map key indicator",
@@ -196,32 +196,19 @@ template handleMapValueIndicator() {.dirty.} =
       yield scalarEvent("", yTagQuestionmark, yAnchorNone)
     level.kind = fplMapValue
     ancestry.add(level)
-    level.kind = fplUnknown
+    level = FastParseLevel(kind: fplUnknown, indentation: -1)
   of fplMapValue:
     if level.indentation != indentation:
       raiseError("Invalid indentation of map key indicator",
                  lexer.bufpos)
     ancestry.add(level)
-    level.kind = fplUnknown
+    level = FastParseLevel(kind: fplUnknown, indentation: -1)
   of fplSequence:
     raiseError("Unexpected map value indicator (expected '- ')")
   of fplScalar:
     raiseError("Unexpected map value indicator (expected multiline scalar end)")
   lexer.skipWhitespace()
   indentation = lexer.getColNumber(lexer.bufpos)
-  level.indentation = indentation
-
-template propsToObjectProps() {.dirty.} =
-  if objectTag == yTagQuestionmark:
-    objectTag = tag
-    tag = yTagQuestionmark
-  elif tag != yTagQuestionMark:
-    raiseError("Only one tag is allowed per node")
-  if objectAnchor == yAnchorNone:
-    objectAnchor = anchor
-    anchor = yAnchorNone
-  elif anchor != yAnchorNone:
-    raiseError("Only one anchor is allowed per node")

 template initDocValues() {.dirty.} =
   shorthands = initTable[string, string]()
@@ -229,26 +216,9 @@ template initDocValues() {.dirty.} =
   shorthands["!"] = "!"
   shorthands["!!"] = "tag:yaml.org,2002:"
   nextAnchorId = 0.AnchorId
-  level = FastParseLevel(kind: fplUnknown, indentation: 0)
+  level = FastParseLevel(kind: fplUnknown, indentation: -1)
   tag = yTagQuestionmark
-  objectTag = yTagQuestionmark
   anchor = yAnchorNone
-  objectAnchor = yAnchorNone
-
-template applyObjectProperties() {.dirty.} =
-  if objectTag != yTagQuestionmark:
-    if tag != yTagQuestionmark:
-      debug("tag = " & $tag & ", object = " & $objectTag)
-      raiseError("Only one tag is allowed per node")
-    else:
-      tag = objectTag
-    objectTag = yTagQuestionmark
-  if objectAnchor != yAnchorNone:
-    if anchor != yAnchorNone:
-      raiseError("Only one anchor is allowed per node")
-    else:
-      anchor = objectAnchor
-    objectAnchor = yAnchorNone

 template handleTagHandle() {.dirty.} =
   if level.kind != fplUnknown:
@@ -309,43 +279,47 @@ template leaveFlowLevel() {.dirty.} =
   handleObjectEnd(fpFlowAfterObject)

 template handlePossibleMapStart() {.dirty.} =
-  var flowDepth = 0
-  for p in countup(lexer.bufpos, lexer.bufpos + 1024):
-    case lexer.buf[p]
-    of ':':
-      if flowDepth == 0 and lexer.buf[p + 1] in spaceOrLineEnd:
-        handleObjectStart(yamlStartMap, false)
-      break
-    of lineEnd:
-      break
-    of '[', '{':
-      flowDepth.inc()
-    of '}', ']':
-      flowDepth.inc(-1)
-    of '?':
-      if flowDepth == 0: break
-    of '#':
-      if lexer.buf[p - 1] in space: break
-    else:
-      discard
+  if level.indentation == -1:
+    var flowDepth = 0
+    for p in countup(lexer.bufpos, lexer.bufpos + 1024):
+      case lexer.buf[p]
+      of ':':
+        if flowDepth == 0 and lexer.buf[p + 1] in spaceOrLineEnd:
+          handleObjectStart(yamlStartMap)
+          break
+      of lineEnd:
+        break
+      of '[', '{':
+        flowDepth.inc()
+      of '}', ']':
+        flowDepth.inc(-1)
+      of '?':
+        if flowDepth == 0: break
+      of '#':
+        if lexer.buf[p - 1] in space:
+          break
+      else:
+        discard
+    if level.indentation == -1:
+      level.indentation = indentation

 template handleBlockItemStart() {.dirty.} =
   case level.kind
   of fplUnknown:
-    discard
+    handlePossibleMapStart()
   of fplSequence:
     raiseError("Unexpected token (expected block sequence indicator)",
                lexer.bufpos)
   of fplMapKey:
     ancestry.add(level)
-    level.kind = fplUnknown
+    level = FastParseLevel(kind: fplUnknown, indentation: indentation)
   of fplMapValue:
     yield scalarEvent("", tag, anchor)
     tag = yTagQuestionmark
     anchor = yAnchorNone
     level.kind = fplMapKey
     ancestry.add(level)
-    level.kind = fplUnknown
+    level = FastParseLevel(kind: fplUnknown, indentation: indentation)
   of fplScalar:
     assert(false)

@@ -815,10 +789,8 @@ template anchorName(lexer: FastLexer, content: var string) =
     lexer.bufpos.inc()
     let c = lexer.buf[lexer.bufpos]
     case c
-    of spaceOrLineEnd:
+    of spaceOrLineEnd, '[', ']', '{', '}', ',':
       break
-    of '[', ']', '{', '}', ',':
-      raiseError("Illegal character in anchor", lexer.bufpos)
     else:
       content.add(c)

@@ -998,8 +970,8 @@ proc fastparse*(tagLib: TagLibrary, s: Stream): YamlStream =
     anchors: Table[string, AnchorId]
     nextAnchorId: AnchorId
     content: string
-    tag, objectTag: TagId
-    anchor, objectAnchor: AnchorId
+    tag: TagId
+    anchor: AnchorId
     ancestry = newSeq[FastParseLevel]()
     level: FastParseLevel
     indentation: int
@@ -1040,12 +1012,30 @@ proc fastparse*(tagLib: TagLibrary, s: Stream): YamlStream =
         lexer.finishLine()
         handleLineEnd(false)
       of ' ', '\t':
-        lexer.bufpos.inc()
+        while true:
+          lexer.bufpos.inc()
+          case lexer.buf[lexer.bufpos]
+          of ' ', '\t':
+            discard
+          of '\x0A':
+            lexer.bufpos = lexer.handleLF(lexer.bufpos)
+            break
+          of '\c':
+            lexer.bufpos = lexer.handleCR(lexer.bufpos)
+            break
+          of '#', EndOfFile:
+            lexer.lineEnding()
+            handleLineEnd(false)
+            break
+          else:
+            indentation = lexer.getColNumber(lexer.bufpos)
+            yield startDocEvent()
+            state = fpBlockObjectStart
+            break
       of '\x0A':
         lexer.bufpos = lexer.handleLF(lexer.bufpos)
       of '\c':
         lexer.bufpos = lexer.handleCR(lexer.bufpos)
-        lexer.bufpos.inc()
       of EndOfFile:
         return
       of '#':
@@ -1099,7 +1089,7 @@ proc fastparse*(tagLib: TagLibrary, s: Stream): YamlStream =
         else:
           ensureCorrectIndentation()
         ancestry.add(level)
-        level.kind = fplUnknown
+        level = FastParseLevel(kind: fplUnknown, indentation: -1)
         content = ""
         lexer.plainScalar(content, cBlockOut)
         state = fpBlockAfterPlainScalar
@@ -1124,7 +1114,7 @@ proc fastparse*(tagLib: TagLibrary, s: Stream): YamlStream =
         else:
           ensureCorrectIndentation()
         ancestry.add(level)
-        level.kind = fplUnknown
+        level = FastParseLevel(kind: fplUnknown, indentation: -1)
         content = ""
         lexer.plainScalar(content, cBlockOut)
         state = fpBlockAfterPlainScalar
@@ -1140,9 +1130,7 @@ proc fastparse*(tagLib: TagLibrary, s: Stream): YamlStream =
         of fplScalar:
           state = fpBlockContinueScalar
         of fplUnknown:
-          handlePossibleMapStart()
           state = fpBlockObjectStart
-          level.indentation = indentation
         else:
           ensureCorrectIndentation()
           state = fpBlockObjectStart
@@ -1153,9 +1141,7 @@ proc fastparse*(tagLib: TagLibrary, s: Stream): YamlStream =
         of fplScalar:
           state = fpBlockContinueScalar
         of fplUnknown:
-          handlePossibleMapStart()
           state = fpBlockObjectStart
-          level.indentation = indentation
         else:
           ensureCorrectIndentation()
           state = fpBlockObjectStart
@@ -1219,16 +1205,16 @@ proc fastparse*(tagLib: TagLibrary, s: Stream): YamlStream =
       of ':':
         case level.kind
         of fplUnknown:
-          handleObjectStart(yamlStartMap, false)
+          handleObjectStart(yamlStartMap)
         of fplMapKey:
           yield scalarEvent("", yTagQuestionMark, yAnchorNone)
           level.kind = fplMapValue
           ancestry.add(level)
-          level.kind = fplUnknown
+          level = FastParseLevel(kind: fplUnknown, indentation: -1)
         of fplMapValue:
           level.kind = fplMapValue
           ancestry.add(level)
-          level.kind = fplUnknown
+          level = FastParseLevel(kind: fplUnknown, indentation: -1)
        of fplSequence:
           raiseError("Illegal token (expected sequence item)")
         of fplScalar:
@@ -1236,10 +1222,8 @@ proc fastparse*(tagLib: TagLibrary, s: Stream): YamlStream =
         lexer.bufpos.inc()
         lexer.skipWhitespace()
         indentation = lexer.getColNumber(lexer.bufpos)
-        level.indentation = indentation
         state = fpBlockObjectStart
       of '#':
-        applyObjectProperties()
         lexer.lineEnding()
         handleLineEnd(true)
         handleObjectEnd(fpBlockLineStart)
@@ -1253,13 +1237,13 @@ proc fastparse*(tagLib: TagLibrary, s: Stream): YamlStream =
       let objectStart = lexer.getColNumber(lexer.bufpos)
       case lexer.buf[lexer.bufpos]
       of '\x0A':
-        propsToObjectProps()
         lexer.bufpos = lexer.handleLF(lexer.bufpos)
         state = fpBlockLineStart
+        level.indentation = -1
       of '\c':
-        propsToObjectProps()
         lexer.bufpos = lexer.handleCR(lexer.bufpos)
         state = fpBlockLineStart
+        level.indentation = -1
       of EndOfFile:
         closeEverything()
         return
@@ -1283,8 +1267,11 @@ proc fastparse*(tagLib: TagLibrary, s: Stream): YamlStream =
         yield scalarEvent(content, tag, anchor)
         handleObjectEnd(fpBlockAfterObject)
       of '|', '>':
+        # TODO: this will scan for possible map start, which is not
+        # neccessary in this case
         handleBlockItemStart()
         var stateAfter: FastParseState
+        content = ""
         lexer.blockScalar(content, stateAfter)
         if tag == yTagQuestionmark:
           tag = yTagExclamationmark
@@ -1293,6 +1280,7 @@ proc fastparse*(tagLib: TagLibrary, s: Stream): YamlStream =
       of '-':
         if lexer.isPlainSafe(lexer.bufpos + 1, cBlockOut):
           handleBlockItemStart()
+          content = ""
           lexer.tokenstart = lexer.getColNumber(lexer.bufpos)
           lexer.plainScalar(content, cBlockOut)
           state = fpBlockAfterPlainScalar
@@ -1310,11 +1298,11 @@ proc fastparse*(tagLib: TagLibrary, s: Stream): YamlStream =
         handleAlias()
       of '[', '{':
         handleBlockItemStart()
-        applyObjectProperties()
         state = fpFlow
       of '?':
         if lexer.isPlainSafe(lexer.bufpos + 1, cBlockOut):
           handleBlockItemStart()
+          content = ""
           lexer.tokenstart = lexer.getColNumber(lexer.bufpos)
           lexer.plainScalar(content, cBlockOut)
           state = fpBlockAfterPlainScalar
@@ -1324,12 +1312,16 @@ proc fastparse*(tagLib: TagLibrary, s: Stream): YamlStream =
       of ':':
         if lexer.isPlainSafe(lexer.bufpos + 1, cBlockOut):
           handleBlockItemStart()
+          content = ""
           lexer.tokenstart = lexer.getColNumber(lexer.bufpos)
           lexer.plainScalar(content, cBlockOut)
           state = fpBlockAfterPlainScalar
         else:
           lexer.bufpos.inc()
           handleMapValueIndicator()
+      of '@', '`':
+        raiseError("Reserved characters cannot start a plain scalar",
+                   lexer.bufpos)
       else:
         handleBlockItemStart()
         content = ""
@@ -1376,12 +1368,12 @@ proc fastparse*(tagLib: TagLibrary, s: Stream): YamlStream =
       lexer.skipWhitespaceAndNewlines()
       case lexer.buf[lexer.bufpos]
       of '{':
-        handleObjectStart(yamlStartMap, true)
+        handleObjectStart(yamlStartMap)
         flowdepth.inc()
         lexer.bufpos.inc()
         explicitFlowKey = false
       of '[':
-        handleObjectStart(yamlStartSequence, true)
+        handleObjectStart(yamlStartSequence)
         flowdepth.inc()
         lexer.bufpos.inc()
       of '}':
@@ -1444,7 +1436,7 @@ proc fastparse*(tagLib: TagLibrary, s: Stream): YamlStream =
        of fplUnknown, fplScalar:
           assert(false)
         ancestry.add(level)
-        level = FastParseLevel(kind: fplUnknown)
+        level = FastParseLevel(kind: fplUnknown, indentation: -1)
         lexer.bufpos.inc()
       of ':':
         assert(level.kind == fplUnknown)
@@ -1461,7 +1453,7 @@ proc fastparse*(tagLib: TagLibrary, s: Stream): YamlStream =
         of fplUnknown, fplScalar:
           assert(false)
         ancestry.add(level)
-        level = FastParseLevel(kind: fplUnknown)
+        level = FastParseLevel(kind: fplUnknown, indentation: -1)
         lexer.bufpos.inc()
       else:
         handleFlowPlainScalar()
@@ -1533,7 +1525,7 @@ proc fastparse*(tagLib: TagLibrary, s: Stream): YamlStream =
         of fplUnknown, fplScalar:
           assert(false)
         ancestry.add(level)
-        level = FastParseLevel(kind: fplUnknown)
+        level = FastParseLevel(kind: fplUnknown, indentation: -1)
         state = fpFlow
         lexer.bufpos.inc()
       of ':':
@@ -1545,7 +1537,7 @@ proc fastparse*(tagLib: TagLibrary, s: Stream): YamlStream =
         of fplUnknown, fplScalar:
           assert(false)
         ancestry.add(level)
-        level = FastParseLevel(kind: fplUnknown)
+        level = FastParseLevel(kind: fplUnknown, indentation: -1)
         state = fpFlow
         lexer.bufpos.inc()
       of '#':
310  test/parsing.nim
@@ -2,39 +2,6 @@ import "../yaml"

 import unittest

-proc startDoc(): YamlStreamEvent =
-  result.kind = yamlStartDocument
-
-proc endDoc(): YamlStreamEvent =
-  result.kind = yamlEndDocument
-
-proc scalar(content: string,
-            tag: TagId = yTagQuestionMark, anchor: AnchorId = yAnchorNone):
-    YamlStreamEvent = scalarEvent(content, tag, anchor)
-
-proc startSequence(tag: TagId = yTagQuestionMark,
-                   anchor: AnchorId = yAnchorNone):
-    YamlStreamEvent =
-  result.kind = yamlStartSequence
-  result.seqAnchor = anchor
-  result.seqTag = tag
-
-proc endSequence(): YamlStreamEvent =
-  result.kind = yamlEndSequence
-
-proc startMap(tag: TagId = yTagQuestionMark, anchor: AnchorId = yAnchorNone):
-    YamlStreamEvent =
-  result.kind = yamlStartMap
-  result.mapAnchor = anchor
-  result.mapTag = tag
-
-proc endMap(): YamlStreamEvent =
-  result.kind = yamlEndMap
-
-proc alias(target: AnchorId): YamlStreamEvent =
-  result.kind = yamlAlias
-  result.aliasTarget = target
-
 proc printDifference(expected, actual: YamlStreamEvent) =
   if expected.kind != actual.kind:
     echo "expected " & $expected.kind & ", got " & $actual.kind
@@ -45,10 +12,10 @@ proc printDifference(expected, actual: YamlStreamEvent) =
       echo "[\"", actual.scalarContent, "\".tag] expected tag ",
            expected.scalarTag, ", got ", actual.scalarTag
     elif expected.scalarAnchor != actual.scalarAnchor:
-      echo "[scalar] expected anchor ", expected.scalarAnchor,
+      echo "[scalarEvent] expected anchor ", expected.scalarAnchor,
            ", got ", actual.scalarAnchor
     elif expected.scalarContent != actual.scalarContent:
-      let msg = "[scalar] expected content \"" &
+      let msg = "[scalarEvent] expected content \"" &
                 expected.scalarContent & "\", got \"" &
                 actual.scalarContent & "\" "
       if expected.scalarContent.len != actual.scalarContent.len:
@@ -63,7 +30,7 @@ proc printDifference(expected, actual: YamlStreamEvent) =
                cast[int](actual.scalarContent[i]), ")"
           break
     else:
-      echo "[scalar] Unknown difference"
+      echo "[scalarEvent] Unknown difference"
   of yamlStartMap:
     if expected.mapTag != actual.mapTag:
       echo "[map.tag] expected ", expected.mapTag, ", got ",
@@ -85,9 +52,8 @@ proc printDifference(expected, actual: YamlStreamEvent) =

 template ensure(input: string, expected: varargs[YamlStreamEvent]) {.dirty.} =
   var
-    parser = newParser(tagLib)
     i = 0
-    events = parser.parse(newStringStream(input))
+    events = fastparse(tagLib, newStringStream(input))
   try:
     for token in events():
       if i >= expected.len:
@@ -113,183 +79,183 @@ suite "Parsing":
   teardown:
     discard

-  test "Parsing: Simple Scalar":
-    ensure("Scalar", startDoc(), scalar("Scalar"), endDoc())
+  test "Parsing: Simple scalarEvent":
+    ensure("scalarEvent", startDocEvent(), scalarEvent("scalarEvent"), endDocEvent())
   test "Parsing: Simple Sequence":
-    ensure("- off", startDoc(), startSequence(),
-           scalar("off"), endSequence(), endDoc())
+    ensure("- off", startDocEvent(), startSeqEvent(),
+           scalarEvent("off"), endSeqEvent(), endDocEvent())
   test "Parsing: Simple Map":
-    ensure("42: value\nkey2: -7.5", startDoc(), startMap(),
-           scalar("42"), scalar("value"), scalar("key2"),
-           scalar("-7.5"), endMap(), endDoc())
+    ensure("42: value\nkey2: -7.5", startDocEvent(), startMapEvent(),
+           scalarEvent("42"), scalarEvent("value"), scalarEvent("key2"),
+           scalarEvent("-7.5"), endMapEvent(), endDocEvent())
   test "Parsing: Explicit Map":
-    ensure("? null\n: value\n? ON\n: value2", startDoc(), startMap(),
-           scalar("null"), scalar("value"),
-           scalar("ON"), scalar("value2"),
-           endMap(), endDoc())
+    ensure("? null\n: value\n? ON\n: value2", startDocEvent(), startMapEvent(),
+           scalarEvent("null"), scalarEvent("value"),
+           scalarEvent("ON"), scalarEvent("value2"),
+           endMapEvent(), endDocEvent())
   test "Parsing: Mixed Map (explicit to implicit)":
-    ensure("? a\n: 13\n1.5: d", startDoc(), startMap(), scalar("a"),
-           scalar("13"), scalar("1.5"),
-           scalar("d"), endMap(), endDoc())
+    ensure("? a\n: 13\n1.5: d", startDocEvent(), startMapEvent(), scalarEvent("a"),
+           scalarEvent("13"), scalarEvent("1.5"),
+           scalarEvent("d"), endMapEvent(), endDocEvent())
   test "Parsing: Mixed Map (implicit to explicit)":
-    ensure("a: 4.2\n? 23\n: d", startDoc(), startMap(), scalar("a"),
-           scalar("4.2"), scalar("23"),
-           scalar("d"), endMap(), endDoc())
+    ensure("a: 4.2\n? 23\n: d", startDocEvent(), startMapEvent(), scalarEvent("a"),
+           scalarEvent("4.2"), scalarEvent("23"),
+           scalarEvent("d"), endMapEvent(), endDocEvent())
   test "Parsing: Missing values in map":
-    ensure("? a\n? b\nc:", startDoc(), startMap(), scalar("a"), scalar(""),
-           scalar("b"), scalar(""), scalar("c"), scalar(""), endMap(),
-           endDoc())
+    ensure("? a\n? b\nc:", startDocEvent(), startMapEvent(), scalarEvent("a"), scalarEvent(""),
+           scalarEvent("b"), scalarEvent(""), scalarEvent("c"), scalarEvent(""), endMapEvent(),
+           endDocEvent())
   test "Parsing: Missing keys in map":
-    ensure(": a\n: b", startDoc(), startMap(), scalar(""), scalar("a"),
-           scalar(""), scalar("b"), endMap(), endDoc())
+    ensure(": a\n: b", startDocEvent(), startMapEvent(), scalarEvent(""), scalarEvent("a"),
+           scalarEvent(""), scalarEvent("b"), endMapEvent(), endDocEvent())
-  test "Parsing: Multiline scalars in explicit map":
-    ensure("? a\n true\n: null\n d\n? e\n 42", startDoc(), startMap(),
-           scalar("a true"), scalar("null d"), scalar("e 42"), scalar(""),
-           endMap(), endDoc())
+  test "Parsing: Multiline scalarEvents in explicit map":
+    ensure("? a\n true\n: null\n d\n? e\n 42", startDocEvent(), startMapEvent(),
+           scalarEvent("a true"), scalarEvent("null d"), scalarEvent("e 42"), scalarEvent(""),
+           endMapEvent(), endDocEvent())
   test "Parsing: Map in Sequence":
     ensure(" - key: value\n key2: value2\n -\n key3: value3",
-           startDoc(), startSequence(), startMap(), scalar("key"),
-           scalar("value"), scalar("key2"), scalar("value2"), endMap(),
-           startMap(), scalar("key3"), scalar("value3"), endMap(),
-           endSequence(), endDoc())
+           startDocEvent(), startSeqEvent(), startMapEvent(), scalarEvent("key"),
+           scalarEvent("value"), scalarEvent("key2"), scalarEvent("value2"), endMapEvent(),
+           startMapEvent(), scalarEvent("key3"), scalarEvent("value3"), endMapEvent(),
+           endSeqEvent(), endDocEvent())
   test "Parsing: Sequence in Map":
-    ensure("key:\n - item1\n - item2", startDoc(), startMap(),
-           scalar("key"), startSequence(), scalar("item1"), scalar("item2"),
-           endSequence(), endMap(), endDoc())
+    ensure("key:\n - item1\n - item2", startDocEvent(), startMapEvent(),
+           scalarEvent("key"), startSeqEvent(), scalarEvent("item1"), scalarEvent("item2"),
+           endSeqEvent(), endMapEvent(), endDocEvent())
   test "Parsing: Sequence in Sequence":
-    ensure("- - l1_i1\n - l1_i2\n- l2_i1", startDoc(), startSequence(),
-           startSequence(), scalar("l1_i1"), scalar("l1_i2"), endSequence(),
-           scalar("l2_i1"), endSequence(), endDoc())
+    ensure("- - l1_i1\n - l1_i2\n- l2_i1", startDocEvent(), startSeqEvent(),
+           startSeqEvent(), scalarEvent("l1_i1"), scalarEvent("l1_i2"), endSeqEvent(),
+           scalarEvent("l2_i1"), endSeqEvent(), endDocEvent())
   test "Parsing: Flow Sequence":
-    ensure("[2, b]", startDoc(), startSequence(), scalar("2"),
-           scalar("b"), endSequence(), endDoc())
+    ensure("[2, b]", startDocEvent(), startSeqEvent(), scalarEvent("2"),
+           scalarEvent("b"), endSeqEvent(), endDocEvent())
   test "Parsing: Flow Map":
-    ensure("{a: Y, 1.337: d}", startDoc(), startMap(), scalar("a"),
-           scalar("Y"), scalar("1.337"),
-           scalar("d"), endMap(), endDoc())
+    ensure("{a: Y, 1.337: d}", startDocEvent(), startMapEvent(), scalarEvent("a"),
+           scalarEvent("Y"), scalarEvent("1.337"),
+           scalarEvent("d"), endMapEvent(), endDocEvent())
   test "Parsing: Flow Sequence in Flow Sequence":
-    ensure("[a, [b, c]]", startDoc(), startSequence(), scalar("a"),
-           startSequence(), scalar("b"), scalar("c"), endSequence(),
-           endSequence(), endDoc())
+    ensure("[a, [b, c]]", startDocEvent(), startSeqEvent(), scalarEvent("a"),
+           startSeqEvent(), scalarEvent("b"), scalarEvent("c"), endSeqEvent(),
+           endSeqEvent(), endDocEvent())
   test "Parsing: Flow Sequence in Flow Map":
-    ensure("{a: [b, c], [d, e]: f}", startDoc(), startMap(), scalar("a"),
-           startSequence(), scalar("b"), scalar("c"), endSequence(),
-           startSequence(), scalar("d"), scalar("e"), endSequence(),
-           scalar("f"), endMap(), endDoc())
+    ensure("{a: [b, c], [d, e]: f}", startDocEvent(), startMapEvent(), scalarEvent("a"),
+           startSeqEvent(), scalarEvent("b"), scalarEvent("c"), endSeqEvent(),
+           startSeqEvent(), scalarEvent("d"), scalarEvent("e"), endSeqEvent(),
+           scalarEvent("f"), endMapEvent(), endDocEvent())
   test "Parsing: Flow Sequence in Map":
-    ensure("a: [b, c]", startDoc(), startMap(), scalar("a"),
-           startSequence(), scalar("b"), scalar("c"), endSequence(),
-           endMap(), endDoc())
+    ensure("a: [b, c]", startDocEvent(), startMapEvent(), scalarEvent("a"),
+           startSeqEvent(), scalarEvent("b"), scalarEvent("c"), endSeqEvent(),
+           endMapEvent(), endDocEvent())
   test "Parsing: Flow Map in Sequence":
-    ensure("- {a: b}", startDoc(), startSequence(), startMap(), scalar("a"),
-           scalar("b"), endMap(), endSequence(), endDoc())
+    ensure("- {a: b}", startDocEvent(), startSeqEvent(), startMapEvent(), scalarEvent("a"),
+           scalarEvent("b"), endMapEvent(), endSeqEvent(), endDocEvent())
   test "Parsing: Multiline scalar (top level)":
-    ensure("a\nb \n c\nd", startDoc(), scalar("a b c d"), endDoc())
+    ensure("a\nb \n c\nd", startDocEvent(), scalarEvent("a b c d"), endDocEvent())
   test "Parsing: Multiline scalar (in map)":
-    ensure("a: b\n c\nd:\n e\n f", startDoc(), startMap(), scalar("a"),
-           scalar("b c"), scalar("d"), scalar("e f"), endMap(), endDoc())
+    ensure("a: b\n c\nd:\n e\n f", startDocEvent(), startMapEvent(), scalarEvent("a"),
+           scalarEvent("b c"), scalarEvent("d"), scalarEvent("e f"), endMapEvent(), endDocEvent())
   test "Parsing: Block scalar (literal)":
-    ensure("a: |\x0A ab\x0A \x0A cd\x0A ef\x0A \x0A", startDoc(),
-           startMap(), scalar("a"), scalar("ab\x0A\x0Acd\x0Aef\x0A"),
-           endMap(), endDoc())
+    ensure("a: |\x0A ab\x0A \x0A cd\x0A ef\x0A \x0A", startDocEvent(),
+           startMapEvent(), scalarEvent("a"), scalarEvent("ab\x0A\x0Acd\x0Aef\x0A", yTagExclamationmark),
+           endMapEvent(), endDocEvent())
   test "Parsing: Block scalar (folded)":
-    ensure("a: >\x0A ab\x0A cd\x0A \x0Aef\x0A\x0A\x0Agh\x0A", startDoc(),
-           startMap(), scalar("a"), scalar("ab cd\x0Aef\x0Agh\x0A"),
-           endMap(), endDoc())
+    ensure("a: >\x0A ab\x0A cd\x0A \x0A ef\x0A\x0A\x0A gh\x0A", startDocEvent(),
+           startMapEvent(), scalarEvent("a"), scalarEvent("ab cd\x0Aef\x0A\x0Agh\x0A", yTagExclamationmark),
+           endMapEvent(), endDocEvent())
   test "Parsing: Block scalar (keep)":
-    ensure("a: |+\x0A ab\x0A \x0A \x0A", startDoc(), startMap(),
-           scalar("a"), scalar("ab\x0A\x0A \x0A"), endMap(), endDoc())
+    ensure("a: |+\x0A ab\x0A \x0A \x0A", startDocEvent(), startMapEvent(),
+           scalarEvent("a"), scalarEvent("ab\x0A\x0A \x0A", yTagExclamationmark), endMapEvent(), endDocEvent())
   test "Parsing: Block scalar (strip)":
-    ensure("a: |-\x0A ab\x0A \x0A \x0A", startDoc(), startMap(),
-           scalar("a"), scalar("ab"), endMap(), endDoc())
+    ensure("a: |-\x0A ab\x0A \x0A \x0A", startDocEvent(), startMapEvent(),
+           scalarEvent("a"), scalarEvent("ab", yTagExclamationmark), endMapEvent(), endDocEvent())
   test "Parsing: non-specific tags of quoted strings":
-    ensure("\"a\"", startDoc(),
-           scalar("a", yTagExclamationMark), endDoc())
+    ensure("\"a\"", startDocEvent(),
+           scalarEvent("a", yTagExclamationMark), endDocEvent())
   test "Parsing: explicit non-specific tag":
-    ensure("! a", startDoc(), scalar("a", yTagExclamationMark), endDoc())
+    ensure("! a", startDocEvent(), scalarEvent("a", yTagExclamationMark), endDocEvent())
   test "Parsing: secondary tag handle resolution":
-    ensure("!!str a", startDoc(), scalar("a", yTagString), endDoc())
+    ensure("!!str a", startDocEvent(), scalarEvent("a", yTagString), endDocEvent())
   test "Parsing: resolving custom tag handles":
     let fooId = tagLib.registerUri("tag:example.com,2015:foo")
-    ensure("%TAG !t! tag:example.com,2015:\n---\n!t!foo a", startDoc(),
-           scalar("a", fooId), endDoc())
+    ensure("%TAG !t! tag:example.com,2015:\n---\n!t!foo a", startDocEvent(),
+           scalarEvent("a", fooId), endDocEvent())
   test "Parsing: tags in sequence":
-    ensure(" - !!str a\n - b\n - !!int c\n - d", startDoc(),
-           startSequence(), scalar("a", yTagString), scalar("b"),
-           scalar("c", yTagInteger), scalar("d"), endSequence(), endDoc())
+    ensure(" - !!str a\n - b\n - !!int c\n - d", startDocEvent(),
+           startSeqEvent(), scalarEvent("a", yTagString), scalarEvent("b"),
+           scalarEvent("c", yTagInteger), scalarEvent("d"), endSeqEvent(), endDocEvent())
   test "Parsing: tags in implicit map":
-    ensure("!!str a: b\nc: !!int d\ne: !!str f\ng: h", startDoc(), startMap(),
-           scalar("a", yTagString), scalar("b"), scalar("c"),
-           scalar("d", yTagInteger), scalar("e"), scalar("f", yTagString),
-           scalar("g"), scalar("h"), endMap(), endDoc())
+    ensure("!!str a: b\nc: !!int d\ne: !!str f\ng: h", startDocEvent(), startMapEvent(),
+           scalarEvent("a", yTagString), scalarEvent("b"), scalarEvent("c"),
+           scalarEvent("d", yTagInteger), scalarEvent("e"), scalarEvent("f", yTagString),
+           scalarEvent("g"), scalarEvent("h"), endMapEvent(), endDocEvent())
   test "Parsing: tags in explicit map":
-    ensure("? !!str a\n: !!int b\n? c\n: !!str d", startDoc(), startMap(),
-           scalar("a", yTagString), scalar("b", yTagInteger), scalar("c"),
-           scalar("d", yTagString), endMap(), endDoc())
+    ensure("? !!str a\n: !!int b\n? c\n: !!str d", startDocEvent(), startMapEvent(),
+           scalarEvent("a", yTagString), scalarEvent("b", yTagInteger), scalarEvent("c"),
+           scalarEvent("d", yTagString), endMapEvent(), endDocEvent())
   test "Parsing: tags for block objects":
     ensure("--- !!map\nfoo: !!seq\n - a\n - !!str b\n!!str bar: !!str baz",
-           startDoc(), startMap(yTagMap), scalar("foo"),
-           startSequence(yTagSequence), scalar("a"), scalar("b", yTagString),
-           endSequence(), scalar("bar", yTagString),
-           scalar("baz", yTagString), endMap(), endDoc())
+           startDocEvent(), startMapEvent(yTagMap), scalarEvent("foo"),
+           startSeqEvent(yTagSequence), scalarEvent("a"), scalarEvent("b", yTagString),
+           endSeqEvent(), scalarEvent("bar", yTagString),
+           scalarEvent("baz", yTagString), endMapEvent(), endDocEvent())
   test "Parsing: root tag for block sequence":
-    ensure("--- !!seq\n- a", startDoc(), startSequence(yTagSequence),
-           scalar("a"), endSequence(), endDoc())
+    ensure("--- !!seq\n- a", startDocEvent(), startSeqEvent(yTagSequence),
+           scalarEvent("a"), endSeqEvent(), endDocEvent())
   test "Parsing: root tag for explicit block map":
-    ensure("--- !!map\n? a\n: b", startDoc(), startMap(yTagMap),
-           scalar("a"), scalar("b"), endMap(), endDoc())
+    ensure("--- !!map\n? a\n: b", startDocEvent(), startMapEvent(yTagMap),
+           scalarEvent("a"), scalarEvent("b"), endMapEvent(), endDocEvent())
   test "Parsing: tags for flow objects":
-    ensure("!!map { k: !!seq [ a, !!str b] }", startDoc(), startMap(yTagMap),
-           scalar("k"), startSequence(yTagSequence), scalar("a"),
-           scalar("b", yTagString), endSequence(), endMap(), endDoc())
+    ensure("!!map { k: !!seq [ a, !!str b] }", startDocEvent(), startMapEvent(yTagMap),
+           scalarEvent("k"), startSeqEvent(yTagSequence), scalarEvent("a"),
+           scalarEvent("b", yTagString), endSeqEvent(), endMapEvent(), endDocEvent())
   test "Parsing: Tag after directives end":
-    ensure("--- !!str\nfoo", startDoc(), scalar("foo", yTagString), endDoc())
+    ensure("--- !!str\nfoo", startDocEvent(), scalarEvent("foo", yTagString), endDocEvent())
   test "Parsing: Simple Anchor":
-    ensure("&a str", startDoc(), scalar("str", yTagQuestionMark,
-           0.AnchorId), endDoc())
+    ensure("&a str", startDocEvent(), scalarEvent("str", yTagQuestionMark,
+           0.AnchorId), endDocEvent())
   test "Parsing: Anchors in sequence":
-    ensure(" - &a a\n - b\n - &c c\n - &a d", startDoc(), startSequence(),
-           scalar("a", yTagQuestionMark, 0.AnchorId), scalar("b"),
-           scalar("c", yTagQuestionMark, 1.AnchorId),
-           scalar("d", yTagQuestionMark, 0.AnchorId), endSequence(),
-           endDoc())
+    ensure(" - &a a\n - b\n - &c c\n - &a d", startDocEvent(), startSeqEvent(),
+           scalarEvent("a", yTagQuestionMark, 0.AnchorId), scalarEvent("b"),
+           scalarEvent("c", yTagQuestionMark, 1.AnchorId),
+           scalarEvent("d", yTagQuestionMark, 2.AnchorId), endSeqEvent(),
+           endDocEvent())
   test "Parsing: Anchors in map":
-    ensure("&a a: b\nc: &d d", startDoc(), startMap(),
-           scalar("a", yTagQuestionMark, 0.AnchorId),
-           scalar("b"), scalar("c"),
-           scalar("d", yTagQuestionMark, 1.AnchorId),
-           endMap(), endDoc())
+    ensure("&a a: b\nc: &d d", startDocEvent(), startMapEvent(),
+           scalarEvent("a", yTagQuestionMark, 0.AnchorId),
+           scalarEvent("b"), scalarEvent("c"),
+           scalarEvent("d", yTagQuestionMark, 1.AnchorId),
+           endMapEvent(), endDocEvent())
   test "Parsing: Anchors and tags":
-    ensure(" - &a !!str a\n - !!int b\n - &c !!int c\n - &d d", startDoc(),
-           startSequence(), scalar("a", yTagString, 0.AnchorId),
-           scalar("b", yTagInteger), scalar("c", yTagInteger, 1.AnchorId),
-           scalar("d", yTagQuestionMark, 2.AnchorId), endSequence(),
-           endDoc())
+    ensure(" - &a !!str a\n - !!int b\n - &c !!int c\n - &d d", startDocEvent(),
+           startSeqEvent(), scalarEvent("a", yTagString, 0.AnchorId),
+           scalarEvent("b", yTagInteger), scalarEvent("c", yTagInteger, 1.AnchorId),
+           scalarEvent("d", yTagQuestionMark, 2.AnchorId), endSeqEvent(),
+           endDocEvent())
   test "Parsing: Aliases in sequence":
-    ensure(" - &a a\n - &b b\n - *a\n - *b", startDoc(), startSequence(),
-           scalar("a", yTagQuestionMark, 0.AnchorId),
-           scalar("b", yTagQuestionMark, 1.AnchorId), alias(0.AnchorId),
-           alias(1.AnchorId), endSequence(), endDoc())
+    ensure(" - &a a\n - &b b\n - *a\n - *b", startDocEvent(), startSeqEvent(),
+           scalarEvent("a", yTagQuestionMark, 0.AnchorId),
+           scalarEvent("b", yTagQuestionMark, 1.AnchorId), aliasEvent(0.AnchorId),
+           aliasEvent(1.AnchorId), endSeqEvent(), endDocEvent())
   test "Parsing: Aliases in map":
-    ensure("&a a: &b b\n*a: *b", startDoc(), startMap(),
-           scalar("a", yTagQuestionMark, 0.AnchorId),
-           scalar("b", yTagQuestionMark, 1.AnchorId), alias(0.AnchorId),
-           alias(1.AnchorId), endMap(), endDoc())
+    ensure("&a a: &b b\n*a : *b", startDocEvent(), startMapEvent(),
+           scalarEvent("a", yTagQuestionMark, 0.AnchorId),
+           scalarEvent("b", yTagQuestionMark, 1.AnchorId), aliasEvent(0.AnchorId),
+           aliasEvent(1.AnchorId), endMapEvent(), endDocEvent())
   test "Parsing: Aliases in flow":
-    ensure("{ &a [a, &b b]: *b, *a: [c, *b, d]}", startDoc(), startMap(),
-           startSequence(yTagQuestionMark, 0.AnchorId), scalar("a"),
-           scalar("b", yTagQuestionMark, 1.AnchorId), endSequence(),
-           alias(1.AnchorId), alias(0.AnchorId), startSequence(),
-           scalar("c"), alias(1.AnchorId), scalar("d"), endSequence(),
-           endMap(), endDoc())
+    ensure("{ &a [a, &b b]: *b, *a : [c, *b, d]}", startDocEvent(), startMapEvent(),
+           startSeqEvent(yTagQuestionMark, 0.AnchorId), scalarEvent("a"),
+           scalarEvent("b", yTagQuestionMark, 1.AnchorId), endSeqEvent(),
+           aliasEvent(1.AnchorId), aliasEvent(0.AnchorId), startSeqEvent(),
+           scalarEvent("c"), aliasEvent(1.AnchorId), scalarEvent("d"), endSeqEvent(),
+           endMapEvent(), endDocEvent())
   test "Parsing: Tags on empty scalars":
-    ensure("!!str : a\nb: !!int\n!!str : !!str", startDoc(), startMap(),
-           scalar("", yTagString), scalar("a"), scalar("b"),
-           scalar("", yTagInteger), scalar("", yTagString),
-           scalar("", yTagString), endMap(), endDoc())
+    ensure("!!str : a\nb: !!int\n!!str : !!str", startDocEvent(), startMapEvent(),
+           scalarEvent("", yTagString), scalarEvent("a"), scalarEvent("b"),
+           scalarEvent("", yTagInteger), scalarEvent("", yTagString),
+           scalarEvent("", yTagString), endMapEvent(), endDocEvent())
   test "Parsing: Anchors on empty scalars":
-    ensure("&a : a\nb: &b\n&c : &a", startDoc(), startMap(),
-           scalar("", yTagQuestionMark, 0.AnchorId), scalar("a"),
-           scalar("b"), scalar("", yTagQuestionMark, 1.AnchorId),
-           scalar("", yTagQuestionMark, 2.AnchorId),
-           scalar("", yTagQuestionMark, 0.AnchorId), endMap(), endDoc())
+    ensure("&a : a\nb: &b\n&c : &a", startDocEvent(), startMapEvent(),
+           scalarEvent("", yTagQuestionMark, 0.AnchorId), scalarEvent("a"),
+           scalarEvent("b"), scalarEvent("", yTagQuestionMark, 1.AnchorId),
+           scalarEvent("", yTagQuestionMark, 2.AnchorId),
+           scalarEvent("", yTagQuestionMark, 3.AnchorId), endMapEvent(), endDocEvent())