mirror of https://github.com/status-im/NimYAML.git
Refactoring of assertions, improved exceptions
* Replaced all assert statements with yAssert, which prints an error message and quits on failure, but does nothing when compiling with -d:release.
* Implemented and used internalError for better discovery of internal bugs. Fixes #8.
* Made RawYamlStream able to raise YamlStreamError, which might primarily be useful for user-defined serialization.
* Properly encapsulate RangeError coming from int/uint serialization on 64bit machines in YamlStreamError.
* Some minor style improvements.
This commit is contained in:
parent 0f297adffb
commit 09824f931b
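For orientation before the diff: the mechanism behind the first two bullets is a pair of templates added to yaml.nim (the last file below). Here is a minimal Nim sketch of how they fit together and how a call site changes; the proc checkState and its condition are made up for illustration only.

# Sketch of the error-reporting templates this commit introduces
# (the full versions are in the yaml.nim hunk below).
template internalError(s: string) =
  # Outside -d:release builds: report the instantiation site and message, then quit.
  when not defined(release):
    let ii = instantiationInfo()
    echo "! Error in file ", ii.filename, " at line ", ii.line, ":"
    echo "! ", s
    echo "! Please report this bug."
    quit 1

template yAssert(e: typed) =
  # Drop-in replacement for assert: a no-op with -d:release,
  # otherwise routed through internalError.
  when not defined(release):
    if not e: internalError(astToStr(e))

# Hypothetical call site: a check that formerly read assert(stateKnown, "msg").
proc checkState(stateKnown: bool) =
  yAssert(stateKnown)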
@@ -35,7 +35,7 @@ proc composeNode(s: var YamlStream, tagLib: TagLibrary,
 result.pairs.add((key: key, value: value))
 discard s.next()
 if start.mapAnchor != yAnchorNone:
-assert(not c.refs.hasKey(start.mapAnchor))
+yAssert(not c.refs.hasKey(start.mapAnchor))
 c.refs[start.mapAnchor] = cast[pointer](result)
 of yamlStartSeq:
 result.tag = tagLib.uri(start.seqTag)
@@ -44,7 +44,7 @@ proc composeNode(s: var YamlStream, tagLib: TagLibrary,
 while s.peek().kind != yamlEndSeq:
 result.children.add(composeNode(s, tagLib, c))
 if start.seqAnchor != yAnchorNone:
-assert(not c.refs.hasKey(start.seqAnchor))
+yAssert(not c.refs.hasKey(start.seqAnchor))
 c.refs[start.seqAnchor] = cast[pointer](result)
 discard s.next()
 of yamlScalar:
@@ -52,11 +52,11 @@ proc composeNode(s: var YamlStream, tagLib: TagLibrary,
 result.kind = yScalar
 shallowCopy(result.content, start.scalarContent)
 if start.scalarAnchor != yAnchorNone:
-assert(not c.refs.hasKey(start.scalarAnchor))
+yAssert(not c.refs.hasKey(start.scalarAnchor))
 c.refs[start.scalarAnchor] = cast[pointer](result)
 of yamlAlias:
 result = cast[YamlNode](c.refs[start.aliasTarget])
-else: assert false, "Malformed YamlStream"
+else: internalError("Malformed YamlStream")
 except KeyError:
 raise newException(YamlConstructionError,
 "Wrong tag library: TagId missing")
@@ -64,9 +64,9 @@ proc composeNode(s: var YamlStream, tagLib: TagLibrary,
 proc compose*(s: var YamlStream, tagLib: TagLibrary): YamlDocument
 {.raises: [YamlStreamError, YamlConstructionError].} =
 var context = newConstructionContext()
-assert s.next().kind == yamlStartDoc, "Malformed YamlStream"
+yAssert s.next().kind == yamlStartDoc
 result.root = composeNode(s, tagLib, context)
-assert s.next().kind == yamlEndDoc, "Malformed YamlStream"
+yAssert s.next().kind == yamlEndDoc

 proc loadDOM*(s: Stream): YamlDocument
 {.raises: [IOError, YamlParserError, YamlConstructionError].} =
@@ -81,38 +81,31 @@ proc loadDOM*(s: Stream): YamlDocument
 raise (ref YamlParserError)(e.parent)
 elif e.parent of IOError:
 raise (ref IOError)(e.parent)
-else: assert false, "Never happens: " & e.parent.repr
+else: internalError("Unexpected exception: " & e.parent.repr)

 proc serializeNode(n: YamlNode, c: SerializationContext, a: AnchorStyle,
 tagLib: TagLibrary): RawYamlStream {.raises: [].}=
 let p = cast[pointer](n)
 if a != asNone and c.refs.hasKey(p):
-try:
-if c.refs[p] == yAnchorNone:
-c.refs[p] = c.nextAnchorId
-c.nextAnchorId = AnchorId(int(c.nextAnchorId) + 1)
-except KeyError: assert false, "Can never happen"
+if c.refs.getOrDefault(p) == yAnchorNone:
+c.refs[p] = c.nextAnchorId
+c.nextAnchorId = AnchorId(int(c.nextAnchorId) + 1)
 result = iterator(): YamlStreamEvent {.raises: [].} =
-var event: YamlStreamEvent
-try: event = aliasEvent(c.refs[p])
-except KeyError: assert false, "Can never happen"
-yield event
+yield aliasEvent(c.refs.getOrDefault(p))
 return
 var
 tagId: TagId
 anchor: AnchorId
-try:
-if a == asAlways:
-c.refs[p] = c.nextAnchorId
-c.nextAnchorId = AnchorId(int(c.nextAnchorId) + 1)
-else: c.refs[p] = yAnchorNone
-tagId = if tagLib.tags.hasKey(n.tag): tagLib.tags[n.tag] else:
-tagLib.registerUri(n.tag)
-case a
-of asNone: anchor = yAnchorNone
-of asTidy: anchor = cast[AnchorId](n)
-of asAlways: anchor = c.refs[p]
-except KeyError: assert false, "Can never happen"
+if a == asAlways:
+c.refs[p] = c.nextAnchorId
+c.nextAnchorId = AnchorId(int(c.nextAnchorId) + 1)
+else: c.refs[p] = yAnchorNone
+tagId = if tagLib.tags.hasKey(n.tag): tagLib.tags.getOrDefault(n.tag) else:
+tagLib.registerUri(n.tag)
+case a
+of asNone: anchor = yAnchorNone
+of asTidy: anchor = cast[AnchorId](n)
+of asAlways: anchor = c.refs.getOrDefault(p)
 result = iterator(): YamlStreamEvent =
 case n.kind
 of yScalar: yield scalarEvent(n.content, tagId, anchor)
@@ -141,20 +134,18 @@ proc serializeNode(n: YamlNode, c: SerializationContext, a: AnchorStyle,
 yield endMapEvent()

 template processAnchoredEvent(target: expr, c: SerializationContext): stmt =
-try:
-let anchorId = c.refs[cast[pointer](target)]
-if anchorId != yAnchorNone: target = anchorId
-else: target = yAnchorNone
-except KeyError: assert false, "Can never happen"
+let anchorId = c.refs.getOrDefault(cast[pointer](target))
+if anchorId != yAnchorNone: target = anchorId
+else: target = yAnchorNone
 yield event

 proc serialize*(doc: YamlDocument, tagLib: TagLibrary, a: AnchorStyle = asTidy):
-YamlStream {.raises: [].} =
+YamlStream {.raises: [YamlStreamError].} =
 var
 context = newSerializationContext(a)
 events = serializeNode(doc.root, context, a, tagLib)
 if a == asTidy:
-var backend = iterator(): YamlStreamEvent {.raises: [].} =
+var backend = iterator(): YamlStreamEvent {.raises: [YamlStreamError].} =
 var output = newSeq[YamlStreamEvent]()
 while true:
 let event = events()
@@ -170,7 +161,7 @@ proc serialize*(doc: YamlDocument, tagLib: TagLibrary, a: AnchorStyle = asTidy):
 yield endDocEvent()
 result = initYamlStream(backend)
 else:
-var backend = iterator(): YamlStreamEvent {.raises: [].} =
+var backend = iterator(): YamlStreamEvent {.raises: [YamlStreamError].} =
 yield startDocEvent()
 while true:
 let event = events()
@@ -182,14 +173,11 @@ proc serialize*(doc: YamlDocument, tagLib: TagLibrary, a: AnchorStyle = asTidy):
 proc dumpDOM*(doc: YamlDocument, target: Stream,
 anchorStyle: AnchorStyle = asTidy,
 options: PresentationOptions = defaultPresentationOptions)
-{.raises: [YamlPresenterJsonError, YamlPresenterOutputError].} =
+{.raises: [YamlPresenterJsonError, YamlPresenterOutputError,
+YamlStreamError].} =
 ## Dump a YamlDocument as YAML character stream.
 var
 tagLib = initExtendedTagLibrary()
 events = serialize(doc, tagLib,
 if options.style == psJson: asNone else: anchorStyle)
-try:
-present(events, target, tagLib, options)
-except YamlStreamError:
-# serializing object does not raise any errors, so we can ignore this
-assert false, "Can never happen"
+present(events, target, tagLib, options)
@@ -57,10 +57,8 @@ template debug(message: string) {.dirty.} =
 try: styledWriteLine(stdout, fgBlue, message)
 except IOError: discard

-template debugFail() {.dirty.} =
-when not defined(release):
-echo "internal error at line: ", instantiationInfo().line
-assert(false)
+template unexpectedLevelKind() {.dirty.} =
+internalError("Unexpected level kind: " & $p.level.kind)

 proc generateError(p: YamlParser, message: string):
 ref YamlParserError {.raises: [].} =
@@ -119,7 +117,7 @@ template yieldLevelEnd() {.dirty.} =
 of fplUnknown:
 if p.ancestry.len > 1:
 yield emptyScalar(p) # don't yield scalar for empty doc
-of fplSinglePairKey, fplDocument: debugFail()
+of fplSinglePairKey, fplDocument: unexpectedLevelKind()

 template handleLineEnd(insideDocument: bool) {.dirty.} =
 case p.lexer.buf[p.lexer.bufpos]
@@ -142,11 +140,11 @@ template handleObjectEnd(nextState: FastParseState) {.dirty.} =
 of fplSinglePairKey: p.level.kind = fplSinglePairValue
 of fplMapValue: p.level.kind = fplMapKey
 of fplSequence, fplDocument: discard
-of fplUnknown, fplScalar, fplSinglePairValue: debugFail()
+of fplUnknown, fplScalar, fplSinglePairValue: unexpectedLevelKind()

 proc objectStart(p: YamlParser, k: static[YamlStreamEventKind],
 single: bool = false): YamlStreamEvent {.raises: [].} =
-assert(p.level.kind == fplUnknown)
+yAssert(p.level.kind == fplUnknown)
 when k == yamlStartMap:
 result = startMapEvent(p.tag, p.anchor)
 if single:
@@ -234,7 +232,7 @@ template handleMapKeyIndicator() {.dirty.} =
 of fplScalar:
 raise p.generateError(
 "Unexpected map key indicator (expected multiline scalar end)")
-of fplSinglePairKey, fplSinglePairValue, fplDocument: debugFail()
+of fplSinglePairKey, fplSinglePairValue, fplDocument: unexpectedLevelKind()
 p.lexer.skipWhitespace()
 p.indentation = p.lexer.getColNumber(p.lexer.bufpos)
@@ -264,7 +262,7 @@ template handleMapValueIndicator() {.dirty.} =
 of fplScalar:
 raise p.generateError(
 "Unexpected map value indicator (expected multiline scalar end)")
-of fplSinglePairKey, fplSinglePairValue, fplDocument: debugFail()
+of fplSinglePairKey, fplSinglePairValue, fplDocument: unexpectedLevelKind()
 p.lexer.skipWhitespace()
 p.indentation = p.lexer.getColNumber(p.lexer.bufpos)
@@ -383,7 +381,8 @@ template handleBlockItemStart() {.dirty.} =
 p.level.kind = fplMapKey
 p.ancestry.add(p.level)
 p.level = FastParseLevel(kind: fplUnknown, indentation: p.indentation)
-of fplScalar, fplSinglePairKey, fplSinglePairValue, fplDocument: debugFail()
+of fplScalar, fplSinglePairKey, fplSinglePairValue, fplDocument:
+unexpectedLevelKind()

 template handleFlowItemStart() {.dirty.} =
 if p.level.kind == fplUnknown and
@@ -487,7 +486,7 @@ proc lineEnding(p: YamlParser) {.raises: [YamlParserError], inline.} =
 proc tagShorthand(lexer: var BaseLexer, shorthand: var string) {.inline.} =
 debug("lex: tagShorthand")
 while lexer.buf[lexer.bufpos] in space: lexer.bufpos.inc()
-assert lexer.buf[lexer.bufpos] == '!'
+yAssert lexer.buf[lexer.bufpos] == '!'
 shorthand.add('!')
 lexer.bufpos.inc()
 var c = lexer.buf[lexer.bufpos]
@@ -1099,7 +1098,8 @@ proc parse*(p: YamlParser, s: Stream): YamlStream =
 p.startToken()
 raise p.generateError(
 "Multiline scalars may not be implicit map keys")
-of fplSinglePairKey, fplSinglePairValue, fplDocument: debugFail()
+of fplSinglePairKey, fplSinglePairValue, fplDocument:
+unexpectedLevelKind()
 p.lexer.bufpos.inc()
 p.lexer.skipWhitespace()
 p.indentation = p.lexer.getColNumber(p.lexer.bufpos)
@@ -1345,7 +1345,7 @@ proc parse*(p: YamlParser, s: Stream): YamlStream =
 flowdepth.inc()
 p.lexer.bufpos.inc()
 of '}':
-assert(p.level.kind == fplUnknown)
+yAssert(p.level.kind == fplUnknown)
 p.level = p.ancestry.pop()
 case p.level.kind
 of fplMapValue:
@@ -1362,11 +1362,12 @@ proc parse*(p: YamlParser, s: Stream): YamlStream =
 of fplSinglePairValue:
 p.startToken()
 raise p.generateError("Unexpected token (expected ']')")
-of fplUnknown, fplScalar, fplSinglePairKey, fplDocument: debugFail()
+of fplUnknown, fplScalar, fplSinglePairKey, fplDocument:
+unexpectedLevelKind()
 p.lexer.bufpos.inc()
 leaveFlowLevel()
 of ']':
-assert(p.level.kind == fplUnknown)
+yAssert(p.level.kind == fplUnknown)
 p.level = p.ancestry.pop()
 case p.level.kind
 of fplSequence:
@@ -1376,15 +1377,16 @@ proc parse*(p: YamlParser, s: Stream): YamlStream =
 yield emptyScalar(p)
 p.level = p.ancestry.pop()
 yield endMapEvent()
-assert(p.level.kind == fplSequence)
+yAssert(p.level.kind == fplSequence)
 of fplMapKey, fplMapValue:
 p.startToken()
 raise p.generateError("Unexpected token (expected '}')")
-of fplUnknown, fplScalar, fplSinglePairKey, fplDocument: debugFail()
+of fplUnknown, fplScalar, fplSinglePairKey, fplDocument:
+unexpectedLevelKind()
 p.lexer.bufpos.inc()
 leaveFlowLevel()
 of ',':
-assert(p.level.kind == fplUnknown)
+yAssert(p.level.kind == fplUnknown)
 p.level = p.ancestry.pop()
 case p.level.kind
 of fplSequence: yield emptyScalar(p)
@@ -1400,8 +1402,9 @@ proc parse*(p: YamlParser, s: Stream): YamlStream =
 yield emptyScalar(p)
 p.level = p.ancestry.pop()
 yield endMapEvent()
-assert(p.level.kind == fplSequence)
-of fplUnknown, fplScalar, fplSinglePairKey, fplDocument: debugFail()
+yAssert(p.level.kind == fplSequence)
+of fplUnknown, fplScalar, fplSinglePairKey, fplDocument:
+unexpectedLevelKind()
 p.ancestry.add(p.level)
 p.level = initLevel(fplUnknown)
 p.lexer.bufpos.inc()
@@ -1433,7 +1436,7 @@ proc parse*(p: YamlParser, s: Stream): YamlStream =
 of fplSinglePairKey:
 yield emptyScalar(p)
 p.level.kind = fplSinglePairValue
-of fplUnknown, fplScalar, fplDocument: debugFail()
+of fplUnknown, fplScalar, fplDocument: unexpectedLevelKind()
 p.ancestry.add(p.level)
 p.level = initLevel(fplUnknown)
 p.lexer.bufpos.inc()
@@ -1496,9 +1499,10 @@ proc parse*(p: YamlParser, s: Stream): YamlStream =
 raise p.generateError("Unexpected token (expected '}')")
 of fplSinglePairValue:
 p.level = p.ancestry.pop()
-assert(p.level.kind == fplSequence)
+yAssert(p.level.kind == fplSequence)
 yield endMapEvent()
-of fplScalar, fplUnknown, fplSinglePairKey, fplDocument: debugFail()
+of fplScalar, fplUnknown, fplSinglePairKey, fplDocument:
+unexpectedLevelKind()
 p.lexer.bufpos.inc()
 leaveFlowLevel()
 of '}':
@@ -1507,7 +1511,8 @@ proc parse*(p: YamlParser, s: Stream): YamlStream =
 of fplSequence, fplSinglePairValue:
 p.startToken()
 raise p.generateError("Unexpected token (expected ']')")
-of fplUnknown, fplScalar, fplSinglePairKey, fplDocument: debugFail()
+of fplUnknown, fplScalar, fplSinglePairKey, fplDocument:
+unexpectedLevelKind()
 p.lexer.bufpos.inc()
 leaveFlowLevel()
 of ',':
@@ -1519,10 +1524,11 @@ proc parse*(p: YamlParser, s: Stream): YamlStream =
 explicitFlowKey = false
 of fplSinglePairValue:
 p.level = p.ancestry.pop()
-assert(p.level.kind == fplSequence)
+yAssert(p.level.kind == fplSequence)
 yield endMapEvent()
 of fplMapKey: explicitFlowKey = false
-of fplUnknown, fplScalar, fplSinglePairKey, fplDocument: debugFail()
+of fplUnknown, fplScalar, fplSinglePairKey, fplDocument:
+unexpectedLevelKind()
 p.ancestry.add(p.level)
 p.level = initLevel(fplUnknown)
 state = fpFlow
@@ -1533,7 +1539,8 @@ proc parse*(p: YamlParser, s: Stream): YamlStream =
 p.startToken()
 raise p.generateError("Unexpected token (expected ',')")
 of fplMapValue, fplSinglePairValue: discard
-of fplUnknown, fplScalar, fplSinglePairKey, fplDocument: debugFail()
+of fplUnknown, fplScalar, fplSinglePairKey, fplDocument:
+unexpectedLevelKind()
 p.ancestry.add(p.level)
 p.level = initLevel(fplUnknown)
 state = fpFlow
@@ -1548,4 +1555,5 @@ proc parse*(p: YamlParser, s: Stream): YamlStream =
 p.startToken()
 raise p.generateError("Unexpected content (expected flow indicator)")
 try: result = initYamlStream(backend)
-except Exception: debugFail() # compiler error
+except Exception: # nimc enforces this handler although it isn't necessary
+internalError("Reached code that should be unreachable")
@@ -29,10 +29,10 @@ type
 ythMinus, yth0, ythInt, ythDecimal, ythNumE, ythNumEPlusMinus, ythExponent

 macro typeHintStateMachine(c: untyped, content: untyped): stmt =
-assert content.kind == nnkStmtList
+yAssert content.kind == nnkStmtList
 result = newNimNode(nnkCaseStmt, content).add(copyNimNode(c))
 for branch in content.children:
-assert branch.kind == nnkOfBranch
+yAssert branch.kind == nnkOfBranch
 var
 charBranch = newNimNode(nnkOfBranch, branch)
 i = 0
@@ -42,14 +42,14 @@ macro typeHintStateMachine(c: untyped, content: untyped): stmt =
 charBranch.add(copyNimTree(branch[i]))
 inc(i)
 for rule in branch[i].children:
-assert rule.kind == nnkInfix
-assert ($rule[0].ident == "=>")
+yAssert rule.kind == nnkInfix
+yAssert ($rule[0].ident == "=>")
 var stateBranch = newNimNode(nnkOfBranch, rule)
 case rule[1].kind
 of nnkBracket:
 for item in rule[1].children: stateBranch.add(item)
 of nnkIdent: stateBranch.add(rule[1])
-else: assert false
+else: internalError("Invalid rule kind: " & $rule[1].kind)
 if rule[2].kind == nnkNilLit:
 stateBranch.add(newStmtList(newNimNode(nnkDiscardStmt).add(
 newEmptyNode())))
@@ -6,6 +6,9 @@

 type Level = tuple[node: JsonNode, key: string]

+template unexpectedNodeKind() {.dirty.} =
+internalError("Unexpected node kind: " & $levels[levels.high].node.kind)
+
 proc initLevel(node: JsonNode): Level {.raises: [].} =
 (node: node, key: cast[string](nil))
@@ -117,7 +120,7 @@ proc constructJson*(s: var YamlStream): seq[JsonNode] =
 levels[levels.high].key = nil
 if event.scalarAnchor != yAnchorNone:
 anchors[event.scalarAnchor] = jsonScalar
-else: discard # will never happen
+else: unexpectedNodeKind()
 of yamlEndSeq, yamlEndMap:
 if levels.len > 1:
 let level = levels.pop()
@@ -130,33 +133,24 @@ proc constructJson*(s: var YamlStream): seq[JsonNode] =
 else:
 levels[levels.high].node[levels[levels.high].key] = level.node
 levels[levels.high].key = nil
-else: discard # will never happen
+else: unexpectedNodeKind()
 else: discard # wait for yamlEndDocument
 of yamlAlias:
 # we can savely assume that the alias exists in anchors
 # (else the parser would have already thrown an exception)
 case levels[levels.high].node.kind
 of JArray:
-try:
-levels[levels.high].node.elems.add(anchors[event.aliasTarget])
-except KeyError:
-# we can safely assume that this doesn't happen. It would
-# have resulted in a parser error earlier.
-assert(false)
+levels[levels.high].node.elems.add(
+anchors.getOrDefault(event.aliasTarget))
 of JObject:
 if isNil(levels[levels.high].key):
 raise newException(YamlConstructionError,
 "cannot use alias node as key in JSON")
 else:
-try:
-levels[levels.high].node.fields.add(
-levels[levels.high].key, anchors[event.aliasTarget])
-except KeyError:
-# we can safely assume that this doesn't happen. It would
-# have resulted in a parser error earlier.
-assert(false)
+levels[levels.high].node.fields.add(
+levels[levels.high].key, anchors.getOrDefault(event.aliasTarget))
 levels[levels.high].key = nil
-else: discard # will never happen
+else: unexpectedNodeKind()

 proc loadToJson*(s: Stream): seq[JsonNode] =
 var
@@ -165,7 +159,7 @@ proc loadToJson*(s: Stream): seq[JsonNode] =
 try:
 return constructJson(events)
 except YamlConstructionError:
-var e = cast[ref YamlConstructionError](getCurrentException())
+var e = (ref YamlConstructionError)(getCurrentException())
 e.line = parser.getLineNumber()
 e.column = parser.getColNumber()
 e.lineContent = parser.getLineContent()
@@ -173,13 +167,10 @@ proc loadToJson*(s: Stream): seq[JsonNode] =
 except YamlStreamError:
 let e = getCurrentException()
 if e.parent of IOError:
-raise cast[ref IOError](e.parent)
+raise (ref IOError)(e.parent)
 elif e.parent of YamlParserError:
-raise cast[ref YamlParserError](e.parent)
-else:
-# can never happen
-assert(false)
-except AssertionError: raise
+raise (ref YamlParserError)(e.parent)
+else: internalError("Unexpected exception: " & e.parent.repr)
 except Exception:
 # compiler bug: https://github.com/nim-lang/Nim/issues/3772
-assert(false)
+internalError("Reached code that should be unreachable")
@@ -325,7 +325,7 @@ proc nextItem(c: var Queue, s: var YamlStream):
 YamlStreamEvent {.raises: [YamlStreamError].} =
 if c.len > 0:
 try: result = c.dequeue
-except IndexError: assert false
+except IndexError: internalError("Unexpected IndexError")
 else:
 result = s.next()

@@ -403,7 +403,7 @@ proc present*(s: var YamlStream, target: Stream, tagLib: TagLibrary,
 if options.style == psJson:
 raise newException(YamlPresenterJsonError,
 "Alias not allowed in JSON output")
-assert levels.len > 0
+yAssert levels.len > 0
 startItem(target, options.style, indentation, levels[levels.high],
 false, newline)
 try:
@@ -419,7 +419,7 @@ proc present*(s: var YamlStream, target: Stream, tagLib: TagLibrary,
 of psDefault:
 var length = 0
 while true:
-assert(not(s.finished()))
+yAssert(not s.finished())
 let next = s.next()
 cached.enqueue(next)
 case next.kind
@@ -449,7 +449,7 @@ proc present*(s: var YamlStream, target: Stream, tagLib: TagLibrary,
 if options.style != psJson:
 writeTagAndAnchor(target, item.seqTag, tagLib, item.seqAnchor)
 indentation += options.indentationStep
-else: assert false
+else: internalError("Invalid nextState: " & $nextState)
 else:
 startItem(target, options.style, indentation,
 levels[levels.high], true, newline)
@@ -505,7 +505,7 @@ proc present*(s: var YamlStream, target: Stream, tagLib: TagLibrary,
 writeTagAndAnchor(target, item.mapTag, tagLib, item.mapAnchor)
 indentation += options.indentationStep
 of dBlockInlineMap: discard
-else: assert false
+else: internalError("Invalid nextState: " & $nextState)
 else:
 if nextState in [dBlockMapValue, dBlockImplicitMapKey]:
 startItem(target, options.style, indentation,
@@ -528,7 +528,7 @@ proc present*(s: var YamlStream, target: Stream, tagLib: TagLibrary,
 levels.add(nextState)

 of yamlEndSeq:
-assert levels.len > 0
+yAssert levels.len > 0
 case levels.pop()
 of dFlowSequenceItem:
 case options.style
@@ -554,10 +554,10 @@ proc present*(s: var YamlStream, target: Stream, tagLib: TagLibrary,
 indentation -= options.indentationStep
 safeWrite(']')
 of dBlockSequenceItem: discard
-else: assert false
+else: internalError("Invalid popped level")
 indentation -= options.indentationStep
 of yamlEndMap:
-assert levels.len > 0
+yAssert levels.len > 0
 let level = levels.pop()
 case level
 of dFlowMapValue:
@@ -584,7 +584,7 @@ proc present*(s: var YamlStream, target: Stream, tagLib: TagLibrary,
 indentation -= options.indentationStep
 safeWrite('}')
 of dBlockMapValue, dBlockInlineMap: discard
-else: assert(false)
+else: internalError("Invalid level: " & $level)
 indentation -= options.indentationStep
 of yamlEndDoc:
 if finished(s): break
@@ -633,6 +633,4 @@ proc transform*(input: Stream, output: Stream,
 while e.parent of YamlStreamError: e = e.parent
 if e.parent of IOError: raise (ref IOError)(e.parent)
 elif e.parent of YamlParserError: raise (ref YamlParserError)(e.parent)
-else:
-# never happens
-assert(false)
+else: internalError("Unexpected exception: " & e.parent.repr)
@@ -27,9 +27,7 @@ proc safeTagUri(id: TagId): string {.raises: [].} =
 let uri = serializationTagLibrary.uri(id)
 if uri.len > 0 and uri[0] == '!': return uri[1..uri.len - 1]
 else: return uri
-except KeyError:
-# cannot happen (theoretically, you know)
-assert(false)
+except KeyError: internalError("Unexpected KeyError for TagId " & $id)

 template constructScalarItem*(s: var YamlStream, i: expr,
 t: typedesc, content: untyped) =
@@ -91,10 +89,17 @@ proc representObject*(value: int, tagStyle: TagStyle,
 c: SerializationContext, tag: TagId): RawYamlStream
 {.raises: [], inline.}=
 ## represent an integer of architecture-defined length by casting it to int32.
-## on 64-bit systems, this may cause a type conversion error.
+## on 64-bit systems, this may cause a RangeError.

-# currently, sizeof(int) is at least sizeof(int32).
-representObject(int32(value), tagStyle, c, tag)
+result = iterator(): YamlStreamEvent =
+var ev: YamlStreamEvent
+try: ev = scalarEvent($int32(value), tag, yAnchorNone)
+except RangeError:
+var e = newException(YamlStreamError, getCurrentExceptionMsg())
+e.parent = getCurrentException()
+raise e
+yield ev

 {.push overflowChecks: on.}
 proc parseBiggestUInt(s: string): uint64 =
@@ -130,8 +135,15 @@ proc representObject*[T: uint8|uint16|uint32|uint64](value: T, ts: TagStyle,
 proc representObject*(value: uint, ts: TagStyle, c: SerializationContext,
 tag: TagId): RawYamlStream {.raises: [], inline.} =
 ## represent an unsigned integer of architecture-defined length by casting it
-## to int32. on 64-bit systems, this may cause a type conversion error.
-representObject(uint32(value), ts, c, tag)
+## to int32. on 64-bit systems, this may cause a RangeError.
+result = iterator(): YamlStreamEvent =
+var ev: YamlStreamEvent
+try: ev = scalarEvent($uint32(value), tag, yAnchorNone)
+except RangeError:
+var e = newException(YamlStreamError, getCurrentExceptionMsg())
+e.parent = getCurrentException()
+raise e
+yield ev

 proc constructObject*[T: float|float32|float64](
 s: var YamlStream, c: ConstructionContext, result: var T)
@@ -293,7 +305,7 @@ proc yamlTag*[K, V](T: typedesc[Table[K, V]]): TagId {.inline, raises: [].} =
 result = lazyLoadTag(uri)
 except KeyError:
-# cannot happen (theoretically, you know)
-assert(false)
+internalError("Unexpected KeyError")

 proc constructObject*[K, V](s: var YamlStream, c: ConstructionContext,
 result: var Table[K, V])
@@ -342,7 +354,7 @@ proc yamlTag*[K, V](T: typedesc[OrderedTable[K, V]]): TagId
 result = lazyLoadTag(uri)
 except KeyError:
-# cannot happen (theoretically, you know)
-assert(false)
+internalError("Unexpected KeyError")

 proc constructObject*[K, V](s: var YamlStream, c: ConstructionContext,
 result: var OrderedTable[K, V])
@@ -430,7 +442,7 @@ macro constructFieldValue(t: typedesc, stream: expr, context: expr,
 for bIndex in 1 .. len(child) - 1:
 let discTest = infix(discriminant, "==", child[bIndex][0])
 for item in child[bIndex][1].children:
-assert item.kind == nnkSym
+yAssert item.kind == nnkSym
 var ob = newNimNode(nnkOfBranch).add(newStrLitNode($item))
 let field = newDotExpr(o, newIdentNode($item))
 var ifStmt = newIfStmt((cond: discTest, body: newStmtList(
@@ -442,7 +454,7 @@ macro constructFieldValue(t: typedesc, stream: expr, context: expr,
 ob.add(newStmtList(ifStmt))
 result.add(ob)
 else:
-assert child.kind == nnkSym
+yAssert child.kind == nnkSym
 var ob = newNimNode(nnkOfBranch).add(newStrLitNode($child))
 let field = newDotExpr(o, newIdentNode($child))
 ob.add(newStmtList(newCall("constructChild", stream, context, field)))
@@ -547,15 +559,15 @@ proc yamlTag*[O](T: typedesc[ref O]): TagId {.inline, raises: [].} = yamlTag(O)
 macro constructImplicitVariantObject(s, c, r, possibleTagIds: expr,
 t: typedesc): stmt =
 let tDesc = getType(getType(t)[1])
-assert tDesc.kind == nnkObjectTy
+yAssert tDesc.kind == nnkObjectTy
 let recCase = tDesc[2][0]
-assert recCase.kind == nnkRecCase
+yAssert recCase.kind == nnkRecCase
 let
 discriminant = newDotExpr(r, newIdentNode($recCase[0]))
 discType = newCall("type", discriminant)
 var ifStmt = newNimNode(nnkIfStmt)
 for i in 1 .. recCase.len - 1:
-assert recCase[i].kind == nnkOfBranch
+yAssert recCase[i].kind == nnkOfBranch
 var branch = newNimNode(nnkElifBranch)
 var branchContent = newStmtList(newAssignment(discriminant, recCase[i][0]))
 case recCase[i][1].len
@@ -567,7 +579,7 @@ macro constructImplicitVariantObject(s, c, r, possibleTagIds: expr,
 branch.add(infix(
 newCall("yamlTag", newCall("type", field)), "in", possibleTagIds))
 branchContent.add(newCall("constructChild", s, c, field))
-else: assert false
+else: internalError("Too many children: " & $recCase[i][1].len)
 branch.add(branchContent)
 ifStmt.add(branch)
 let raiseStmt = newNimNode(nnkRaiseStmt).add(
@@ -580,7 +592,8 @@ macro constructImplicitVariantObject(s, c, r, possibleTagIds: expr,
 ))
 ifStmt.add(newNimNode(nnkElse).add(newNimNode(nnkTryStmt).add(
 newStmtList(raiseStmt), newNimNode(nnkExceptBranch).add(
-newIdentNode("KeyError"), newStmtList(newCall("assert", newLit(false)))
+newIdentNode("KeyError"), newStmtList(newCall("internalError",
+newStrLitNode("Unexcpected KeyError")))
 ))))
 result = newStmtList(newCall("reset", r), ifStmt)
@@ -623,7 +636,7 @@ proc constructChild*[T](s: var YamlStream, c: ConstructionContext,
 raise newException(YamlConstructionError,
 "Complex value of implicit variant object type must have a tag.")
 possibleTagIds.add(item.seqTag)
-else: assert false
+else: internalError("Unexpected item kind: " & $item.kind)
 constructImplicitVariantObject(s, c, result, possibleTagIds, T)
 else:
 case item.kind
@@ -646,7 +659,7 @@ proc constructChild*[T](s: var YamlStream, c: ConstructionContext,
 typetraits.name(T))
 elif item.seqAnchor != yAnchorNone:
 raise newException(YamlConstructionError, "Anchor on non-ref type")
-else: assert false
+else: internalError("Unexpected item kind: " & $item.kind)
 constructObject(s, c, result)

 proc constructChild*(s: var YamlStream, c: ConstructionContext,
@@ -690,15 +703,13 @@ proc constructChild*[O](s: var YamlStream, c: ConstructionContext,
 discard s.next()
 return
 elif e.kind == yamlAlias:
-try:
-result = cast[ref O](c.refs[e.aliasTarget])
-discard s.next()
-return
-except KeyError: assert(false)
+result = cast[ref O](c.refs.getOrDefault(e.aliasTarget))
+discard s.next()
+return
 new(result)
 template removeAnchor(anchor: var AnchorId) {.dirty.} =
 if anchor != yAnchorNone:
-assert(not c.refs.hasKey(anchor))
+yAssert(not c.refs.hasKey(anchor))
 c.refs[anchor] = cast[pointer](result)
 anchor = yAnchorNone
@@ -706,10 +717,10 @@ proc constructChild*[O](s: var YamlStream, c: ConstructionContext,
 of yamlScalar: removeAnchor(e.scalarAnchor)
 of yamlStartMap: removeAnchor(e.mapAnchor)
 of yamlStartSeq: removeAnchor(e.seqAnchor)
-else: assert(false)
+else: internalError("Unexpected event kind: " & $e.kind)
 s.peek = e
 try: constructChild(s, c, result[])
-except YamlConstructionError, YamlStreamError, AssertionError: raise
+except YamlConstructionError, YamlStreamError: raise
 except Exception:
 var e = newException(YamlStreamError, getCurrentExceptionMsg())
 e.parent = getCurrentException()
@@ -738,47 +749,40 @@ proc representChild*[O](value: ref O, ts: TagStyle, c: SerializationContext):
 else:
 let p = cast[pointer](value)
 if c.refs.hasKey(p):
-try:
-if c.refs[p] == yAnchorNone:
-c.refs[p] = c.nextAnchorId
-c.nextAnchorId = AnchorId(int(c.nextAnchorId) + 1)
-except KeyError: assert false, "Can never happen"
-result = iterator(): YamlStreamEvent {.raises: [].} =
-var event: YamlStreamEvent
-try: event = aliasEvent(c.refs[p])
-except KeyError: assert false, "Can never happen"
-yield event
-return
-try:
-if c.style == asAlways:
-if c.refs.getOrDefault(p) == yAnchorNone:
-c.refs[p] = c.nextAnchorId
-c.nextAnchorId = AnchorId(int(c.nextAnchorId) + 1)
-else: c.refs[p] = yAnchorNone
-let
-a = if c.style == asAlways: c.refs[p] else: cast[AnchorId](p)
-childTagStyle = if ts == tsAll: tsAll else: tsRootOnly
-result = iterator(): YamlStreamEvent =
-var child = representChild(value[], childTagStyle, c)
-var first = child()
-assert(not finished(child))
-case first.kind
-of yamlStartMap:
-first.mapAnchor = a
-if ts == tsNone: first.mapTag = yTagQuestionMark
-of yamlStartSeq:
-first.seqAnchor = a
-if ts == tsNone: first.seqTag = yTagQuestionMark
-of yamlScalar:
-first.scalarAnchor = a
-if ts == tsNone and guessType(first.scalarContent) != yTypeNull:
-first.scalarTag = yTagQuestionMark
-else: discard
-yield first
-while true:
-let event = child()
-if finished(child): break
-yield event
-except KeyError: assert false, "Can never happen"
+if c.refs.getOrDefault(p) == yAnchorNone:
+c.refs[p] = c.nextAnchorId
+c.nextAnchorId = AnchorId(int(c.nextAnchorId) + 1)
+result = iterator(): YamlStreamEvent {.raises: [].} =
+yield aliasEvent(c.refs.getOrDefault(p))
+return
+if c.style == asAlways:
+c.refs[p] = c.nextAnchorId
+c.nextAnchorId = AnchorId(int(c.nextAnchorId) + 1)
+else: c.refs[p] = yAnchorNone
+let
+a = if c.style == asAlways: c.refs.getOrDefault(p) else: cast[AnchorId](p)
+childTagStyle = if ts == tsAll: tsAll else: tsRootOnly
+result = iterator(): YamlStreamEvent =
+var child = representChild(value[], childTagStyle, c)
+var first = child()
+yAssert(not finished(child))
+case first.kind
+of yamlStartMap:
+first.mapAnchor = a
+if ts == tsNone: first.mapTag = yTagQuestionMark
+of yamlStartSeq:
+first.seqAnchor = a
+if ts == tsNone: first.seqTag = yTagQuestionMark
+of yamlScalar:
+first.scalarAnchor = a
+if ts == tsNone and guessType(first.scalarContent) != yTypeNull:
+first.scalarTag = yTagQuestionMark
+else: discard
+yield first
+while true:
+let event = child()
+if finished(child): break
+yield event

 proc representChild*[O](value: O, ts: TagStyle,
 c: SerializationContext): RawYamlStream =
@@ -801,17 +805,15 @@ proc construct*[T](s: var YamlStream, target: var T) =
 var context = newConstructionContext()
 try:
 var e = s.next()
-assert(e.kind == yamlStartDoc)
+yAssert(e.kind == yamlStartDoc)

 constructChild(s, context, target)
 e = s.next()
-assert(e.kind == yamlEndDoc)
+yAssert(e.kind == yamlEndDoc)
 except YamlConstructionError:
 raise (ref YamlConstructionError)(getCurrentException())
 except YamlStreamError:
 raise (ref YamlStreamError)(getCurrentException())
-except AssertionError:
-raise (ref AssertionError)(getCurrentException())
 except Exception:
 # may occur while calling s()
 var ex = newException(YamlStreamError, "")
@@ -833,19 +835,17 @@ proc load*[K](input: Stream, target: var K) =
 let e = (ref YamlStreamError)(getCurrentException())
 if e.parent of IOError: raise (ref IOError)(e.parent)
 elif e.parent of YamlParserError: raise (ref YamlParserError)(e.parent)
-else: assert false
+else: internalError("Unexpected exception: " & e.parent.repr)

 proc setAnchor(a: var AnchorId, q: var Table[pointer, AnchorId])
 {.inline.} =
-if a != yAnchorNone:
-try: a = q[cast[pointer](a)]
-except KeyError: assert false, "Can never happen"
+if a != yAnchorNone: a = q.getOrDefault(cast[pointer](a))

 proc represent*[T](value: T, ts: TagStyle = tsRootOnly,
 a: AnchorStyle = asTidy): YamlStream =
 var
 context = newSerializationContext(a)
-objStream = iterator(): YamlStreamEvent =
+objStream = iterator(): YamlStreamEvent {.raises: [YamlStreamError].} =
 yield startDocEvent()
 var events = representChild(value, ts, context)
 while true:
@@ -855,11 +855,8 @@ proc represent*[T](value: T, ts: TagStyle = tsRootOnly,
 yield endDocEvent()
 if a == asTidy:
 var objQueue = newSeq[YamlStreamEvent]()
-try:
-for event in objStream(): objQueue.add(event)
-except Exception:
-assert(false)
-var backend = iterator(): YamlStreamEvent =
+for event in objStream(): objQueue.add(event)
+var backend = iterator(): YamlStreamEvent {.raises: [YamlStreamError].} =
 for i in countup(0, objQueue.len - 1):
 var event = objQueue[i]
 case event.kind
@@ -879,5 +876,4 @@ proc dump*[K](value: K, target: Stream, tagStyle: TagStyle = tsRootOnly,
 if options.style == psJson: asNone else: anchorStyle)
 try: present(events, target, serializationTagLibrary, options)
 except YamlStreamError:
-# serializing object does not raise any errors, so we can ignore this
-assert false, "Can never happen"
+internalError("Unexpected exception: " & getCurrentException().repr)
|
@ -15,8 +15,7 @@ proc next*(s: var YamlStream): YamlStreamEvent =
|
|||
else:
|
||||
try:
|
||||
shallowCopy(result, s.backend())
|
||||
assert(not finished(s.backend))
|
||||
except AssertionError: raise
|
||||
yAssert(not finished(s.backend))
|
||||
except YamlStreamError:
|
||||
let cur = getCurrentException()
|
||||
var e = newException(YamlStreamError, cur.msg)
|
||||
|
@ -47,7 +46,6 @@ proc finished*(s: var YamlStream): bool =
|
|||
else:
|
||||
s.peeked = true
|
||||
result = false
|
||||
except AssertionError: raise
|
||||
except YamlStreamError:
|
||||
let cur = getCurrentException()
|
||||
var e = newException(YamlStreamError, cur.msg)
|
||||
|
|
yaml.nim (28 changed lines)
@@ -256,9 +256,6 @@ type
 style: AnchorStyle
 nextAnchorId: AnchorId

-RawYamlStream* = iterator(): YamlStreamEvent {.raises: [].} ## \
-## Stream of ``YamlStreamEvent``s returned by ``representObject`` procs.
-
 YamlNodeKind* = enum
 yScalar, yMapping, ySequence

@@ -342,6 +339,9 @@ type
 ## parsing, because otherwise this information is not available to the
 ## costruction proc.

+RawYamlStream* = iterator(): YamlStreamEvent {.raises: [YamlStreamError].}## \
+## Stream of ``YamlStreamEvent``s returned by ``representObject`` procs.
+
 const
 # failsafe schema
@@ -406,6 +406,18 @@ const
 PresentationOptions(style: psDefault, indentationStep: 2,
 newlines: nlOSDefault)

+# used throughout implementation code, therefore defined here
+template internalError(s: string) =
+when not defined(release):
+let ii = instantiationInfo()
+echo "! Error in file ", ii.filename, " at line ", ii.line, ":"
+echo "! ", s
+echo "! Please report this bug."
+quit 1
+template yAssert(e: typed) =
+when not defined(release):
+if not e: internalError(astToStr(e))
+
 # interface

 proc `==`*(left: YamlStreamEvent, right: YamlStreamEvent): bool {.raises: [].}
@@ -468,7 +480,6 @@ iterator items*(s: var YamlStream): YamlStreamEvent
 try:
 event = s.backend()
 if finished(s.backend): break
-except AssertionError: raise
 except YamlStreamError:
 let cur = getCurrentException()
 var e = newException(YamlStreamError, cur.msg)
@@ -625,8 +636,7 @@ proc representChild*(value: string, ts: TagStyle, c: SerializationContext):
 ## Represents a Nim string. Supports nil strings.

 proc representChild*[O](value: O, ts: TagStyle,
-c: SerializationContext):
-RawYamlStream {.raises: [].}
+c: SerializationContext): RawYamlStream {.raises: [].}
 ## Represents an arbitrary Nim object as YAML object.

 proc construct*[T](s: var YamlStream, target: var T)
@@ -638,13 +648,15 @@ proc load*[K](input: Stream, target: var K)
 ## Loads a Nim value from a YAML character stream.

 proc represent*[T](value: T, ts: TagStyle = tsRootOnly,
-a: AnchorStyle = asTidy): YamlStream {.raises: [].}
+a: AnchorStyle = asTidy): YamlStream
+{.raises: [YamlStreamError].}
 ## Represents a Nim value as ``YamlStream``

 proc dump*[K](value: K, target: Stream, tagStyle: TagStyle = tsRootOnly,
 anchorStyle: AnchorStyle = asTidy,
 options: PresentationOptions = defaultPresentationOptions)
-{.raises: [YamlPresenterJsonError, YamlPresenterOutputError].}
+{.raises: [YamlPresenterJsonError, YamlPresenterOutputError,
+YamlStreamError].}
 ## Dump a Nim value as YAML character stream.

 var