mirror of https://github.com/status-im/NimYAML.git
Code cleanup; reduced compiler hints
This commit is contained in:
parent f2f340c0b6
commit 33a7f93480
@@ -158,20 +158,26 @@ var
 
 block:
   multibench(cYaml1k, 100):
-    var s = newStringStream(json1k)
-    let res = constructJson(parser.parse(s))
+    var
+      s = newStringStream(json1k)
+      events = parser.parse(s)
+    let res = constructJson(events)
     assert res[0].kind == JObject
 
 block:
   multibench(cYaml10k, 100):
-    var s = newStringStream(json10k)
-    let res = constructJson(parser.parse(s))
+    var
+      s = newStringStream(json10k)
+      events = parser.parse(s)
+    let res = constructJson(events)
     assert res[0].kind == JObject
 
 block:
   multibench(cYaml100k, 100):
-    var s = newStringStream(json100k)
-    let res = constructJson(parser.parse(s))
+    var
+      s = newStringStream(json100k)
+      events = parser.parse(s)
+    let res = constructJson(events)
     assert res[0].kind == JObject
 
 block:
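Note: the benchmark bodies above now bind the parsed event stream to its own `events` variable instead of nesting `parser.parse(s)` inside the `constructJson` call. One common reason for such a split in Nim is that a callee taking a `var` parameter needs a named, mutable argument; whether that is the motivation here depends on `constructJson`'s signature, so the snippet below is only a standalone illustration of that general rule (hypothetical `produce`/`consume` procs, not NimYAML code):

    proc consume(xs: var seq[int]): int =
      # a `var` parameter requires an addressable, mutable argument
      result = xs.len
      xs.setLen(0)

    proc produce(): seq[int] = @[1, 2, 3]

    var events = produce()    # bind the intermediate result to a name first
    let n = consume(events)   # now it can be passed to a `var` parameter
    assert n == 3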
@@ -209,17 +215,17 @@ proc writeResult(caption: string, num: int64) =
 
 setForegroundColor(fgWhite)
 
-writeStyled "Benchmark: Processing JSON input with YAML versus Nim's JSON implementation\n"
-writeStyled "===========================================================================\n"
+writeStyled "Benchmark: Processing JSON input\n"
+writeStyled "================================\n"
 writeStyled "1k input\n--------\n"
-writeResult "YAML: ", cYaml1k div 1000
+writeResult "NimYAML: ", cYaml1k div 1000
 writeResult "JSON: ", cJson1k div 1000
 writeResult "LibYAML: ", cLibYaml1k div 1000
 writeStyled "10k input\n---------\n"
-writeResult "YAML: ", cYaml10k div 1000
+writeResult "NimYAML: ", cYaml10k div 1000
 writeResult "JSON: ", cJson10k div 1000
 writeResult "LibYAML: ", cLibYaml10k div 1000
 writeStyled "100k input\n----------\n"
-writeResult "YAML: ", cYaml100k div 1000
+writeResult "NimYAML: ", cYaml100k div 1000
 writeResult "JSON: ", cJson100k div 1000
 writeResult "LibYAML: ", cLibYaml100k div 1000
@@ -24,7 +24,7 @@ type
     lpdeDirectivesEnd, lpdeSequenceItem, lpdeScalarContent
 
   YamlContext = enum
-    cFlowIn, cFlowOut, cFlowKey, cBlockKey, cBlockIn, cBlockOut
+    cBlock, cFlow
 
 const
   space = [' ', '\t']
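Note: the six YAML-spec contexts are collapsed into just `cBlock` and `cFlow`, and the hunks that follow replace set-membership tests such as `context in [cBlockIn, cBlockOut, cBlockKey]` with a single comparison. A minimal standalone sketch of the resulting shape (the proc and constants below are illustrative stand-ins, not the module's actual declarations):

    type YamlContext = enum
      cBlock, cFlow

    const flowIndicators = {'[', ']', '{', '}', ','}

    proc isPlainSafeSketch(c: char, context: YamlContext): bool =
      # a flow indicator only terminates a plain scalar inside flow context
      if c in {' ', '\t', '\x0A', '\x0D'}:
        result = false
      elif c in flowIndicators:
        result = context == cBlock
      else:
        result = true

    assert isPlainSafeSketch(',', cBlock)
    assert not isPlainSafeSketch(',', cFlow)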
@@ -658,7 +658,7 @@ proc isPlainSafe(lexer: BaseLexer, index: int, context: YamlContext): bool =
   of spaceOrLineEnd:
     result = false
   of flowIndicators:
-    result = context in [cBlockIn, cBlockOut, cBlockKey]
+    result = context == cBlock
   else:
     result = true
 
@@ -691,7 +691,7 @@ template plainScalar(lexer: BaseLexer, content: var string,
     of '#':
       break outer
     of flowIndicators:
-      if context in [cBlockOut, cBlockIn, cBlockKey]:
+      if context == cBlock:
         content.add(after)
         content.add(c2)
         break
@@ -702,7 +702,7 @@ template plainScalar(lexer: BaseLexer, content: var string,
         content.add(c2)
         break
     of flowIndicators:
-      if context in [cBlockOut, cBlockIn, cBlockKey]:
+      if context == cBlock:
        content.add(c)
       else:
        break
@@ -719,13 +719,13 @@ template plainScalar(lexer: BaseLexer, content: var string,
 template continueMultilineScalar() {.dirty.} =
   content.add(if newlines == 1: " " else: repeat('\x0A', newlines - 1))
   startToken()
-  p.lexer.plainScalar(content, cBlockOut)
+  p.lexer.plainScalar(content, cBlock)
   state = fpBlockAfterPlainScalar
 
 template handleFlowPlainScalar() {.dirty.} =
   content = ""
   startToken()
-  p.lexer.plainScalar(content, cFlowOut)
+  p.lexer.plainScalar(content, cFlow)
   if p.lexer.buf[p.lexer.bufpos] in ['{', '}', '[', ']', ',', ':', '#']:
     discard
   else:
@@ -733,14 +733,14 @@ template handleFlowPlainScalar() {.dirty.} =
   while true:
     case p.lexer.buf[p.lexer.bufpos]
     of ':':
-      if p.lexer.isPlainSafe(p.lexer.bufpos + 1, cFlowOut):
+      if p.lexer.isPlainSafe(p.lexer.bufpos + 1, cFlow):
         if newlines == 1:
           content.add(' ')
           newlines = 0
         elif newlines > 1:
           content.add(repeat(' ', newlines - 1))
           newlines = 0
-        p.lexer.plainScalar(content, cFlowOut)
+        p.lexer.plainScalar(content, cFlow)
       elif explicitFlowKey:
         break
     else:
@@ -764,7 +764,7 @@ template handleFlowPlainScalar() {.dirty.} =
       elif newlines > 1:
         content.add(repeat(' ', newlines - 1))
         newlines = 0
-      p.lexer.plainScalar(content, cFlowOut)
+      p.lexer.plainScalar(content, cFlow)
   yield scalarEvent(content, tag, anchor)
   handleObjectEnd(fpFlowAfterObject)
 
@@ -1098,7 +1098,7 @@ proc parse*(p: YamlParser, s: Stream): YamlStream =
         state = fpBlockObjectStart
       of lpdeScalarContent:
         content = ""
-        p.lexer.plainScalar(content, cBlockOut)
+        p.lexer.plainScalar(content, cBlock)
         state = fpBlockAfterPlainScalar
     else:
       yield startDocEvent()
@@ -1134,7 +1134,7 @@ proc parse*(p: YamlParser, s: Stream): YamlStream =
           ancestry.add(level)
           level = FastParseLevel(kind: fplUnknown, indentation: -1)
           content = ""
-          p.lexer.plainScalar(content, cBlockOut)
+          p.lexer.plainScalar(content, cBlock)
           state = fpBlockAfterPlainScalar
       of '.':
         var isDocumentEnd: bool
@@ -1160,7 +1160,7 @@ proc parse*(p: YamlParser, s: Stream): YamlStream =
           ancestry.add(level)
           level = FastParseLevel(kind: fplUnknown, indentation: -1)
           content = ""
-          p.lexer.plainScalar(content, cBlockOut)
+          p.lexer.plainScalar(content, cBlock)
          state = fpBlockAfterPlainScalar
       of ' ':
         p.lexer.skipIndentation()
@@ -1172,7 +1172,7 @@ proc parse*(p: YamlParser, s: Stream): YamlStream =
         indentation = p.lexer.getColNumber(p.lexer.bufpos)
         if p.lexer.buf[p.lexer.bufpos] == '-' and not
             p.lexer.isPlainSafe(p.lexer.bufpos + 1, if flowdepth == 0:
-                cBlockOut else: cFlowOut):
+                cBlock else: cFlow):
           closeMoreIndentedLevels(true)
         else: closeMoreIndentedLevels()
         case level.kind
@@ -1212,7 +1212,7 @@ proc parse*(p: YamlParser, s: Stream): YamlStream =
           newlines.inc()
           p.lexer.bufpos = p.lexer.handleCR(p.lexer.bufpos)
         of ':':
-          if p.lexer.isPlainSafe(p.lexer.bufpos + 1, cBlockOut):
+          if p.lexer.isPlainSafe(p.lexer.bufpos + 1, cBlock):
             continueMultilineScalar()
           else:
             startToken()
@@ -1294,7 +1294,6 @@ proc parse*(p: YamlParser, s: Stream): YamlStream =
       debug("state: blockObjectStart")
       p.lexer.skipWhitespace()
       indentation = p.lexer.getColNumber(p.lexer.bufpos)
-      let objectStart = p.lexer.getColNumber(p.lexer.bufpos)
       case p.lexer.buf[p.lexer.bufpos]
       of '\x0A':
         p.lexer.bufpos = p.lexer.handleLF(p.lexer.bufpos)
@@ -1340,11 +1339,11 @@ proc parse*(p: YamlParser, s: Stream): YamlStream =
         yield scalarEvent(content, tag, anchor)
         handleObjectEnd(stateAfter)
       of '-':
-        if p.lexer.isPlainSafe(p.lexer.bufpos + 1, cBlockOut):
+        if p.lexer.isPlainSafe(p.lexer.bufpos + 1, cBlock):
           handleBlockItemStart()
           content = ""
           startToken()
-          p.lexer.plainScalar(content, cBlockOut)
+          p.lexer.plainScalar(content, cBlock)
           state = fpBlockAfterPlainScalar
         else:
           p.lexer.bufpos.inc()
@@ -1362,21 +1361,21 @@ proc parse*(p: YamlParser, s: Stream): YamlStream =
         handleBlockItemStart()
         state = fpFlow
       of '?':
-        if p.lexer.isPlainSafe(p.lexer.bufpos + 1, cBlockOut):
+        if p.lexer.isPlainSafe(p.lexer.bufpos + 1, cBlock):
           handleBlockItemStart()
           content = ""
           startToken()
-          p.lexer.plainScalar(content, cBlockOut)
+          p.lexer.plainScalar(content, cBlock)
           state = fpBlockAfterPlainScalar
         else:
           p.lexer.bufpos.inc()
           handleMapKeyIndicator()
       of ':':
-        if p.lexer.isPlainSafe(p.lexer.bufpos + 1, cBlockOut):
+        if p.lexer.isPlainSafe(p.lexer.bufpos + 1, cBlock):
           handleBlockItemStart()
           content = ""
           startToken()
-          p.lexer.plainScalar(content, cBlockOut)
+          p.lexer.plainScalar(content, cBlock)
           state = fpBlockAfterPlainScalar
         else:
           p.lexer.bufpos.inc()
@@ -1387,7 +1386,7 @@ proc parse*(p: YamlParser, s: Stream): YamlStream =
         handleBlockItemStart()
         content = ""
         startToken()
-        p.lexer.plainScalar(content, cBlockOut)
+        p.lexer.plainScalar(content, cBlock)
         state = fpBlockAfterPlainScalar
     of fpExpectDocEnd:
       case p.lexer.buf[p.lexer.bufpos]
@@ -1506,7 +1505,7 @@ proc parse*(p: YamlParser, s: Stream): YamlStream =
         p.lexer.bufpos.inc()
       of ':':
         assert(level.kind == fplUnknown)
-        if p.lexer.isPlainSafe(p.lexer.bufpos + 1, cFlowIn):
+        if p.lexer.isPlainSafe(p.lexer.bufpos + 1, cFlow):
           level = ancestry.pop()
           case level.kind
           of fplSequence, fplMapValue:
@@ -1548,7 +1547,7 @@ proc parse*(p: YamlParser, s: Stream): YamlStream =
         handleAlias()
         state = fpFlowAfterObject
       of '?':
-        if p.lexer.isPlainSafe(p.lexer.bufpos + 1, cFlowOut):
+        if p.lexer.isPlainSafe(p.lexer.bufpos + 1, cFlow):
           handleFlowPlainScalar()
         elif explicitFlowKey:
           startToken()
@@ -27,7 +27,7 @@ type
     ythPointLowerIN, ythPointLowerN, ythPointLowerNA,
 
     ythMinus, yth0, ythInt, ythDecimal, ythNumE, ythNumEPlusMinus,
-    ythExponent, ythNone
+    ythExponent
 
 macro typeHintStateMachine(c: untyped, content: untyped): stmt =
   assert content.kind == nnkStmtList
@@ -297,9 +297,9 @@ proc present*(s: var YamlStream, target: Stream, tagLib: TagLibrary,
       var nextState: DumperState
       case style
       of psDefault:
-        type mapParseState = enum
+        type MapParseState = enum
           mpInitial, mpKey, mpValue, mpNeedBlock
-        var mps = mpInitial
+        var mps: MapParseState = mpInitial
         while mps != mpNeedBlock:
           case s.peek().kind
           of yamlScalar, yamlAlias:
@@ -479,14 +479,13 @@ proc transform*(input: Stream, output: Stream, style: PresentationStyle,
     var e = getCurrentException()
     while e.parent of YamlStreamError: e = e.parent
     if e.parent of IOError:
-      raise cast[ref IOError](e.parent)
+      raise (ref IOError)(e.parent)
     elif e.parent of YamlParserError:
-      raise cast[ref YamlParserError](e.parent)
+      raise (ref YamlParserError)(e.parent)
     else:
       # never happens
       assert(false)
-  except YamlPresenterJsonError, YamlPresenterOutputError:
-    raise
-  except Exception:
-    # compiler bug: https://github.com/nim-lang/Nim/issues/3772
-    assert(false)
+  except YamlPresenterJsonError:
+    raise (ref YamlPresenterJsonError)(getCurrentException())
+  except YamlPresenterOutputError:
+    raise (ref YamlPresenterOutputError)(getCurrentException())
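Note: the re-raise logic switches from `cast[ref E](...)` to the type conversion `(ref E)(...)`. In Nim, converting a ref object is a checked down-conversion (it fails at run time if the dynamic type does not match), whereas `cast` merely reinterprets the reference. A standalone sketch of the pattern, not NimYAML code:

    type AppError = object of CatchableError

    proc reraiseTyped() =
      try:
        raise newException(AppError, "boom")
      except CatchableError:
        let e = getCurrentException()
        if e of AppError:
          # checked conversion: raises on a wrong dynamic type instead of
          # silently producing a mis-typed reference like cast would
          raise (ref AppError)(e)
        else:
          raise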
@@ -33,17 +33,6 @@ var
   ## Should not be modified manually. Will be extended by
   ## `serializable <#serializable,stmt,stmt>`_.
 
-static:
-  iterator objectFields(n: NimNode): tuple[name: NimNode, t: NimNode]
-      {.raises: [].} =
-    assert n.kind in [nnkRecList, nnkTupleTy]
-    for identDefs in n.children:
-      let numFields = identDefs.len - 2
-      for i in 0..numFields - 1:
-        yield (name: identDefs[i], t: identDefs[^2])
-
-  var existingTuples = newSeq[NimNode]()
-
 template presentTag*(t: typedesc, ts: TagStyle): TagId =
   if ts == tsNone: yTagQuestionMark else: yamlTag(t)
 
@@ -310,14 +299,8 @@ proc representObject*[T](value: seq[T], ts: TagStyle,
 
 proc yamlTag*[K, V](T: typedesc[Table[K, V]]): TagId {.inline, raises: [].} =
   try:
-    let
-      keyUri = serializationTagLibrary.uri(yamlTag(K))
-      valueUri = serializationTagLibrary.uri(yamlTag(V))
-      keyIdent = if keyUri[0] == '!': keyUri[1..keyUri.len - 1] else:
-          keyUri
-      valueIdent = if valueUri[0] == '!':
-          valueUri[1..valueUri.len - 1] else: valueUri
-      uri = "!nim:tables:Table(" & keyUri & "," & valueUri & ")"
+    let uri = "!nim:tables:Table(" & safeTagUri(yamlTag(K)) & "," &
+        safeTagUri(yamlTag(V)) & ")"
     result = lazyLoadTag(uri)
   except KeyError:
     # cannot happen (theoretically, you known)
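Note: the hand-rolled `keyIdent`/`valueIdent` bindings, which the old code computed but never used in the URI it built, are replaced by calls to `safeTagUri`. A hedged sketch of what such a helper presumably does with the looked-up URI (the helper name and signature here are assumptions, not copied from NimYAML):

    proc stripLocalTagMarker(uri: string): string =
      # drop a leading '!' so the identifier can be embedded in a
      # compound tag such as !nim:tables:Table(...)
      result = if uri.len > 0 and uri[0] == '!': uri[1 .. ^1] else: uri

    assert stripLocalTagMarker("!nim:system:int") == "nim:system:int"
    assert stripLocalTagMarker("tag:yaml.org,2002:str") == "tag:yaml.org,2002:str"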
@@ -559,8 +542,12 @@ proc construct*[T](s: var YamlStream, target: var T)
     constructChild(s, context, target)
     e = s.next()
     assert(e.kind == yamlEndDocument)
-  except YamlConstructionError, YamlStreamError, AssertionError:
-    raise
+  except YamlConstructionError:
+    raise (ref YamlConstructionError)(getCurrentException())
+  except YamlStreamError:
+    raise (ref YamlStreamError)(getCurrentException())
+  except AssertionError:
+    raise (ref AssertionError)(getCurrentException())
   except Exception:
     # may occur while calling s()
     var ex = newException(YamlStreamError, "")
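Note: instead of one `except A, B, C: raise` branch, each exception type now gets its own handler that re-raises the current exception through a typed conversion, presumably so every `raise` carries a concrete exception type for Nim's exception tracking. A hedged, standalone sketch of how that interacts with an explicit `{.raises.}` list (example proc, not NimYAML code):

    import std/strutils

    proc parseAge(s: string): int {.raises: [ValueError].} =
      try:
        result = parseInt(s)
      except ValueError:
        # getCurrentException() yields a ref Exception; the conversion
        # narrows it so the raise matches the declared raises list
        raise (ref ValueError)(getCurrentException())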
@@ -665,10 +652,4 @@ proc dump*[K](value: K, target: Stream, style: PresentationStyle = psDefault,
     present(events, target, serializationTagLibrary, style, indentationStep)
   except YamlStreamError:
     # serializing object does not raise any errors, so we can ignore this
-    var e = getCurrentException()
-    assert(false)
-  except YamlPresenterJsonError, YamlPresenterOutputError, AssertionError, FieldError:
-    raise
-  except Exception:
-    # cannot occur as represent() doesn't raise any errors
-    assert(false)
+    assert false, "Can never happen"
@@ -56,9 +56,6 @@ proc newNode(v: string): ref Node =
   result.next = nil
 
 suite "Serialization":
-  setup:
-    var tagLib = serializationTagLibrary
-
   test "Serialization: Load string sequence":
     let input = newStringStream(" - a\n - b")
     var result: seq[string]
yaml.nim
@@ -492,9 +492,7 @@ proc constructJson*(s: var YamlStream): seq[JsonNode]
   ## check for these values and will output invalid JSON when rendering one
   ## of these values into a JSON character stream.
 
-proc loadToJson*(s: Stream): seq[JsonNode]
-    {.raises: [IOError, YamlParserError, YamlConstructionError,
-               OutOfMemError].}
+proc loadToJson*(s: Stream): seq[JsonNode] {.raises: [].}
   ## Uses `YamlParser <#YamlParser>`_ and
   ## `constructJson <#constructJson>`_ to construct an in-memory JSON tree
   ## from a YAML character stream.
@@ -509,8 +507,7 @@ proc present*(s: var YamlStream, target: Stream, tagLib: TagLibrary,
 proc transform*(input: Stream, output: Stream, style: PresentationStyle,
                 indentationStep: int = 2) {.raises: [IOError, YamlParserError,
                                                      YamlPresenterJsonError,
-                                                     YamlPresenterOutputError,
-                                                     OutOfMemError].}
+                                                     YamlPresenterOutputError].}
   ## Parser ``input`` as YAML character stream and then dump it to ``output``
   ## while resolving non-specific tags to the ones in the YAML core tag
   ## library.
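Note: both declarations in yaml.nim shrink their declared `{.raises.}` lists (`loadToJson` to an empty list, `transform` by dropping `OutOfMemError`). Since Nim enforces a declared raises list against the implementation, shrinking it both documents and checks that fewer exception types can escape. A minimal standalone sketch of that enforcement (example proc, not part of NimYAML):

    proc half(x: int): int {.raises: [ValueError].} =
      # raising anything outside the declared list would be a compile error
      if x mod 2 != 0:
        raise newException(ValueError, "odd input")
      result = x div 2

    assert half(4) == 2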