Added JSON compositor.

This commit is contained in:
Felix Krause 2015-12-24 15:21:49 +01:00
parent b8afc8b9d9
commit c83d488886
3 changed files with 105 additions and 5 deletions

94
src/private/json.nim Normal file
View File

@@ -0,0 +1,94 @@
type
  ## One open node on the composition stack: the JSON container being
  ## filled, plus the pending mapping key (nil while no key is pending).
  ## NOTE(review): relies on pre-0.19 Nim nilable strings — verify before
  ## porting to a modern compiler.
  Level = tuple[node: JsonNode, key: string]

# Wrap a node in a Level with no pending key.
# NOTE(review): appears unused in this file — parseToJson builds the tuples
# inline; confirm before removing.
proc initLevel(node: JsonNode): Level = (node: node, key: nil)
proc jsonFromScalar(content: string, typeHint: YamlTypeHint): JsonNode =
  ## Build a ``JsonNode`` from a YAML scalar, choosing the JSON
  ## representation from the parser's type hint.
  ##
  ## Raises ``ValueError`` if `content` does not actually parse as the
  ## hinted numeric/boolean type.
  ##
  ## Fix over the original: the old body allocated with ``new(result)`` and
  ## then assigned the case-object discriminator (``result.kind = JInt``)
  ## plus private fields directly; reassigning a discriminator is illegal in
  ## later Nim versions. The public json constructors avoid that entirely.
  case typeHint
  of yTypeInteger:
    # Widest integer kind, so large YAML ints survive the round trip.
    result = newJInt(parseBiggestInt(content))
  of yTypeFloat:
    result = newJFloat(parseFloat(content))
  of yTypeBoolean:
    result = newJBool(parseBool(content))
  of yTypeNull:
    result = newJNull()
  else:
    # Unknown or explicit string hint: keep the raw scalar text verbatim.
    result = newJString(content)
proc parseToJson*(s: string): seq[JsonNode] =
  ## Convenience overload: parse YAML given as a string by delegating to
  ## the stream-based implementation.
  parseToJson(newStringStream(s))
proc parseToJson*(s: Stream): seq[JsonNode] =
  ## Compose the YAML documents read from `s` into JSON, returning one
  ## ``JsonNode`` per document. Maintains a stack of open containers
  ## (`levels`); scalars and closed containers are attached to the top of
  ## the stack, and each document's root is appended to ``result`` when the
  ## document ends.
  ##
  ## Raises ``ValueError`` when a mapping uses a non-scalar key (JSON only
  ## allows string keys).
  newSeq(result, 0)
  var
    levels = newSeq[Level]()
    parser = newParser()
    # NOTE(review): the registered tag URIs below are not referenced in the
    # visible body — presumably they force the parser to resolve these core
    # tags so event.scalarType carries useful hints; confirm against the
    # parser implementation.
    tagStr = parser.registerUri("tag:yaml.org,2002:str")
    tagBool = parser.registerUri("tag:yaml.org,2002:bool")
    tagNull = parser.registerUri("tag:yaml.org,2002:null")
    tagInt = parser.registerUri("tag:yaml.org,2002:int")
    tagFloat = parser.registerUri("tag:yaml.org,2002:float")
    events = parser.parse(s)
  for event in events():
    case event.kind
    of yamlStartDocument:
      # we don't need to do anything here; root node will be created
      # by first scalar, sequence or map event
      discard
    of yamlEndDocument:
      # we can safely assume that levels has a length of exactly 1:
      # every container opened in this document has been closed, leaving
      # only the document root on the stack.
      result.add(levels.pop().node)
    of yamlStartSequence:
      # open a new JSON array; key stays nil (arrays have no pending key)
      levels.add((node: newJArray(), key: cast[string](nil)))
    of yamlStartMap:
      # open a new JSON object; nil key means "waiting for the next key"
      levels.add((node: newJObject(), key: cast[string](nil)))
    of yamlScalar:
      case levels[levels.high].node.kind
      of JArray:
        # scalar inside a sequence: append as a typed JSON value
        levels[levels.high].node.elems.add(
          jsonFromScalar(event.scalarContent, event.scalarType))
      of JObject:
        if isNil(levels[levels.high].key):
          # JSON only allows strings as keys
          levels[levels.high].key = event.scalarContent
        else:
          # second scalar of a key/value pair: store it and clear the key
          levels[levels.high].node.fields.add(
            (key: levels[levels.high].key, val: jsonFromScalar(
              event.scalarContent, event.scalarType)))
          levels[levels.high].key = nil
      else:
        discard # will never happen
    of yamlEndSequence, yamlEndMap:
      if levels.len > 1:
        # pop the finished container and attach it to its parent
        let level = levels.pop()
        case levels[levels.high].node.kind
        of JArray:
          levels[levels.high].node.elems.add(level.node)
        of JObject:
          if isNil(levels[levels.high].key):
            raise newException(ValueError,
              "non-scalar as key not allowed in JSON")
          else:
            levels[levels.high].node.fields.add(
              (key: levels[levels.high].key, val: level.node))
            levels[levels.high].key = nil
        else:
          discard # will never happen
      else:
        discard # wait for yamlEndDocument
    of yamlWarning:
      # NOTE(review): echo in library code — consider std/logging or
      # surfacing warnings to the caller instead.
      echo "YAML warning at line ", event.line, ", column ", event.column,
        ": ", event.description
    of yamlError:
      # NOTE(review): errors are only printed, not raised — callers cannot
      # detect a failed parse programmatically; confirm this is intended.
      echo "YAML error at line ", event.line, ", column ", event.column,
        ": ", event.description
    of yamlAlias:
      discard # todo

View File

@@ -1,4 +1,4 @@
import streams, unicode, lexbase, tables, strutils
import streams, unicode, lexbase, tables, strutils, json
type
YamlTypeHint* = enum
@@ -41,7 +41,11 @@ type
proc parse*(parser: YamlSequentialParser, s: Stream):
iterator(): YamlParserEvent
proc parseToJson*(s: Stream): seq[JsonNode]
proc parseToJson*(s: string): seq[JsonNode]
# implementation
include private.lexer
include private.sequential
include private.sequential
include private.json

View File

@@ -162,9 +162,11 @@ suite "Parsing":
scalar("a true"), scalar("null d"), scalar("e 42"), scalar(""),
endMap(), endDoc())
test "Parsing: Map in Sequence":
ensure(" - key: value", startDoc(), startSequence(), startMap(),
scalar("key"), scalar("value"), endMap(), endSequence(),
endDoc())
ensure(" - key: value\n key2: value2\n -\n key3: value3",
startDoc(), startSequence(), startMap(), scalar("key"),
scalar("value"), scalar("key2"), scalar("value2"), endMap(),
startMap(), scalar("key3"), scalar("value3"), endMap(),
endSequence(), endDoc())
test "Parsing: Sequence in Map":
ensure("key:\n - item1\n - item2", startDoc(), startMap(),
scalar("key"), startSequence(), scalar("item1"), scalar("item2"),