wip

commit be24c9cd93
parent 230e226da0
@@ -3,4 +3,3 @@ import

 export
   serialization, format, reader, writer
-
@@ -8,3 +8,14 @@ template supports*(_: type Json, T: type): bool =
   # The JSON format should support every type
   true

+template useAutomaticObjectSerialization*(T: type DefaultFlavor): bool = true
+
+template createJsonFlavor*(FlavorName: untyped,
+                           mimeTypeValue = "application/json",
+                           automaticObjectSerialization = false) {.dirty.} =
+  type FlavorName* = object
+  template Reader*(T: type FlavorName): type = Reader(Json, FlavorName)
+  template Writer*(T: type FlavorName): type = Writer(Json, FlavorName)
+  template PreferredOutputType*(T: type FlavorName): type = string
+  template mimeType*(T: type FlavorName): string = mimeTypeValue
+  template useAutomaticObjectSerialization*(T: type FlavorName): bool = automaticObjectSerialization
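Usage sketch (not part of the diff): `createJsonFlavor` stamps out a fresh flavor type and wires up its Reader/Writer/MIME-type templates in one call. The flavor name below is illustrative.

  createJsonFlavor StrictJson,
    mimeTypeValue = "application/json",
    automaticObjectSerialization = true
  # StrictJson is now a distinct flavor; readers and writers can be
  # instantiated as JsonReader[StrictJson] / JsonWriter[StrictJson].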
@@ -1,13 +1,15 @@
+{.push raises: [].}
+
 import
-  std/[unicode, json],
+  std/[json, unicode],
   faststreams/inputs,
   types

+from std/strutils import isDigit
+
 export
   inputs, types

-{.push raises: [].}
-
 type
   CustomIntHandler* = ##\
     ## Custom decimal integer parser, result values need to be captured
@@ -113,6 +115,7 @@ proc renderTok*(lexer: var JsonLexer, output: var string)
     lexer.scanString
   else:
     discard
+
   # The real stuff
   case lexer.tokKind
   of tkError, tkEof, tkNumeric, tkExInt, tkExNegInt, tkQuoted, tkExBlob:
@@ -153,23 +156,20 @@ template peek(s: InputStream): char =
 template read(s: InputStream): char =
   char inputs.read(s)

-proc hexCharValue(c: char): int =
+func hexCharValue(c: char): int =
   case c
   of '0'..'9': ord(c) - ord('0')
   of 'a'..'f': ord(c) - ord('a') + 10
   of 'A'..'F': ord(c) - ord('A') + 10
   else: -1

-proc isDigit(c: char): bool =
-  return (c >= '0' and c <= '9')
-
-proc col*(lexer: JsonLexer): int =
+func col*(lexer: JsonLexer): int =
   lexer.stream.pos - lexer.lineStartPos

-proc tokenStartCol*(lexer: JsonLexer): int =
+func tokenStartCol*(lexer: JsonLexer): int =
   1 + lexer.tokenStart - lexer.lineStartPos

-proc init*(T: type JsonLexer, stream: InputStream, mode = defaultJsonMode): T =
+func init*(T: type JsonLexer, stream: InputStream, mode = defaultJsonMode): T =
   T(stream: stream,
     mode: mode,
     line: 1,
@@ -205,7 +205,7 @@ proc scanHexRune(lexer: var JsonLexer): int
     if hexValue == -1: error errHexCharExpected
     result = (result shl 4) or hexValue

-proc scanString(lexer: var JsonLexer) =
+proc scanString(lexer: var JsonLexer) {.raises: [IOError].} =
   lexer.tokKind = tkString
   lexer.strVal.setLen 0
   lexer.tokenStart = lexer.stream.pos
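Aside (illustrative, not from the diff): the `(result shl 4) or hexValue` line above folds one hex digit into the accumulated rune value per loop iteration. A standalone sketch of the same accumulation:

  var rune = 0
  for c in "20AC":                       # the \u20AC escape (EURO SIGN)
    rune = (rune shl 4) or hexCharValue(c)
  assert rune == 0x20AC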
@@ -256,7 +256,7 @@ proc scanString(lexer: var JsonLexer) =
   else:
     lexer.strVal.add c

-proc handleLF(lexer: var JsonLexer) =
+func handleLF(lexer: var JsonLexer) =
   advance lexer.stream
   lexer.line += 1
   lexer.lineStartPos = lexer.stream.pos
@@ -343,7 +343,7 @@ proc scanSign(lexer: var JsonLexer): int
   elif c == '+':
     requireMoreNumberChars: result = 0
     advance lexer.stream
-    return 1
+    1

 proc scanInt(lexer: var JsonLexer): (uint64,bool)
     {.gcsafe, raises: [IOError].} =
@@ -371,7 +371,6 @@ proc scanInt(lexer: var JsonLexer): (uint64,bool)
     # Fetch next digit
     c = eatDigitAndPeek() # implicit auto-return

-
 proc scanNumber(lexer: var JsonLexer)
     {.gcsafe, raises: [IOError].} =
   var sign = lexer.scanSign()
@@ -422,9 +421,10 @@ proc scanNumber(lexer: var JsonLexer)
       lexer.floatVal = lexer.floatVal / powersOfTen[exponent]

 proc scanIdentifier(lexer: var JsonLexer,
-                    expectedIdent: string, expectedTok: TokKind) =
+                    expectedIdent: string, expectedTok: TokKind)
+                   {.raises: [IOError].} =
   for c in expectedIdent:
-    if c != lexer.stream.read():
+    if c != requireNextChar():
       lexer.tokKind = tkError
       return
   lexer.tokKind = expectedTok
@@ -492,11 +492,10 @@ proc tok*(lexer: var JsonLexer): TokKind
   lexer.accept
   lexer.tokKind

-proc lazyTok*(lexer: JsonLexer): TokKind =
+func lazyTok*(lexer: JsonLexer): TokKind =
   ## Preliminary token state unless accepted, already
   lexer.tokKind

-
 proc customIntHandler*(lexer: var JsonLexer; handler: CustomIntHandler)
     {.gcsafe, raises: [IOError].} =
   ## Apply the `handler` argument function for parsing a `tkNumeric` type
@@ -97,55 +97,84 @@ method formatMsg*(err: ref IncompleteObjectError, filename: string):
     string {.gcsafe, raises: [].} =
   tryFmt: fmt"{filename}({err.line}, {err.col}) Not all required fields were specified when reading '{err.objectType}'"

-proc assignLineNumber*(ex: ref JsonReaderError, r: JsonReader) =
-  ex.line = r.lexer.line
-  ex.col = r.lexer.tokenStartCol
+func assignLineNumber*(ex: ref JsonReaderError, lexer: JsonLexer) =
+  ex.line = lexer.line
+  ex.col = lexer.tokenStartCol

-proc raiseUnexpectedToken*(r: JsonReader, expected: ExpectedTokenCategory)
+func raiseUnexpectedToken*(lexer: JsonLexer, expected: ExpectedTokenCategory)
     {.noreturn, raises: [JsonReaderError].} =
   var ex = new UnexpectedTokenError
-  ex.assignLineNumber(r)
-  ex.encountedToken = r.lexer.lazyTok
+  ex.assignLineNumber(lexer)
+  ex.encountedToken = lexer.lazyTok
   ex.expectedToken = expected
   raise ex

-proc raiseUnexpectedValue*(r: JsonReader, msg: string) {.noreturn, raises: [JsonReaderError].} =
+template raiseUnexpectedToken*(reader: JsonReader, expected: ExpectedTokenCategory) =
+  raiseUnexpectedToken(reader.lexer, expected)
+
+func raiseUnexpectedValue*(
+    lexer: JsonLexer, msg: string) {.noreturn, raises: [JsonReaderError].} =
   var ex = new UnexpectedValueError
-  ex.assignLineNumber(r)
+  ex.assignLineNumber(lexer)
   ex.msg = msg
   raise ex

-proc raiseIntOverflow*(r: JsonReader, absIntVal: BiggestUint, isNegative: bool) {.noreturn, raises: [JsonReaderError].} =
+template raiseUnexpectedValue*(r: JsonReader, msg: string) =
+  raiseUnexpectedValue(r.lexer, msg)
+
+func raiseIntOverflow*(
+    lexer: JsonLexer, absIntVal: BiggestUint, isNegative: bool)
+    {.noreturn, raises: [JsonReaderError].} =
   var ex = new IntOverflowError
-  ex.assignLineNumber(r)
+  ex.assignLineNumber(lexer)
   ex.absIntVal = absIntVal
   ex.isNegative = isNegative
   raise ex

-proc raiseUnexpectedField*(r: JsonReader, fieldName: string, deserializedType: cstring) {.noreturn, raises: [JsonReaderError].} =
+template raiseIntOverflow*(r: JsonReader, absIntVal: BiggestUint, isNegative: bool) =
+  raiseIntOverflow(r.lexer, absIntVal, isNegative)
+
+func raiseUnexpectedField*(
+    lexer: JsonLexer, fieldName: string, deserializedType: cstring)
+    {.noreturn, raises: [JsonReaderError].} =
   var ex = new UnexpectedField
-  ex.assignLineNumber(r)
+  ex.assignLineNumber(lexer)
   ex.encounteredField = fieldName
   ex.deserializedType = deserializedType
   raise ex

-proc raiseIncompleteObject*(r: JsonReader, objectType: cstring) {.noreturn, raises: [JsonReaderError].} =
+template raiseUnexpectedField*(r: JsonReader, fieldName: string, deserializedType: cstring) =
+  raiseUnexpectedField(r.lexer, fieldName, deserializedType)
+
+func raiseIncompleteObject*(
+    lexer: JsonLexer, objectType: cstring)
+    {.noreturn, raises: [JsonReaderError].} =
   var ex = new IncompleteObjectError
-  ex.assignLineNumber(r)
+  ex.assignLineNumber(lexer)
   ex.objectType = objectType
   raise ex

-proc handleReadException*(r: JsonReader,
+template raiseIncompleteObject*(r: JsonReader, objectType: cstring) =
+  raiseIncompleteObject(r.lexer, objectType)
+
+func handleReadException*(lexer: JsonLexer,
                           Record: type,
                           fieldName: string,
                           field: auto,
                           err: ref CatchableError) {.raises: [JsonReaderError].} =
   var ex = new GenericJsonReaderError
-  ex.assignLineNumber(r)
+  ex.assignLineNumber(lexer)
   ex.deserializedField = fieldName
   ex.innerException = err
   raise ex

+template handleReadException*(r: JsonReader,
+                              Record: type,
+                              fieldName: string,
+                              field: auto,
+                              err: ref CatchableError) =
+  handleReadException(r.lexer, Record, fieldName, field, err)
+
 proc init*(T: type JsonReader,
            stream: InputStream,
            mode = defaultJsonMode,
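The pattern throughout this hunk: each raise helper becomes a JsonLexer-level func plus a thin JsonReader template that forwards to it, so positioned errors can also be raised from code that only holds a lexer. A hedged sketch of a call site (MyId and its parsing are illustrative):

  type MyId = distinct string

  proc readValue*(r: var JsonReader, value: var MyId) =
    if r.lexer.tok != tkString:
      # resolves through the new template, i.e.
      # raiseUnexpectedToken(r.lexer, etString)
      r.raiseUnexpectedToken etString
    value = MyId(r.lexer.strVal)
    r.lexer.next()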
@@ -156,9 +185,9 @@ proc init*(T: type JsonReader,
   result.lexer = JsonLexer.init(stream, mode)
   result.lexer.next()

-proc requireToken*(r: var JsonReader, tk: TokKind) {.raises: [IOError, JsonReaderError].} =
-  if r.lexer.tok != tk:
-    r.raiseUnexpectedToken case tk
+proc requireToken*(lexer: var JsonLexer, tk: TokKind) {.raises: [IOError, JsonReaderError].} =
+  if lexer.tok != tk:
+    lexer.raiseUnexpectedToken case tk
       of tkString: etString
       of tkInt, tkNegativeInt: etInt
       of tkComma: etComma
@@ -169,9 +198,9 @@ proc requireToken*(r: var JsonReader, tk: TokKind) {.raises: [IOError, JsonReade
       of tkColon: etColon
       else: (doAssert false; etBool)

-proc skipToken*(r: var JsonReader, tk: TokKind) {.raises: [IOError, JsonReaderError].} =
-  r.requireToken tk
-  r.lexer.next()
+proc skipToken*(lexer: var JsonLexer, tk: TokKind) {.raises: [IOError, JsonReaderError].} =
+  lexer.requireToken tk
+  lexer.next()

 proc parseJsonNode(r: var JsonReader): JsonNode
   {.gcsafe, raises: [IOError, JsonReaderError].}
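With `requireToken` and `skipToken` moved onto the lexer, hand-written readers consume punctuation as `r.lexer.skipToken tk` rather than `r.skipToken tk`. A sketch for a two-element JSON array (Point is illustrative):

  proc readValue*(r: var JsonReader, value: var Point) =
    r.lexer.skipToken tkBracketLe
    r.readValue(value.x)
    r.lexer.skipToken tkComma
    r.readValue(value.y)
    r.lexer.skipToken tkBracketRi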
@@ -182,7 +211,7 @@ proc readJsonNodeField(r: var JsonReader, field: var JsonNode)
     r.raiseUnexpectedValue("Unexpected duplicated field name")

   r.lexer.next()
-  r.skipToken tkColon
+  r.lexer.skipToken tkColon

   field = r.parseJsonNode()

@@ -203,7 +232,7 @@ proc parseJsonNode(r: var JsonReader): JsonNode =
         r.lexer.next()
       else:
         break
-    r.skipToken tkCurlyRi
+    r.lexer.skipToken tkCurlyRi

   of tkBracketLe:
     result = JsonNode(kind: JArray)
@@ -214,7 +243,7 @@ proc parseJsonNode(r: var JsonReader): JsonNode =
         if r.lexer.tok == tkBracketRi:
           break
         else:
-          r.skipToken tkComma
+          r.lexer.skipToken tkComma
     # Skip over the last tkBracketRi
     r.lexer.next()

@@ -260,40 +289,40 @@ proc parseJsonNode(r: var JsonReader): JsonNode =
   of tkQuoted, tkExBlob, tkNumeric, tkExInt, tkExNegInt:
     raiseAssert "generic type " & $r.lexer.lazyTok & " is not applicable"

-proc skipSingleJsValue*(r: var JsonReader) {.raises: [IOError, JsonReaderError].} =
-  case r.lexer.tok
+proc skipSingleJsValue*(lexer: var JsonLexer) {.raises: [IOError, JsonReaderError].} =
+  case lexer.tok
   of tkCurlyLe:
-    r.lexer.next()
-    if r.lexer.tok != tkCurlyRi:
+    lexer.next()
+    if lexer.tok != tkCurlyRi:
       while true:
-        r.skipToken tkString
-        r.skipToken tkColon
-        r.skipSingleJsValue()
-        if r.lexer.tok == tkCurlyRi:
+        lexer.skipToken tkString
+        lexer.skipToken tkColon
+        lexer.skipSingleJsValue()
+        if lexer.tok == tkCurlyRi:
           break
-        r.skipToken tkComma
+        lexer.skipToken tkComma
     # Skip over the last tkCurlyRi
-    r.lexer.next()
+    lexer.next()

   of tkBracketLe:
-    r.lexer.next()
-    if r.lexer.tok != tkBracketRi:
+    lexer.next()
+    if lexer.tok != tkBracketRi:
       while true:
-        r.skipSingleJsValue()
-        if r.lexer.tok == tkBracketRi:
+        lexer.skipSingleJsValue()
+        if lexer.tok == tkBracketRi:
           break
         else:
-          r.skipToken tkComma
+          lexer.skipToken tkComma
     # Skip over the last tkBracketRi
-    r.lexer.next()
+    lexer.next()

   of tkColon, tkComma, tkEof, tkError, tkBracketRi, tkCurlyRi:
-    r.raiseUnexpectedToken etValue
+    lexer.raiseUnexpectedToken etValue

   of tkString, tkQuoted, tkExBlob,
      tkInt, tkNegativeInt, tkFloat, tkNumeric, tkExInt, tkExNegInt,
      tkTrue, tkFalse, tkNull:
-    r.lexer.next()
+    lexer.next()

 proc captureSingleJsValue(r: var JsonReader, output: var string) {.raises: [IOError, SerializationError].} =
   r.lexer.renderTok output
@@ -303,15 +332,15 @@ proc captureSingleJsValue(r: var JsonReader, output: var string) {.raises: [IOEr
    if r.lexer.tok != tkCurlyRi:
      while true:
        r.lexer.renderTok output
-        r.skipToken tkString
+        r.lexer.skipToken tkString
        r.lexer.renderTok output
-        r.skipToken tkColon
+        r.lexer.skipToken tkColon
        r.captureSingleJsValue(output)
        r.lexer.renderTok output
        if r.lexer.tok == tkCurlyRi:
          break
        else:
-          r.skipToken tkComma
+          r.lexer.skipToken tkComma
    else:
      output.add '}'
    # Skip over the last tkCurlyRi
@@ -326,7 +355,7 @@ proc captureSingleJsValue(r: var JsonReader, output: var string) {.raises: [IOEr
        if r.lexer.tok == tkBracketRi:
          break
        else:
-          r.skipToken tkComma
+          r.lexer.skipToken tkComma
    else:
      output.add ']'
    # Skip over the last tkBracketRi
@@ -340,16 +369,16 @@ proc captureSingleJsValue(r: var JsonReader, output: var string) {.raises: [IOEr
      tkTrue, tkFalse, tkNull:
     r.lexer.next()

-proc allocPtr[T](p: var ptr T) =
+func allocPtr[T](p: var ptr T) =
   p = create(T)

-proc allocPtr[T](p: var ref T) =
+func allocPtr[T](p: var ref T) =
   p = new(T)

 iterator readArray*(r: var JsonReader, ElemType: typedesc): ElemType {.raises: [IOError, SerializationError].} =
   mixin readValue

-  r.skipToken tkBracketLe
+  r.lexer.skipToken tkBracketLe
   if r.lexer.lazyTok != tkBracketRi:
     while true:
       var res: ElemType
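For context (usage unchanged by this commit, assuming `r` is a JsonReader positioned at a JSON array):

  var total = 0
  for x in r.readArray(int):
    total += x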
@@ -357,13 +386,13 @@ iterator readArray*(r: var JsonReader, ElemType: typedesc): ElemType {.raises: [
       yield res
       if r.lexer.tok != tkComma: break
       r.lexer.next()
-  r.skipToken tkBracketRi
+  r.lexer.skipToken tkBracketRi

 iterator readObjectFields*(r: var JsonReader,
                            KeyType: type): KeyType {.raises: [IOError, SerializationError].} =
   mixin readValue

-  r.skipToken tkCurlyLe
+  r.lexer.skipToken tkCurlyLe
   if r.lexer.lazyTok != tkCurlyRi:
     while true:
       var key: KeyType
@@ -373,7 +402,7 @@ iterator readObjectFields*(r: var JsonReader,
       yield key
       if r.lexer.lazyTok != tkComma: break
       r.lexer.next()
-  r.skipToken tkCurlyRi
+  r.lexer.skipToken tkCurlyRi

 iterator readObject*(r: var JsonReader,
                      KeyType: type,
@@ -385,8 +414,8 @@ iterator readObject*(r: var JsonReader,
     readValue(r, value)
     yield (fieldName, value)

-proc isNotNilCheck[T](x: ref T not nil) {.compileTime.} = discard
-proc isNotNilCheck[T](x: ptr T not nil) {.compileTime.} = discard
+func isNotNilCheck[T](x: ref T not nil) {.compileTime.} = discard
+func isNotNilCheck[T](x: ptr T not nil) {.compileTime.} = discard

 func isFieldExpected*(T: type): bool {.compileTime.} =
   T isnot Option
@@ -422,7 +451,7 @@ func expectedFieldsBitmask*(TT: type): auto {.compileTime.} =
       res[i div bitsPerWord].setBitInWord(i mod bitsPerWord)
     inc i

-  return res
+  res

 template setBitInArray[N](data: var array[N, uint], bitIdx: int) =
   when data.len > 1:
@@ -486,6 +515,66 @@ proc parseEnum[T](
   of EnumStyle.AssociatedStrings:
     r.raiseUnexpectedToken etEnumString

+proc readRecordValue*[T](r: var JsonReader, value: var T)
+                        {.raises: [SerializationError, IOError].} =
+  type
+    ReaderType {.used.} = type r
+    T = type value
+
+  r.lexer.skipToken tkCurlyLe
+
+  when T.totalSerializedFields > 0:
+    let
+      fieldsTable = T.fieldReadersTable(ReaderType)
+
+    const
+      expectedFields = T.expectedFieldsBitmask
+
+    var
+      encounteredFields: typeof(expectedFields)
+      mostLikelyNextField = 0
+
+    while true:
+      # Have the assignment parsed of the AVP
+      if r.lexer.lazyTok == tkQuoted:
+        r.lexer.accept
+      if r.lexer.lazyTok != tkString:
+        break
+
+      when T is tuple:
+        let fieldIdx = mostLikelyNextField
+        mostLikelyNextField += 1
+      else:
+        let fieldIdx = findFieldIdx(fieldsTable[],
+                                    r.lexer.strVal,
+                                    mostLikelyNextField)
+      if fieldIdx != -1:
+        let reader = fieldsTable[][fieldIdx].reader
+        r.lexer.next()
+        r.lexer.skipToken tkColon
+        reader(value, r)
+        encounteredFields.setBitInArray(fieldIdx)
+      elif r.allowUnknownFields:
+        r.lexer.next()
+        r.lexer.skipToken tkColon
+        r.lexer.skipSingleJsValue()
+      else:
+        const typeName = typetraits.name(T)
+        r.raiseUnexpectedField(r.lexer.strVal, cstring typeName)
+
+      if r.lexer.lazyTok == tkComma:
+        r.lexer.next()
+      else:
+        break
+
+    if r.requireAllFields and
+       not expectedFields.isBitwiseSubsetOf(encounteredFields):
+      const typeName = typetraits.name(T)
+      r.raiseIncompleteObject(typeName)
+
+  r.lexer.accept
+  r.lexer.skipToken tkCurlyRi
+
 proc readValue*[T](r: var JsonReader, value: var T)
     {.gcsafe, raises: [SerializationError, IOError].} =
   ## Master filed/object parser. This function relies on customised sub-mixins for particular
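`readRecordValue` is the former object-parsing body of `readValue`, extracted so that flavors and explicit overrides can opt back into it (see `useDefaultSerializationIn` and the new tests further down). A minimal sketch of such an override, with illustrative names:

  proc readValue*(r: var JsonReader[MyFlavor], value: var MyObject) =
    r.readRecordValue(value)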
@@ -523,7 +612,6 @@ proc readValue*[T](r: var JsonReader, value: var T)
   ## reader.lexer.next
   ##
   mixin readValue
-  type ReaderType {.used.} = type r

   when value is (object or tuple):
     let tok {.used.} = r.lexer.lazyTok
@@ -537,19 +625,19 @@ proc readValue*[T](r: var JsonReader, value: var T)
       value = r.parseJsonNode()

   elif value is string:
-    r.requireToken tkString
+    r.lexer.requireToken tkString
     value = r.lexer.strVal
     r.lexer.next()

   elif value is seq[char]:
-    r.requireToken tkString
+    r.lexer.requireToken tkString
     value.setLen(r.lexer.strVal.len)
     for i in 0..<r.lexer.strVal.len:
       value[i] = r.lexer.strVal[i]
     r.lexer.next()

   elif isCharArray(value):
-    r.requireToken tkString
+    r.lexer.requireToken tkString
     if r.lexer.strVal.len != value.len:
       # Raise tkString because we expected a `"` earlier
       r.raiseUnexpectedToken(etString)
@@ -630,7 +718,7 @@ proc readValue*[T](r: var JsonReader, value: var T)
     r.lexer.next()

   elif value is seq:
-    r.skipToken tkBracketLe
+    r.lexer.skipToken tkBracketLe
     if r.lexer.tok != tkBracketRi:
       while true:
         let lastPos = value.len
@@ -638,78 +726,36 @@ proc readValue*[T](r: var JsonReader, value: var T)
         readValue(r, value[lastPos])
         if r.lexer.tok != tkComma: break
         r.lexer.next()
-    r.skipToken tkBracketRi
+    r.lexer.skipToken tkBracketRi

   elif value is array:
-    r.skipToken tkBracketLe
+    r.lexer.skipToken tkBracketLe
     for i in low(value) ..< high(value):
       # TODO: dont's ask. this makes the code compile
       if false: value[i] = value[i]
       readValue(r, value[i])
-      r.skipToken tkComma
+      r.lexer.skipToken tkComma
     readValue(r, value[high(value)])
-    r.skipToken tkBracketRi
+    r.lexer.skipToken tkBracketRi

   elif value is (object or tuple):
-    type T = type(value)
-    r.skipToken tkCurlyLe
+    type Flavor = JsonReader.Flavor
+    const isAutomatic =
+      useAutomaticObjectSerialization(Flavor)

-    when T.totalSerializedFields > 0:
-      let
-        fieldsTable = T.fieldReadersTable(ReaderType)
-
-      const
-        expectedFields = T.expectedFieldsBitmask
-
-      var
-        encounteredFields: typeof(expectedFields)
-        mostLikelyNextField = 0
-
-      while true:
-        # Have the assignment parsed of the AVP
-        if r.lexer.lazyTok == tkQuoted:
-          r.lexer.accept
-        if r.lexer.lazyTok != tkString:
-          break
-
-        when T is tuple:
-          let fieldIdx = mostLikelyNextField
-          mostLikelyNextField += 1
-        else:
-          let fieldIdx = findFieldIdx(fieldsTable[],
-                                      r.lexer.strVal,
-                                      mostLikelyNextField)
-        if fieldIdx != -1:
-          let reader = fieldsTable[][fieldIdx].reader
-          r.lexer.next()
-          r.skipToken tkColon
-          reader(value, r)
-          encounteredFields.setBitInArray(fieldIdx)
-        elif r.allowUnknownFields:
-          r.lexer.next()
-          r.skipToken tkColon
-          r.skipSingleJsValue()
-        else:
-          const typeName = typetraits.name(T)
-          r.raiseUnexpectedField(r.lexer.strVal, cstring typeName)
-
-        if r.lexer.lazyTok == tkComma:
-          r.lexer.next()
-        else:
-          break
-
-    if r.requireAllFields and
-       not expectedFields.isBitwiseSubsetOf(encounteredFields):
-      const typeName = typetraits.name(T)
-      r.raiseIncompleteObject(typeName)
-
-    r.lexer.accept
-    r.skipToken tkCurlyRi
+    when not isAutomatic:
+      const typeName = typetraits.name(T)
+      {.error: "Please override readValue for the " & typeName & " type (or import the module where the override is provided)".}
+
+    readRecordValue(r, value)
   else:
     const typeName = typetraits.name(T)
     {.error: "Failed to convert to JSON an unsupported type: " & typeName.}

+template useDefaultSerializationIn*(T: type[object|tuple], Flavor: type) =
+  template readValue*(r: var JsonReader[Flavor], value: var T) =
+    readRecordValue(r, value)
+
 iterator readObjectFields*(r: var JsonReader): string {.
     raises: [IOError, SerializationError].} =
   for key in readObjectFields(r, string):
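Usage sketch for the new template (names illustrative): a single type keeps the automatic field-by-field reading under a flavor that otherwise demands explicit overrides.

  MyObject.useDefaultSerializationIn MyFlavor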
@@ -20,4 +20,3 @@ const

 template `==`*(lhs, rhs: JsonString): bool =
   string(lhs) == string(rhs)
-
@@ -22,7 +22,7 @@ type
   Json.setWriter JsonWriter,
                  PreferredOutput = string

-proc init*(W: type JsonWriter, stream: OutputStream,
+func init*(W: type JsonWriter, stream: OutputStream,
            pretty = false, typeAnnotations = false): W =
   W(stream: stream,
     hasPrettyOutput: pretty,
@@ -152,13 +152,11 @@ template writeObjectField*[FieldType, RecordType](w: var JsonWriter,
                                                   field: FieldType): bool =
   mixin writeFieldIMPL, writeValue

-  type
-    R = type record
-
   w.writeFieldName(fieldName)
   when RecordType is tuple:
     w.writeValue(field)
   else:
+    type R = type record
     w.writeFieldIMPL(FieldTag[R, fieldName], field, record)
   true

@@ -236,6 +234,16 @@ proc writeValue*(w: var JsonWriter, value: auto) {.gcsafe, raises: [IOError].} =
     w.writeArray(value)

   elif value is (object or tuple):
+    mixin useAutomaticObjectSerialization
+
+    type Flavor = JsonWriter.Flavor
+    const isAutomatic =
+      useAutomaticObjectSerialization(Flavor)
+
+    when not isAutomatic:
+      const typeName = typetraits.name(type value)
+      {.error: "Please override writeValue for the " & typeName & " type (or import the module where the override is provided)".}
+
     type RecordType = type value
     w.beginRecord RecordType
     value.enumInstanceSerializedFields(fieldName, field):
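The writer now mirrors the reader: unless the flavor opts into automatic object serialization, `writeValue` must be overridden per type. A hedged sketch of such an override (names illustrative):

  proc writeValue*(w: var JsonWriter[MyFlavor], value: MyObject) =
    w.writeValue($value)   # e.g. render the object as a plain string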
@@ -251,10 +259,11 @@ proc writeValue*(w: var JsonWriter, value: auto) {.gcsafe, raises: [IOError].} =
 proc toJson*(v: auto, pretty = false, typeAnnotations = false): string =
   mixin writeValue

-  var s = memoryOutput()
-  var w = JsonWriter[DefaultFlavor].init(s, pretty, typeAnnotations)
+  var
+    s = memoryOutput()
+    w = JsonWriter[DefaultFlavor].init(s, pretty, typeAnnotations)
   w.writeValue v
-  return s.getOutput(string)
+  s.getOutput(string)

 template serializesAsTextInJson*(T: type[enum]) =
   template writeValue*(w: var JsonWriter, val: T) =
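The `toJson` cleanup is behavior-preserving; usage stays the same:

  echo toJson((x: 1, y: "two"))                # compact one-line output
  echo toJson((x: 1, y: "two"), pretty = true) # indented multi-line output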
@@ -1,4 +1,3 @@
 import
   test_lexer,
   test_serialization
-
@@ -1,3 +1,5 @@
+{.used.}
+
 import
   unittest,
   ../json_serialization/lexer, ./utils
@@ -1,3 +1,5 @@
+{.used.}
+
 import
   strutils, unittest2, json,
   serialization/object_serialization,
@@ -305,21 +307,18 @@ Meter.borrowSerialization int
 template reject(code) {.used.} =
   static: doAssert(not compiles(code))

-proc `==`(lhs, rhs: Meter): bool =
+func `==`(lhs, rhs: Meter): bool =
   int(lhs) == int(rhs)

-proc `==`(lhs, rhs: ref Simple): bool =
+func `==`(lhs, rhs: ref Simple): bool =
   if lhs.isNil: return rhs.isNil
   if rhs.isNil: return false
-  return lhs[] == rhs[]
+  lhs[] == rhs[]

 executeReaderWriterTests Json

-proc newSimple(x: int, y: string, d: Meter): ref Simple =
-  new result
-  result.x = x
-  result.y = y
-  result.distance = d
+func newSimple(x: int, y: string, d: Meter): ref Simple =
+  (ref Simple)(x: x, y: y, distance: d)

 var invalid = Invalid(distance: Mile(100))
 # The compiler cannot handle this check at the moment
@@ -360,6 +359,23 @@ EnumTestO.configureJsonDeserialization(
   allowNumericRepr = true,
   stringNormalizer = nimIdentNormalize)

+createJsonFlavor MyJson
+
+type
+  HasMyJsonDefaultBehavior = object
+    simple: Simple
+
+  HasMyJsonOverride = object
+    simple: Simple
+
+HasMyJsonDefaultBehavior.useDefaultSerializationIn MyJson
+
+proc readValue*(r: var JsonReader[MyJson], value: var HasMyJsonOverride) =
+  r.readRecordValue(value.simple)
+
+proc writeValue*(w: var JsonWriter[MyJson], value: HasMyJsonOverride) =
+  w.writeRecordValue(value.simple)
+
 suite "toJson tests":
   test "encode primitives":
     check:
@@ -531,6 +547,28 @@ suite "toJson tests":
       decoded.y == "test"
       decoded.distance.int == 20

+  test "Custom flavor with explicit serialization":
+    var s = Simple(x: 10, y: "test", distance: Meter(20))
+
+    reject:
+      discard MyJson.encode(s)
+
+    let hasDefaultBehavior = HasMyJsonDefaultBehavior(simple: s)
+    let hasOverride = HasMyJsonOverride(simple: s)
+
+    let json1 = MyJson.encode(hasDefaultBehavior)
+    let json2 = MyJson.encode(hasOverride)
+
+    reject:
+      let decodedAsMyJson = MyJson.decode(json2, Simple)
+
+    check:
+      json1 == """{"distance":20,"x":10,"y":"test"}"""
+      json2 == """{"distance":20,"x":10,"y":"test"}"""
+
+      MyJson.decode(json1, HasMyJsonDefaultBehavior) == hasDefaultBehavior
+      MyJson.decode(json2, HasMyJsonOverride) == hasOverride
+
   test "handle additional fields":
     let json = test_dedent"""
       {
@@ -1,14 +1,12 @@
-import
-  strutils
+import strutils

-# `dedent` exists in newer nim version
-# and doesn't behave the same
-proc test_dedent*(s: string): string =
-  var s = s.strip(leading = false)
-  var minIndent = high(int)
+# `dedent` exists in newer Nim version and doesn't behave the same
+func test_dedent*(s: string): string =
+  var
+    s = s.strip(leading = false)
+    minIndent = high(int)
   for l in s.splitLines:
     let indent = count(l, ' ')
     if indent == 0: continue
     if indent < minIndent: minIndent = indent
-  result = s.unindent(minIndent)
-
+  s.unindent(minIndent)
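For reference, the helper is used by the tests like this:

  let json = test_dedent"""
    {
      "x": 10
    }
    """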