# nim-json-serialization/tests/test_serialization.nim
import
  strutils, unittest, json,
  serialization/object_serialization,
  serialization/testing/generic_suite,
  ../json_serialization, ./utils,
  ../json_serialization/lexer,
  ../json_serialization/std/[options, sets, tables]
type
  # Object with a field excluded from serialization.
  Foo = object
    i: int
    b {.dontSerialize.}: Bar
    s: string

  Bar = object
    sf: seq[Foo]
    z: ref Simple

  # `Mile` has no serializer defined, so this type must be rejected.
  Invalid = object
    distance: Mile

  HasUnusualFieldNames = object
    # Using Nim reserved keyword
    `type`: string
    renamedField {.serializedFieldName("renamed").}: string

  MyKind = enum
    Apple
    Banana

  MyCaseObject = object
    name: string
    case kind: MyKind
    of Banana: banana: int
    of Apple: apple: string

  MyUseCaseObject = object
    field: MyCaseObject

  # Holders for raw JSON payloads, kept either as text or as a node tree.
  HasJsonString = object
    name: string
    data: JsonString
    id: int

  HasJsonNode = object
    name: string
    data: JsonNode
    id: int

  HasCstring = object
    notNilStr: cstring
    nilStr: cstring

  # Customised parser tests
  FancyInt = distinct int
  FancyUInt = distinct uint
  FancyText = distinct string

  HasFancyInt = object
    name: string
    data: FancyInt

  HasFancyUInt = object
    name: string
    data: FancyUInt

  HasFancyText = object
    name: string
    data: FancyText

  # Records which lexer tokens a custom reader observed.
  TokenRegistry = tuple
    entry, exit: TokKind
    dup: bool
var
  # Global registry filled in by `registerVisit`: the token kind seen when a
  # custom reader was entered/exited, and whether it ran more than once.
  customVisit: TokenRegistry
template registerVisit(reader: var JsonReader; body: untyped): untyped =
  ## Run `body` once, recording the lexer's lazy token kind before and after
  ## in `customVisit`; any further invocation only sets the `dup` flag.
  if customVisit.entry != tkError:
    customVisit.dup = true
  else:
    customVisit.entry = reader.lexer.lazyTok
    body
    customVisit.exit = reader.lexer.lazyTok
# Customised parser referring to other parser
proc readValue(reader: var JsonReader, value: var FancyInt) =
  ## Delegates to the built-in `int` reader and converts the result.
  reader.registerVisit:
    value = FancyInt reader.readValue(int)
# Customised numeric parser for integer and stringified integer
proc readValue(reader: var JsonReader, value: var FancyUInt) =
  ## Accepts either a raw JSON number or a quoted decimal string.
  reader.registerVisit:
    var number = 0u
    case reader.lexer.lazyTok
    of tkNumeric:
      # Fold the digits as the lexer streams them.
      reader.lexer.customIntValueIt:
        number = 10u * number + it.uint
    of tkQuoted:
      # Collect the quoted characters, then parse the complete string.
      var digits = ""
      reader.lexer.customTextValueIt:
        digits &= it
      number = digits.parseUInt
    else:
      discard
    value = FancyUInt(number)
    reader.lexer.next
# Customised parser for text, accepts embedded quote
proc readValue(reader: var JsonReader, value: var FancyText) =
  ## Reads a quoted blob byte by byte, honouring backslash escapes; the
  ## first unescaped quote terminates the value.
  reader.registerVisit:
    var text = ""
    var escaped = false
    reader.lexer.customBlobValueIt:
      let ch = it.chr
      if escaped:
        # Previous byte was a backslash: take this one verbatim.
        escaped = false
        text &= ch
      elif ch == '\\':
        escaped = true
      elif ch == '"':
        # Unescaped closing quote: stop and swallow the quote byte.
        doNext = StopSwallowByte
      else:
        text &= ch
    value = text.FancyText
    reader.lexer.next
# TODO `borrowSerialization` still doesn't work
# properly when it's placed in another module:
# (reuses the `int` serializer for the `Meter` distinct type)
Meter.borrowSerialization int
# Asserts at compile time that `code` does not compile.
template reject(code) {.used.} =
  static: doAssert(not compiles(code))
proc `==`(lhs, rhs: Meter): bool =
  ## Compare two distinct `Meter` values via their underlying ints.
  lhs.int == rhs.int
proc `==`(lhs, rhs: ref Simple): bool =
  ## Structural equality: two nils are equal; one nil never equals a non-nil;
  ## otherwise compare the pointed-to values.
  if lhs.isNil or rhs.isNil:
    lhs.isNil and rhs.isNil
  else:
    lhs[] == rhs[]
executeReaderWriterTests Json
proc newSimple(x: int, y: string, d: Meter): ref Simple =
  ## Heap-allocates a `Simple` populated with the given field values.
  new result
  result[] = Simple(x: x, y: y, distance: d)
# `Invalid` holds a `Mile`, which has no serializer; encoding it should
# fail to compile.
var invalid = Invalid(distance: Mile(100))
# The compiler cannot handle this check at the moment
# {.fatal.} seems fatal even in `compiles` context
when false: reject invalid.toJson
else: discard invalid
suite "toJson tests":
  # Encoding/decoding behaviour of the Json format: missing/extra fields,
  # pretty printing, unusual field names, options, case objects and holders
  # of raw JSON payloads.
  test "encode primitives":
    check:
      1.toJson == "1"
      "".toJson == "\"\""
      "abc".toJson == "\"abc\""

  test "simple objects":
    var s = Simple(x: 10, y: "test", distance: Meter(20))

    check:
      s.toJson == """{"distance":20,"x":10,"y":"test"}"""
      s.toJson(typeAnnotations = true) == """{"$type":"Simple","distance":20,"x":10,"y":"test"}"""
      s.toJson(pretty = true) == test_dedent"""
        {
          "distance": 20,
          "x": 10,
          "y": "test"
        }
        """

  test "handle missing fields":
    let json = test_dedent"""
      {
        "distance": 20,
        "y": "test"
      }
      """
    let decoded = Json.decode(json, Simple)

    check:
      decoded.x == 0
      decoded.y == "test"
      decoded.distance.int == 20

  test "handle additional fields":
    let json = test_dedent"""
      {
        "x": -20,
        "futureObject": {"a": -1, "b": [1, 2.0, 3.1], "c": null, "d": true},
        "futureBool": false,
        "y": "y value"
      }
      """
    let decoded = Json.decode(json, Simple, allowUnknownFields = true)

    check:
      decoded.x == -20
      decoded.y == "y value"
      decoded.distance.int == 0

    expect UnexpectedField:
      let shouldNotDecode = Json.decode(json, Simple)
      echo "This should not have decoded ", shouldNotDecode

  test "all fields are required and present":
    let json = test_dedent"""
      {
        "x": 20,
        "distance": 10,
        "y": "y value"
      }
      """
    let decoded = Json.decode(json, Simple, requireAllFields = true)

    check:
      decoded.x == 20
      decoded.y == "y value"
      decoded.distance.int == 10

  test "all fields were required, but not all were provided":
    let json = test_dedent"""
      {
        "x": -20,
        "distance": 10
      }
      """
    expect IncompleteObjectError:
      let shouldNotDecode = Json.decode(json, Simple, requireAllFields = true)
      echo "This should not have decoded ", shouldNotDecode

  test "all fields were required, but not all were provided (additional fields present instead)":
    let json = test_dedent"""
      {
        "futureBool": false,
        "y": "y value",
        "futureObject": {"a": -1, "b": [1, 2.0, 3.1], "c": null, "d": true},
        "distance": 10
      }
      """
    expect IncompleteObjectError:
      let shouldNotDecode = Json.decode(json, Simple,
                                        requireAllFields = true,
                                        allowUnknownFields = true)
      echo "This should not have decoded ", shouldNotDecode

  test "all fields were required, but none were provided":
    let json = "{}"
    expect IncompleteObjectError:
      let shouldNotDecode = Json.decode(json, Simple, requireAllFields = true)
      echo "This should not have decoded ", shouldNotDecode

  test "all fields are required and provided, and additional ones are present":
    let json = test_dedent"""
      {
        "x": 20,
        "distance": 10,
        "futureBool": false,
        "y": "y value",
        "futureObject": {"a": -1, "b": [1, 2.0, 3.1], "c": null, "d": true},
      }
      """
    let decoded = try:
      Json.decode(json, Simple, requireAllFields = true, allowUnknownFields = true)
    except SerializationError as err:
      checkpoint "Unexpected deserialization failure: " & err.formatMsg("<input>")
      raise

    check:
      decoded.x == 20
      decoded.y == "y value"
      decoded.distance.int == 10

    expect UnexpectedField:
      let shouldNotDecode = Json.decode(json, Simple,
                                        requireAllFields = true,
                                        allowUnknownFields = false)
      echo "This should not have decoded ", shouldNotDecode

  test "arrays are printed correctly":
    var x = HoldsArray(data: @[1, 2, 3, 4])

    check:
      x.toJson(pretty = true) == test_dedent"""
        {
          "data": [
            1,
            2,
            3,
            4
          ]
        }
        """

  test "max unsigned value":
    var uintVal = not uint64(0)
    let jsonValue = Json.encode(uintVal)

    check:
      jsonValue == "18446744073709551615"
      Json.decode(jsonValue, uint64) == uintVal

    # Portable mode must reject values that don't fit in a double.
    expect JsonReaderError:
      discard Json.decode(jsonValue, uint64, mode = Portable)

  test "Unusual field names":
    let r = HasUnusualFieldNames(`type`: "uint8", renamedField: "field")
    check:
      r.toJson == """{"type":"uint8","renamed":"field"}"""
      r == Json.decode("""{"type":"uint8", "renamed":"field"}""", HasUnusualFieldNames)

  test "Option types":
    let
      h1 = HoldsOption(o: some Simple(x: 1, y: "2", distance: Meter(3)))
      h2 = HoldsOption(r: newSimple(1, "2", Meter(3)))

    Json.roundtripTest h1, """{"r":null,"o":{"distance":3,"x":1,"y":"2"}}"""
    Json.roundtripTest h2, """{"r":{"distance":3,"x":1,"y":"2"},"o":null}"""

  test "Case object as field":
    let
      original = MyUseCaseObject(field: MyCaseObject(name: "hello",
                                                     kind: Apple,
                                                     apple: "world"))
      decoded = Json.decode(Json.encode(original), MyUseCaseObject)

    check:
      $original == $decoded

  test "stringLike":
    check:
      "abc" == Json.decode(Json.encode(['a', 'b', 'c']), string)
      "abc" == Json.decode(Json.encode(@['a', 'b', 'c']), string)
      ['a', 'b', 'c'] == Json.decode(Json.encode(@['a', 'b', 'c']), seq[char])
      ['a', 'b', 'c'] == Json.decode(Json.encode("abc"), seq[char])
      ['a', 'b', 'c'] == Json.decode(Json.encode(@['a', 'b', 'c']), array[3, char])

    expect JsonReaderError: # too short
      discard Json.decode(Json.encode(@['a', 'b']), array[3, char])

    expect JsonReaderError: # too long
      discard Json.decode(Json.encode(@['a', 'b']), array[1, char])

  # Shared driver for the `JsonString`/`JsonNode` holder tests: decodes three
  # payloads, checks the captured raw data, then verifies re-encoding matches
  # the canonical form produced by std/json's parseJson.
  proc testJsonHolders(HasJsonData: type) =
    let data1 = test_dedent"""
      {
        "name": "Data 1",
        "data": [1, 2, 3, 4],
        "id": 101
      }
      """
    let data2 = test_dedent"""
      {
        "name": "Data 2",
        "data": "some string",
        "id": 1002
      }
      """
    let data3 = test_dedent"""
      {
        "name": "Data 3",
        "data": {"field1": 10, "field2": [1, 2, 3], "field3": "test"},
        "id": 10003
      }
      """
    try:
      let
        d1 = Json.decode(data1, HasJsonData)
        d2 = Json.decode(data2, HasJsonData)
        d3 = Json.decode(data3, HasJsonData)

      check:
        d1.name == "Data 1"
        $d1.data == "[1,2,3,4]"
        d1.id == 101

        d2.name == "Data 2"
        $d2.data == "\"some string\""
        d2.id == 1002

        d3.name == "Data 3"
        $d3.data == """{"field1":10,"field2":[1,2,3],"field3":"test"}"""
        d3.id == 10003

      let
        d1Encoded = Json.encode(d1)
        d2Encoded = Json.encode(d2)
        d3Encoded = Json.encode(d3)

      check:
        d1Encoded == $parseJson(data1)
        d2Encoded == $parseJson(data2)
        d3Encoded == $parseJson(data3)
    except SerializationError as e:
      echo e.getStackTrace
      echo e.formatMsg("<>")
      raise e

  test "Holders of JsonString":
    testJsonHolders HasJsonString

  test "Holders of JsonNode":
    testJsonHolders HasJsonNode

  test "Json with comments":
    const jsonContent = staticRead "./cases/comments.json"

    try:
      let decoded = Json.decode(jsonContent, JsonNode)
      check decoded["tasks"][0]["label"] == newJString("nim-beacon-chain build")
    except SerializationError as err:
      checkpoint err.formatMsg("./cases/comments.json")
      check false

  test "A nil cstring":
    let
      obj1 = HasCstring(notNilStr: "foo", nilStr: nil)
      obj2 = HasCstring(notNilStr: "", nilStr: nil)
      str: cstring = "some value"

    check:
      Json.encode(obj1) == """{"notNilStr":"foo","nilStr":null}"""
      Json.encode(obj2) == """{"notNilStr":"","nilStr":null}"""
      Json.encode(str) == "\"some value\""
      Json.encode(cstring nil) == "null"

    reject:
      # Decoding cstrings is not supported due to lack of
      # clarity regarding the memory allocation approach
      Json.decode("null", cstring)
suite "Custom parser tests":
  # Exercises the custom `readValue` overloads above; `customVisit` captures
  # the lexer token kinds seen on entry/exit of each custom reader.
  test "Fall back to int parser":
    customVisit = TokenRegistry.default
    let
      jData = test_dedent"""
        {
          "name": "FancyInt",
          "data": -12345
        }
        """
      dData = Json.decode(jData, HasFancyInt)
    check dData.name == "FancyInt"
    check dData.data.int == -12345
    check customVisit == (tkNumeric, tkCurlyRi, false)

  test "Uint parser on negative integer":
    customVisit = TokenRegistry.default
    let
      jData = test_dedent"""
        {
          "name": "FancyUInt",
          "data": -12345
        }
        """
      dData = Json.decode(jData, HasFancyUInt)
    check dData.name == "FancyUInt"
    check dData.data.uint == 12345u # abs value
    check customVisit == (tkNumeric, tkExNegInt, false)

  test "Uint parser on string integer":
    customVisit = TokenRegistry.default
    let
      jData = test_dedent"""
        {
          "name": "FancyUInt",
          "data": "12345"
        }
        """
      dData = Json.decode(jData, HasFancyUInt)
    check dData.name == "FancyUInt"
    check dData.data.uint == 12345u
    check customVisit == (tkQuoted, tkExBlob, false)

  test "Parser on text blob with embedded quote (backlash escape support)":
    customVisit = TokenRegistry.default
    let
      jData = test_dedent"""
        {
          "name": "FancyText",
          "data": "a\bc\"\\def"
        }
        """
      dData = Json.decode(jData, HasFancyText)
    check dData.name == "FancyText"
    check dData.data.string == "abc\"\\def"
    check customVisit == (tkQuoted, tkExBlob, false)