Handle the latest OutputStream changes
commit 49c07ca512
parent edda2577a3
@@ -40,7 +40,7 @@ type
     errNonPortableInt = "number is outside the range of portable values"
 
   JsonLexer* = object
-    stream: AsciiStream
+    stream: ref AsciiStream
     mode*: JsonMode
 
     line*: int
@@ -73,9 +73,9 @@ proc isDigit(c: char): bool {.inline.} =
   return (c >= '0' and c <= '9')
 
 proc col*(lexer: JsonLexer): int =
-  lexer.stream.pos - lexer.lineStartPos
+  lexer.stream[].pos - lexer.lineStartPos
 
-proc init*(T: type JsonLexer, stream: AsciiStream, mode = defaultJsonMode): T =
+proc init*(T: type JsonLexer, stream: ref AsciiStream, mode = defaultJsonMode): T =
   T(stream: stream,
     mode: mode,
     line: 0,
@@ -87,8 +87,9 @@ proc init*(T: type JsonLexer, stream: AsciiStream, mode = defaultJsonMode): T =
     floatVal: 0'f,
     strVal: "")
 
-proc init*(T: type JsonLexer, stream: ByteStream, mode = defaultJsonMode): auto =
-  init(JsonLexer, AsciiStream(stream), mode)
+proc init*(T: type JsonLexer, stream: ref ByteStream, mode = defaultJsonMode): auto =
+  type AS = ref AsciiStream
+  init(JsonLexer, AS(stream), mode)
 
 template error(error: JsonError) {.dirty.} =
   lexer.err = error
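Because the lexer now holds a `ref AsciiStream`, the code switches to explicit dereferencing (`stream[]`) wherever the stream value itself is needed, in particular when passing it to procs such as `advance` that take the stream by `var`. A minimal, self-contained sketch of the pattern with toy types (not the real faststreams API):

  # Toy illustration only; ToyStream stands in for faststreams' AsciiStream.
  type
    ToyStream = object
      pos: int
    ToyStreamRef = ref ToyStream
    MiniLexer = object
      stream: ToyStreamRef   # mirrors `stream: ref AsciiStream` above
      lineStartPos: int

  proc advance(s: var ToyStream) = inc s.pos

  proc col(lexer: MiniLexer): int =
    # explicit dereference, as in `lexer.stream[].pos` in the diff
    lexer.stream[].pos - lexer.lineStartPos

  var lexer = MiniLexer(stream: ToyStreamRef(pos: 0), lineStartPos: 0)
  advance lexer.stream[]   # `advance` takes `var ToyStream`, so `[]` is required
  echo lexer.col()         # prints 1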
@@ -96,11 +97,11 @@ template error(error: JsonError) {.dirty.} =
   return
 
 template checkForUnexpectedEof {.dirty.} =
-  if lexer.stream.eof: error errUnexpectedEof
+  if lexer.stream[].eof: error errUnexpectedEof
 
 template requireNextChar(): char =
   checkForUnexpectedEof()
-  lexer.stream.read()
+  lexer.stream[].read()
 
 template checkForNonPortableInt(val: uint64) =
   if lexer.mode == Portable and val > uint64(maxPortableInt):
@@ -115,9 +116,9 @@ proc scanHexRune(lexer: var JsonLexer): int =
 proc scanString(lexer: var JsonLexer) =
   lexer.tok = tkString
   lexer.strVal.setLen 0
-  lexer.tokenStart = lexer.stream.pos
+  lexer.tokenStart = lexer.stream[].pos
 
-  advance lexer.stream
+  advance lexer.stream[]
 
   while true:
     var c = requireNextChar()
@@ -164,32 +165,32 @@ proc scanString(lexer: var JsonLexer) =
       lexer.strVal.add c
 
 proc handleLF(lexer: var JsonLexer) {.inline.} =
-  advance lexer.stream
+  advance lexer.stream[]
   lexer.line += 1
-  lexer.lineStartPos = lexer.stream.pos
+  lexer.lineStartPos = lexer.stream[].pos
 
 proc skipWhitespace(lexer: var JsonLexer) =
   template handleCR =
     # Beware: this is a template, because the return
     # statement has to exit `skipWhitespace`.
-    advance lexer.stream
-    if lexer.stream.eof: return
-    if lexer.stream.peek() == '\n': advance lexer.stream
+    advance lexer.stream[]
+    if lexer.stream[].eof: return
+    if lexer.stream[].peek() == '\n': advance lexer.stream[]
     lexer.line += 1
-    lexer.lineStartPos = lexer.stream.pos
+    lexer.lineStartPos = lexer.stream[].pos
 
   while true:
-    if lexer.stream.eof: return
-    case lexer.stream.peek()
+    if lexer.stream[].eof: return
+    case lexer.stream[].peek()
     of '/':
-      advance lexer.stream
+      advance lexer.stream[]
       checkForUnexpectedEof()
-      case lexer.stream.peek()
+      case lexer.stream[].peek()
       of '/':
         while true:
-          advance lexer.stream
-          if lexer.stream.eof: return
-          case lexer.stream.peek()
+          advance lexer.stream[]
+          if lexer.stream[].eof: return
+          case lexer.stream[].peek()
           of '\r':
             handleCR()
           of '\n':
@@ -198,25 +199,25 @@ proc skipWhitespace(lexer: var JsonLexer) =
             discard
       of '*':
         while true:
-          advance lexer.stream
-          if lexer.stream.eof: return
-          case lexer.stream.peek()
+          advance lexer.stream[]
+          if lexer.stream[].eof: return
+          case lexer.stream[].peek()
           of '\r':
             handleCR()
           of '\n':
             lexer.handleLF()
           of '*':
-            advance lexer.stream
+            advance lexer.stream[]
             checkForUnexpectedEof()
-            if lexer.stream.peek() == '/':
-              advance lexer.stream
+            if lexer.stream[].peek() == '/':
+              advance lexer.stream[]
               return
           else:
             discard
       else:
         error errCommentExpected
     of ' ', '\t':
-      advance lexer.stream
+      advance lexer.stream[]
     of '\r':
       handleCR()
     of '\n':
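Aside from the added dereferences, the comment handling above is unchanged: `skipWhitespace` still consumes `//` line comments and `/* ... */` block comments while tracking CR, LF and CRLF line breaks. A hypothetical input of the shape this logic tolerates, shown here as a Nim string literal:

  const jsonWithComments = """
  {
    // line comments run to the end of the line
    "x": 1, /* block comments
               may span several lines */
    "y": 2
  }
  """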
@@ -225,32 +226,32 @@ proc skipWhitespace(lexer: var JsonLexer) =
       break
 
 template requireMoreNumberChars(elseClause) =
-  if lexer.stream.eof:
+  if lexer.stream[].eof:
     elseClause
     error errNumberExpected
 
 template eatDigitAndPeek: char =
-  advance lexer.stream
-  if lexer.stream.eof: return
-  lexer.stream.peek()
+  advance lexer.stream[]
+  if lexer.stream[].eof: return
+  lexer.stream[].peek()
 
 proc scanSign(lexer: var JsonLexer): int =
   # Returns +1 or -1
   # If a sign character is present, it must be followed
   # by more characters representing the number. If this
   # is not the case, the return value will be 0.
-  let c = lexer.stream.peek()
+  let c = lexer.stream[].peek()
   if c == '-':
     requireMoreNumberChars: result = 0
-    advance lexer.stream
+    advance lexer.stream[]
     return -1
   elif c == '+':
     requireMoreNumberChars: result = 0
-    advance lexer.stream
+    advance lexer.stream[]
     return 1
 
 proc scanInt(lexer: var JsonLexer): uint64 =
-  var c = lexer.stream.peek()
+  var c = lexer.stream[].peek()
   result = uint64(ord(c) - ord('0'))
 
   c = eatDigitAndPeek()
@@ -261,20 +262,20 @@ proc scanInt(lexer: var JsonLexer): uint64 =
 proc scanNumber(lexer: var JsonLexer) =
   var sign = lexer.scanSign()
   if sign == 0: return
-  var c = lexer.stream.peek()
+  var c = lexer.stream[].peek()
 
   if c == '.':
-    advance lexer.stream
+    advance lexer.stream[]
     requireMoreNumberChars: discard
     lexer.tok = tkFloat
-    c = lexer.stream.peek()
+    c = lexer.stream[].peek()
   elif c.isDigit:
     lexer.tok = tkInt
     let scannedValue = lexer.scanInt()
     checkForNonPortableInt scannedValue
     lexer.intVal = int(scannedValue)
-    if lexer.stream.eof: return
-    c = lexer.stream.peek()
+    if lexer.stream[].eof: return
+    c = lexer.stream[].peek()
     if c == '.':
       lexer.tok = tkFloat
       lexer.floatVal = float(lexer.intVal)
@@ -289,11 +290,11 @@ proc scanNumber(lexer: var JsonLexer) =
       c = eatDigitAndPeek()
 
   if c in {'E', 'e'}:
-    advance lexer.stream
+    advance lexer.stream[]
     requireMoreNumberChars: discard
     let sign = lexer.scanSign()
     if sign == 0: return
-    if not isDigit lexer.stream.peek():
+    if not isDigit lexer.stream[].peek():
       error errNumberExpected
 
     let exponent = lexer.scanInt()
@@ -308,7 +309,7 @@ proc scanNumber(lexer: var JsonLexer) =
 proc scanIdentifier(lexer: var JsonLexer,
                     expectedIdent: string, expectedTok: TokKind) =
   for c in expectedIdent:
-    if c != lexer.stream.read():
+    if c != lexer.stream[].read():
       lexer.tok = tkError
       return
   lexer.tok = expectedTok
@@ -316,33 +317,33 @@ proc scanIdentifier(lexer: var JsonLexer,
 proc next*(lexer: var JsonLexer) =
   lexer.skipWhitespace()
 
-  if lexer.stream.eof:
+  if lexer.stream[].eof:
     lexer.tok = tkEof
     return
 
-  let c = lexer.stream.peek()
+  let c = lexer.stream[].peek()
   case c
   of '+', '-', '.', '0'..'9':
     lexer.scanNumber()
   of '"':
     lexer.scanString()
   of '[':
-    advance lexer.stream
+    advance lexer.stream[]
     lexer.tok = tkBracketLe
   of '{':
-    advance lexer.stream
+    advance lexer.stream[]
     lexer.tok = tkCurlyLe
   of ']':
-    advance lexer.stream
+    advance lexer.stream[]
     lexer.tok = tkBracketRi
   of '}':
-    advance lexer.stream
+    advance lexer.stream[]
     lexer.tok = tkCurlyRi
   of ',':
-    advance lexer.stream
+    advance lexer.stream[]
     lexer.tok = tkComma
   of ':':
-    advance lexer.stream
+    advance lexer.stream[]
     lexer.tok = tkColon
   of '\0':
     lexer.tok = tkEof
@@ -350,6 +351,6 @@ proc next*(lexer: var JsonLexer) =
   of 't': lexer.scanIdentifier("true", tkTrue)
   of 'f': lexer.scanIdentifier("false", tkFalse)
   else:
-    advance lexer.stream
+    advance lexer.stream[]
     lexer.tok = tkError
 
@@ -33,12 +33,12 @@ type
     encountedToken*: TokKind
     expectedToken*: ExpectedTokenCategory
 
-proc init*(T: type JsonReader, stream: AsciiStream, mode = defaultJsonMode): T =
+proc init*(T: type JsonReader, stream: AsciiStreamVar, mode = defaultJsonMode): T =
   result.lexer = JsonLexer.init(stream, mode)
   result.lexer.next()
 
-template init*(T: type JsonReader, stream: ByteStream, mode = defaultJsonMode): auto =
-  init JsonReader, AsciiStream(stream), mode
+template init*(T: type JsonReader, stream: ByteStreamVar, mode = defaultJsonMode): auto =
+  init JsonReader, AsciiStreamVar(stream), mode
 
 proc setParsed[T: enum](e: var T, s: string) =
   e = parseEnum[T](s)
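With the reader's init overloads above, callers now pass the `*Var` stream aliases instead of value streams. A rough usage sketch; `openByteStream` is a placeholder assumed to return a `ByteStreamVar` over an in-memory buffer, and the real faststreams constructor may be named differently:

  # Placeholder constructor: assumed to yield a ByteStreamVar over a string buffer.
  var stream: ByteStreamVar = openByteStream("""{"x": 1}""")

  # Dispatches to the ByteStreamVar template above, which converts the stream
  # to AsciiStreamVar and forwards to the proc overload.
  var reader = JsonReader.init(stream, defaultJsonMode)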
@@ -9,13 +9,13 @@ type
     AfterField
 
   JsonWriter* = object
-    stream*: ptr OutputStream
+    stream*: OutputStreamVar
     hasTypeAnnotations: bool
     hasPrettyOutput*: bool # read-only
     nestingLevel*: int # read-only
     state: JsonWriterState
 
-proc init*(T: type JsonWriter, stream: ptr OutputStream,
+proc init*(T: type JsonWriter, stream: OutputStreamVar,
            pretty = false, typeAnnotations = false): T =
   result.stream = stream
   result.hasPrettyOutput = pretty
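Since `JsonWriter.stream` is now an `OutputStreamVar` handle rather than a `ptr OutputStream`, callers hand the stream over directly instead of taking its address. A sketch assuming `s` is an `OutputStreamVar` obtained from faststreams:

  # Previously (roughly): var w = JsonWriter.init(addr s, pretty = true)
  var w = JsonWriter.init(s, pretty = true, typeAnnotations = false)
  w.writeValue(@[1, 2, 3])   # writeValue as used by toJson further below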
@@ -32,7 +32,7 @@ proc beginRecord*(w: var JsonWriter, T: type)
 proc beginRecord*(w: var JsonWriter)
 
 template append(x: untyped) =
-  w.stream[].append x
+  w.stream.append x
 
 template indent =
   for i in 0 ..< w.nestingLevel:
@@ -141,9 +141,9 @@ proc writeImpl(w: var JsonWriter, value: auto) =
   elif value is bool:
     append if value: "true" else: "false"
   elif value is enum:
-    w.stream[].appendNumber ord(value)
+    w.stream.appendNumber ord(value)
   elif value is SomeInteger:
-    w.stream[].appendNumber value
+    w.stream.appendNumber value
   elif value is SomeFloat:
     append $value
   elif value is (seq or array):
@@ -160,8 +160,8 @@ proc writeImpl(w: var JsonWriter, value: auto) =
 proc toJson*(v: auto, pretty = false, typeAnnotations = false): string =
   mixin writeValue
 
-  var s = init StringOutputStream
-  var w = JsonWriter.init(addr s, pretty, typeAnnotations)
+  var s = init OutputStream
+  var w = JsonWriter.init(s, pretty, typeAnnotations)
   w.writeValue v
-  return s.getOutput
+  return s.getOutput(string)
 
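The updated `toJson` allocates the stream with `init OutputStream` and extracts the result via `getOutput(string)`; the caller-facing API is unchanged. For example, assuming objects serialize through the record path hinted at by `beginRecord` above, and with the exact spacing of the compact output depending on the writer:

  type Point = object
    x, y: int

  echo toJson(Point(x: 1, y: 2))                 # e.g. {"x":1,"y":2}
  echo toJson(Point(x: 1, y: 2), pretty = true)  # indented, multi-line output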