Simplify encoding/decoding of signed integers.
Fixes a compilation error on Nim 1.4.8.
This commit is contained in:
parent
a0fad53523
commit
971cdad25d
|
@ -4,6 +4,7 @@ import pkg/stew/byteutils
|
|||
import pkg/questionable/results
|
||||
import pkg/upraises
|
||||
import ./encoding
|
||||
import ./integers
|
||||
|
||||
push: {.upraises:[].}
|
||||
|
||||
|
@ -19,6 +20,7 @@ type
|
|||
padLeft,
|
||||
padRight
|
||||
UInt = SomeUnsignedInt | StUint
|
||||
Int = SomeSignedInt | StInt
|
||||
|
||||
func read*(decoder: var AbiDecoder, T: type): ?!T
|
||||
|
||||
|
@ -79,21 +81,8 @@ func read(decoder: var AbiDecoder, amount: int, padding=padLeft): ?!seq[byte] =
|
|||
func decode(decoder: var AbiDecoder, T: type UInt): ?!T =
|
||||
success T.fromBytesBE(?decoder.read(sizeof(T)))
|
||||
|
||||
template unsigned(T: type SomeSignedInt): type SomeUnsignedInt =
|
||||
when T is int8: uint8
|
||||
elif T is int16: uint16
|
||||
elif T is int32: uint32
|
||||
elif T is int64: uint64
|
||||
else: {.error "unsupported signed integer type".}
|
||||
|
||||
func decode(decoder: var AbiDecoder, T: type SomeSignedInt): ?!T =
|
||||
let bytes = ?decoder.read(sizeof(T))
|
||||
let unsigned = T.unsigned.fromBytesBE(bytes)
|
||||
let signed = cast[T](unsigned)
|
||||
success signed
|
||||
|
||||
func decode[bits](decoder: var AbiDecoder, T: type StInt[bits]): ?!T =
|
||||
let unsigned = ?decoder.read(StUint[bits])
|
||||
func decode(decoder: var AbiDecoder, T: type Int): ?!T =
|
||||
let unsigned = ?decoder.read(T.unsigned)
|
||||
success cast[T](unsigned)
|
||||
|
||||
template basetype(Range: type range): untyped =
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
import pkg/stint
|
||||
import pkg/upraises
|
||||
import pkg/stew/byteutils
|
||||
import ./integers
|
||||
|
||||
export stint
|
||||
|
||||
|
@ -83,14 +84,11 @@ func padright(encoder: var AbiEncoder, bytes: openArray[byte], padding=0'u8) =
|
|||
func encode(encoder: var AbiEncoder, value: SomeUnsignedInt | StUint) =
|
||||
encoder.padleft(value.toBytesBE)
|
||||
|
||||
func encode[bits](encoder: var AbiEncoder, value: StInt[bits]) =
|
||||
let bytes = value.stuint(bits).toBytesBE
|
||||
func encode(encoder: var AbiEncoder, value: SomeSignedInt | StInt) =
|
||||
let bytes = value.unsigned.toBytesBE
|
||||
let padding = if value.isNegative: 0xFF'u8 else: 0x00'u8
|
||||
encoder.padleft(bytes, padding)
|
||||
|
||||
func encode(encoder: var AbiEncoder, value: SomeSignedInt) =
|
||||
encoder.write(value.to(StInt[64]))
|
||||
|
||||
func encode(encoder: var AbiEncoder, value: bool) =
|
||||
encoder.encode(if value: 1'u8 else: 0'u8)
|
||||
|
||||
|
|
|
@ -0,0 +1,17 @@
|
|||
import pkg/stint
|
||||
|
||||
template unsigned*(T: type SomeSignedInt): type SomeUnsignedInt =
  ## Maps a signed integer type to the unsigned integer type of the
  ## same width. Any type outside `SomeSignedInt` is rejected at
  ## compile time via the `error` pragma.
  when T is int8: uint8
  elif T is int16: uint16
  elif T is int32: uint32
  elif T is int64: uint64
  # `int` is a distinct type from `int64`/`int32` in Nim's type system,
  # so it needs its own arm; `uint` has the same platform-defined width.
  elif T is int: uint
  # NOTE: the error pragma takes a colon: `{.error: "msg".}`
  else: {.error: "unsupported signed integer type".}
|
||||
|
||||
template unsigned*(T: type StInt): type StUint =
  ## Unsigned stint type with the same bit width as the signed type `T`.
  StUint[T.bits]
|
||||
|
||||
func unsigned*(value: SomeSignedInt): SomeUnsignedInt =
  ## Reinterprets the bits of `value` as the unsigned integer type of
  ## equal width (two's-complement bit pattern is preserved).
  cast[unsigned(typeof(value))](value)
|
||||
|
||||
func unsigned*[bits](value: StInt[bits]): StUint[bits] =
  ## Converts a signed stint to the unsigned stint of the same width,
  ## keeping the underlying two's-complement bit pattern.
  stuint(value, bits)
|
Loading…
Reference in New Issue