Compile-time creation of Uint (#53)

* Start allowing compile-time creation of stint numbers. Works for Uint256

* Remove and reorder the initUintImpl, zero and one procs

* Fix assignment of uint64 to 2 x uint32, remove unsafeConv, and use const for base/radix

* assign_least_significant_words shouldn't implicitly capture result

* Simplify least significant words macros
Mamy Ratsimbazafy authored 2018-06-16 18:54:40 +02:00, committed by GitHub
parent 35c57de63d
commit 184e22b659
5 changed files with 64 additions and 107 deletions
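
As a quick illustration of what this commit enables, here is a minimal sketch (not part of the diff) that builds a 256-bit constant at compile time, assuming the public `stuint` constructor exported by the library:

```nim
import stint  # assumes the stint package is available

# `stuint` can now be evaluated in a `const` context, so the 256-bit
# value is folded at compile time instead of being rebuilt at run time.
const two = 2.stuint(256)

doAssert two + two == 4.stuint(256)
```

This is the same pattern the parser below relies on when `let radix = unsafeConv(base, T)` is replaced by `const radix = base.uint8.stuint(bits)`.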

View File

@@ -10,7 +10,6 @@
import
./private/datatypes,
./private/int_negabs,
./private/initialization,
./private/[as_words, as_signed_words],
./int_public, ./uint_public,
typetraits, algorithm
@@ -25,39 +24,43 @@ template static_check_size(T: typedesc[SomeInteger], bits: static[int]) =
"\nUse a smaller input type instead. This is a compile-time check" &
" to avoid a costly run-time bit_length check at each StUint initialization."
template assign_least_significant_words[T: SomeInteger](result: var (Stuint|Stint), n: T) =
template lsw_result: untyped = least_significant_word(result.data)
template slsw_result: untyped = second_least_significant_word(result.data)
const wordSize = lsw_result.getSize
when sizeof(T) * 8 <= wordSize:
lsw_result = (type lsw_result)(n)
else: # We try to store an int64 in 2 x uint32 or 4 x uint16
# For now we only support assignment from 64-bit to 2 x 32-bit
const
size = getSize(T)
halfSize = size div 2
halfMask = (1.T shl halfSize) - 1.T
lsw_result = (type lsw_result)(n and halfMask)
slsw_result = (type slsw_result)(n shr halfSize)
func stuint*[T: SomeInteger](n: T, bits: static[int]): StUint[bits] {.inline.}=
## Converts an integer to an arbitrary precision integer.
assert n >= 0.T
when result.data is UintImpl:
static_check_size(T, bits)
let r_ptr = cast[ptr array[bits div (sizeof(T) * 8), T]](result.addr)
when system.cpuEndian == littleEndian:
# "Least significant byte is at the beginning"
r_ptr[0] = n
else:
r_ptr[r_ptr[].len - 1] = n
assign_least_significant_words(result, n)
else:
result.data = (type result.data)(n)
func stint*[T: SomeInteger](n: T, bits: static[int]): StInt[bits] {.inline.}=
## Converts an integer to an arbitrary precision signed integer.
when result.data is IntImpl:
static_check_size(T, bits)
let r_ptr = cast[ptr array[bits div (sizeof(T) * 8), T]](result.addr)
when system.cpuEndian == littleEndian:
# "Least significant byte is at the beginning"
if n < 0:
r_ptr[0] = -n
result = -result
else:
r_ptr[0] = n
if n < 0:
assign_least_significant_words(result, -n)
result = -result
else:
if n < 0:
r_ptr[r_ptr[].len - 1] = -n
result = -result
else:
r_ptr[r_ptr[].len - 1] = n
assign_least_significant_words(result, n)
else:
result.data = (type result.data)(n)
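
For reference, a standalone sketch of the half-word split that `assign_least_significant_words` performs when the input type is wider than a single machine word (the constants mirror the template above; the sample value is illustrative):

```nim
# Splitting a uint64 across two uint32 limbs, as in the
# `sizeof(T) * 8 > wordSize` branch of the template.
const
  size     = 64
  halfSize = size div 2                # 32
  halfMask = (1'u64 shl halfSize) - 1  # 0x0000_0000_FFFF_FFFF

let n  = 0x1122_3344_AABB_CCDD'u64
let lo = uint32(n and halfMask)        # 0xAABBCCDD -> least significant word
let hi = uint32(n shr halfSize)        # 0x11223344 -> second least significant word

doAssert ((uint64(hi) shl 32) or uint64(lo)) == n
```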
@@ -104,20 +107,7 @@ func readDecChar(c: range['0'..'9']): int {.inline.}=
# specialization without branching for base <= 10.
ord(c) - ord('0')
func unsafeConv[bits: static[int]](n: range[0..16], T: typedesc[Stint[bits]|Stuint[bits]]): T =
## Fast convert a small int to a Stint/Stuint
## This assumes that the int always fit.
## Purpose:
## - Converting bases in the range [2..16]
## - Converting decimal/hexadecimal in range [0..15]
let r_ptr = cast[ptr array[bits div 8, byte]](result.addr)
when system.cpuEndian == littleEndian:
r_ptr[0] = n.byte
else:
r_ptr[r_ptr[].len - 1] = n.byte
func parse*[bits: static[int]](input: string, T: typedesc[Stuint[bits]], base: static[int]): T =
func parse*[bits: static[int]](input: string, T: typedesc[Stuint[bits]], base: static[uint8]): T =
## Parse a string and store the result in a Stint[bits] or Stuint[bits].
static: assert (base >= 2) and base <= 16, "Only bases from 2..16 are supported"
@@ -126,19 +116,19 @@ func parse*[bits: static[int]](input: string, T: typedesc[Stuint[bits]], base: s
# TODO: we can special case hex result/input as an array of bytes
# and be much faster
let radix = unsafeConv(base, T)
const radix = base.uint8.stuint(bits)
var curr = 0 # Current index in the string
skipPrefixes(curr, input, base)
while curr < input.len:
# TODO: overflow detection
when base <= 10:
result = result * radix + input[curr].readDecChar.unsafeConv(T)
result = result * radix + input[curr].readDecChar.stuint(bits)
else:
result = result * radix + input[curr].readHexChar.unsafeConv(T)
result = result * radix + input[curr].readHexChar.stuint(bits)
nextNonBlank(curr, input)
func parse*[bits: static[int]](input: string, T: typedesc[Stint[bits]], base: static[int]): T =
func parse*[bits: static[int]](input: string, T: typedesc[Stint[bits]], base: static[int8]): T =
## Parse a string and store the result in a Stint[bits] or Stuint[bits].
static: assert (base >= 2) and base <= 16, "Only bases from 2..16 are supported"
@@ -148,7 +138,7 @@ func parse*[bits: static[int]](input: string, T: typedesc[Stint[bits]], base: st
# and be much faster
# For conversion we require overflowing operations (for example for negative hex numbers)
let radix = unsafeConv(base, Stuint[bits])
const radix = base.int8.stuint(bits)
var
curr = 0 # Current index in the string
@@ -165,9 +155,9 @@ func parse*[bits: static[int]](input: string, T: typedesc[Stint[bits]], base: st
while curr < input.len:
# TODO: overflow detection
when base <= 10:
no_overflow = no_overflow * radix + input[curr].readDecChar.unsafeConv(Stuint[bits])
no_overflow = no_overflow * radix + input[curr].readDecChar.stuint(bits)
else:
no_overflow = no_overflow * radix + input[curr].readHexChar.unsafeConv(Stuint[bits])
no_overflow = no_overflow * radix + input[curr].readHexChar.stuint(bits)
nextNonBlank(curr, input)
# TODO: we can't create the lowest int this way
@@ -187,7 +177,7 @@ func hexToUint*[bits: static[int]](hexString: string): Stuint[bits] {.inline.} =
## Convert a hex string to the corresponding unsigned integer
parse(hexString, type result, base = 16)
func toString*[bits: static[int]](num: StUint[bits], base: static[int]): string =
func toString*[bits: static[int]](num: StUint[bits], base: static[uint8]): string =
## Convert a Stint or Stuint to string.
## In case of negative numbers:
## - they are prefixed with "-" for base 10.
@@ -197,7 +187,7 @@ func toString*[bits: static[int]](num: StUint[bits], base: static[int]): string
# TODO: use static[range[2 .. 16]], not supported at the moment (2018-04-26)
const hexChars = "0123456789abcdef"
let radix = unsafeConv(base, type num)
const radix = base.uint8.stuint(bits)
result = ""
var (q, r) = divmod(num, radix)
@@ -210,7 +200,7 @@ func toString*[bits: static[int]](num: StUint[bits], base: static[int]): string
reverse(result)
func toString*[bits: static[int]](num: Stint[bits], base: static[int]): string =
func toString*[bits: static[int]](num: Stint[bits], base: static[int8]): string =
## Convert a Stint or Stuint to string.
## In case of negative numbers:
## - they are prefixed with "-" for base 10.
@@ -220,7 +210,7 @@ func toString*[bits: static[int]](num: Stint[bits], base: static[int]): string =
# TODO: use static[range[2 .. 16]], not supported at the moment (2018-04-26)
const hexChars = "0123456789abcdef"
let radix = unsafeConv(base, type num)
const radix = base.int8.stint(bits)
result = ""
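
To make the effect of the static `radix` concrete, here is a small usage sketch of `parse` and `toString` (assuming the exported signatures shown above; the literal base `16` is expected to convert to the static parameter, and the input string is illustrative):

```nim
import stint

let x = parse("ff", Stuint[256], base = 16)
doAssert x == 255.stuint(256)
doAssert x.toString(base = 16) == "ff"
```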

View File

@@ -56,21 +56,6 @@ macro most_significant_word*(x: IntImpl): untyped =
# overflow checking
cast[int](cast[`optim_type`](`x`)[`msw_pos`])
macro least_significant_word*(x: IntImpl): untyped =
let optim_type = optimInt(x)
if optim_type.isInt:
result = quote do:
cast[`optim_type`](`x`)
else:
when system.cpuEndian == littleEndian:
let size = getSize(x)
let msw_pos = 0
else:
let msw_pos = size div 8 - 1
result = quote do:
cast[int](cast[`optim_type`](`x`)[`msw_pos`])
macro asSignedWordsZip*[T](
x, y: IntImpl[T],
loopBody: untyped): untyped =

View File

@@ -60,20 +60,25 @@ proc replaceNodes*(ast: NimNode, replacing: NimNode, to_replace: NimNode): NimNo
return rTree
result = inspect(ast)
macro least_significant_word*(x: UintImpl): untyped =
proc least_significant_two_words*(x: NimNode): tuple[lo, hi: NimNode] =
var node = x.getTypeInst
var result_lo = x
let optim_type = optimUInt(x)
if optim_type.isUInt:
result = quote do:
cast[`optim_type`](`x`)
else:
when system.cpuEndian == littleEndian:
let size = getSize(x)
let msw_pos = 0
else:
let msw_pos = size div 64 - 1
result = quote do:
cast[`optim_type`](`x`)[`msw_pos`]
while node.kind == nnkBracketExpr:
assert eqIdent(node[0], "UintImpl") or eqIdent(node[0], "IntImpl"), (
"least_significant_word only supports primitive integers, Stint and Stuint")
result_lo = quote do: `result_lo`.lo
node = node[1]
var result_hi = result_lo.copyNimTree # ⚠ Aliasing: NimNodes are ref objects
result_hi[1] = newIdentNode("hi") # replace the last lo by hi
result = (result_lo, result_hi)
macro second_least_significant_word*(x: UintImpl or IntImpl): untyped =
result = least_significant_two_words(x).hi
macro least_significant_word*(x: UintImpl or IntImpl): untyped =
result = least_significant_two_words(x).lo
macro asWords*(n: UintImpl or IntImpl, ignoreEndianness: static[bool], loopBody: untyped): untyped =
## Iterates over n, as an array of words.
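
To illustrate the field chains that `least_significant_two_words` builds, here is a sketch using hand-rolled stand-ins for the nested hi/lo representation (the `Words*` types are illustrative, not part of stint):

```nim
type
  Words128 = object   # stand-in for UintImpl[uint64]
    lo, hi: uint64
  Words256 = object   # stand-in for UintImpl[UintImpl[uint64]]
    lo, hi: Words128

var x: Words256
# The macros generate accesses equivalent to these hand-written ones:
x.lo.lo = 1'u64       # least_significant_word(x)
x.lo.hi = 2'u64       # second_least_significant_word(x)

doAssert x.lo.lo == 1 and x.lo.hi == 2
```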

View File

@@ -7,36 +7,13 @@
#
# at your option. This file may not be copied, modified, or distributed except according to those terms.
import ./datatypes, typetraits
func initUintImpl*[InType, OutType](x: InType, _: typedesc[OutType]): OutType {.inline.} =
const
size_in = getSize(x)
size_out = getSize(result)
static:
assert size_out >= size_in, "The result type size (" & $size_out &
" for " & $OutType.name &
") should be equal or bigger than the input type size (" & $size_in &
" for " & $InType.name & ")."
when OutType is SomeUnsignedInt:
result = x.OutType
elif size_in == size_out:
result = cast[type result](x)
else:
result.lo = initUintImpl(x, type result.lo)
import ./datatypes, ./as_words, typetraits
func zero*[T: BaseUint](_: typedesc[T]): T {.inline.}=
discard
func one*[T: BaseUint or IntImpl](_: typedesc[T]): T {.inline.}=
when T is SomeInteger:
result = T(1)
else:
let r_ptr = cast[ptr array[getSize(result) div 8, byte]](result.addr)
when system.cpuEndian == littleEndian:
r_ptr[0] = 1
else:
r_ptr[r_ptr[].len - 1] = 1
func one*(T: typedesc[UintImpl or IntImpl]): T {.inline.} =
least_significant_word(result) = 1
func one*(T: typedesc[SomeInteger]): T {.inline.} =
1

View File

@@ -92,12 +92,12 @@ func countLeadingZeroBits*(x: StUint): int {.inline.} =
import ./private/initialization
func one*[bits: static[int]](T: typedesc[Stuint[bits] or Stint[bits]]): T {.inline.} =
result.data = one(type result.data)
func zero*[bits: static[int]](T: typedesc[Stuint[bits] or Stint[bits]]): T {.inline.} =
discard
func one*[bits: static[int]](T: typedesc[Stuint[bits]]): T {.inline.} =
result.data = one(type result.data)
import ./private/uint_exp, math
func pow*(x: StUint, y: Natural): StUint {.inline.} =
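
Finally, a minimal sketch of the refactored `zero`/`one` initializers in use (assuming they are re-exported through the top-level stint module):

```nim
import stint

doAssert zero(Stuint[256]) + one(Stuint[256]) == 1.stuint(256)
doAssert one(Stuint[256]) - one(Stuint[256]) == zero(Stuint[256])
```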