diff --git a/src/scalar.h b/src/scalar.h
index 2469302..a69a4cc 100644
--- a/src/scalar.h
+++ b/src/scalar.h
@@ -24,12 +24,18 @@
 /** Clear a scalar to prevent the leak of sensitive data. */
 static void secp256k1_scalar_clear(secp256k1_scalar_t *r);
 
-/** Access bits from a scalar. */
-static int secp256k1_scalar_get_bits(const secp256k1_scalar_t *a, int offset, int count);
+/** Access bits from a scalar. All requested bits must belong to the same 32-bit limb. */
+static unsigned int secp256k1_scalar_get_bits(const secp256k1_scalar_t *a, unsigned int offset, unsigned int count);
+
+/** Access bits from a scalar. Not constant time. */
+static unsigned int secp256k1_scalar_get_bits_var(const secp256k1_scalar_t *a, unsigned int offset, unsigned int count);
 
 /** Set a scalar from a big endian byte array. */
 static void secp256k1_scalar_set_b32(secp256k1_scalar_t *r, const unsigned char *bin, int *overflow);
 
+/** Set a scalar to an unsigned integer. */
+static void secp256k1_scalar_set_int(secp256k1_scalar_t *r, unsigned int v);
+
 /** Convert a scalar to a byte array. */
 static void secp256k1_scalar_get_b32(unsigned char *bin, const secp256k1_scalar_t* a);
 
diff --git a/src/scalar_4x64_impl.h b/src/scalar_4x64_impl.h
index 02ae318..fff9119 100644
--- a/src/scalar_4x64_impl.h
+++ b/src/scalar_4x64_impl.h
@@ -33,9 +33,27 @@ SECP256K1_INLINE static void secp256k1_scalar_clear(secp256k1_scalar_t *r) {
     r->d[3] = 0;
 }
 
-SECP256K1_INLINE static int secp256k1_scalar_get_bits(const secp256k1_scalar_t *a, int offset, int count) {
-    VERIFY_CHECK((offset + count - 1) / 64 == offset / 64);
-    return (a->d[offset / 64] >> (offset % 64)) & ((((uint64_t)1) << count) - 1);
+SECP256K1_INLINE static void secp256k1_scalar_set_int(secp256k1_scalar_t *r, unsigned int v) {
+    r->d[0] = v;
+    r->d[1] = 0;
+    r->d[2] = 0;
+    r->d[3] = 0;
+}
+
+SECP256K1_INLINE static unsigned int secp256k1_scalar_get_bits(const secp256k1_scalar_t *a, unsigned int offset, unsigned int count) {
+    VERIFY_CHECK((offset + count - 1) >> 6 == offset >> 6);
+    return (a->d[offset >> 6] >> (offset & 0x3F)) & ((((uint64_t)1) << count) - 1);
+}
+
+SECP256K1_INLINE static unsigned int secp256k1_scalar_get_bits_var(const secp256k1_scalar_t *a, unsigned int offset, unsigned int count) {
+    VERIFY_CHECK(count < 32);
+    VERIFY_CHECK(offset + count <= 256);
+    if ((offset + count - 1) >> 6 == offset >> 6) {
+        return secp256k1_scalar_get_bits(a, offset, count);
+    } else {
+        VERIFY_CHECK((offset >> 6) + 1 < 4);
+        return ((a->d[offset >> 6] >> (offset & 0x3F)) | (a->d[(offset >> 6) + 1] << (64 - (offset & 0x3F)))) & ((((uint64_t)1) << count) - 1);
+    }
 }
 
 SECP256K1_INLINE static int secp256k1_scalar_check_overflow(const secp256k1_scalar_t *a) {
diff --git a/src/scalar_8x32_impl.h b/src/scalar_8x32_impl.h
index cad1065..2a4afc8 100644
--- a/src/scalar_8x32_impl.h
+++ b/src/scalar_8x32_impl.h
@@ -45,9 +45,31 @@ SECP256K1_INLINE static void secp256k1_scalar_clear(secp256k1_scalar_t *r) {
     r->d[7] = 0;
 }
 
-SECP256K1_INLINE static int secp256k1_scalar_get_bits(const secp256k1_scalar_t *a, int offset, int count) {
-    VERIFY_CHECK((offset + count - 1) / 32 == offset / 32);
-    return (a->d[offset / 32] >> (offset % 32)) & ((1 << count) - 1);
+SECP256K1_INLINE static void secp256k1_scalar_set_int(secp256k1_scalar_t *r, unsigned int v) {
+    r->d[0] = v;
+    r->d[1] = 0;
+    r->d[2] = 0;
+    r->d[3] = 0;
+    r->d[4] = 0;
+    r->d[5] = 0;
+    r->d[6] = 0;
+    r->d[7] = 0;
+}
+
+SECP256K1_INLINE static unsigned int secp256k1_scalar_get_bits(const secp256k1_scalar_t *a, unsigned int offset, unsigned int count) {
+    VERIFY_CHECK((offset + count - 1) >> 5 == offset >> 5);
+    return (a->d[offset >> 5] >> (offset & 0x1F)) & ((1 << count) - 1);
+}
+
+SECP256K1_INLINE static unsigned int secp256k1_scalar_get_bits_var(const secp256k1_scalar_t *a, unsigned int offset, unsigned int count) {
+    VERIFY_CHECK(count < 32);
+    VERIFY_CHECK(offset + count <= 256);
+    if ((offset + count - 1) >> 5 == offset >> 5) {
+        return secp256k1_scalar_get_bits(a, offset, count);
+    } else {
+        VERIFY_CHECK((offset >> 5) + 1 < 8);
+        return ((a->d[offset >> 5] >> (offset & 0x1F)) | (a->d[(offset >> 5) + 1] << (32 - (offset & 0x1F)))) & ((((uint32_t)1) << count) - 1);
+    }
 }
 
 SECP256K1_INLINE static int secp256k1_scalar_check_overflow(const secp256k1_scalar_t *a) {
diff --git a/src/tests.c b/src/tests.c
index e964449..6e81118 100644
--- a/src/tests.c
+++ b/src/tests.c
@@ -278,15 +278,38 @@ void scalar_test(void) {
 
     {
         /* Test that fetching groups of 4 bits from a scalar and recursing n(i)=16*n(i-1)+p(i) reconstructs it. */
-        secp256k1_num_t n, t, m;
-        secp256k1_num_set_int(&n, 0);
-        secp256k1_num_set_int(&m, 16);
+        secp256k1_scalar_t n;
+        secp256k1_scalar_set_int(&n, 0);
         for (int i = 0; i < 256; i += 4) {
-            secp256k1_num_set_int(&t, secp256k1_scalar_get_bits(&s, 256 - 4 - i, 4));
-            secp256k1_num_mul(&n, &n, &m);
-            secp256k1_num_add(&n, &n, &t);
+            secp256k1_scalar_t t;
+            secp256k1_scalar_set_int(&t, secp256k1_scalar_get_bits(&s, 256 - 4 - i, 4));
+            for (int j = 0; j < 4; j++) {
+                secp256k1_scalar_add(&n, &n, &n);
+            }
+            secp256k1_scalar_add(&n, &n, &t);
         }
-        CHECK(secp256k1_num_eq(&n, &snum));
+        CHECK(secp256k1_scalar_eq(&n, &s));
+    }
+
+    {
+        /* Test that fetching groups of randomly-sized bits from a scalar and recursing n(i)=b*n(i-1)+p(i) reconstructs it. */
+        secp256k1_scalar_t n;
+        secp256k1_scalar_set_int(&n, 0);
+        int i = 0;
+        while (i < 256) {
+            int now = (secp256k1_rand32() % 15) + 1;
+            if (now + i > 256) {
+                now = 256 - i;
+            }
+            secp256k1_scalar_t t;
+            secp256k1_scalar_set_int(&t, secp256k1_scalar_get_bits_var(&s, 256 - now - i, now));
+            for (int j = 0; j < now; j++) {
+                secp256k1_scalar_add(&n, &n, &n);
+            }
+            secp256k1_scalar_add(&n, &n, &t);
+            i += now;
+        }
+        CHECK(secp256k1_scalar_eq(&n, &s));
     }
 
     {
@@ -386,8 +409,7 @@ void scalar_test(void) {
         /* Test add_bit. */
         int bit = secp256k1_rand32() % 256;
        secp256k1_scalar_t b;
-        secp256k1_scalar_clear(&b);
-        secp256k1_scalar_add_bit(&b, 0);
+        secp256k1_scalar_set_int(&b, 1);
         CHECK(secp256k1_scalar_is_one(&b));
         for (int i = 0; i < bit; i++) {
             secp256k1_scalar_add(&b, &b, &b);
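For illustration, here is a small standalone sketch of the limb-crossing logic that the new secp256k1_scalar_get_bits_var uses in the 4x64 backend. It is not part of the change itself; the scalar4 struct and the get_bits/get_bits_var helpers below are local stand-ins that mirror the code above, with assert() in place of VERIFY_CHECK.

/* Standalone sketch (assumption: a local 4x64 little-endian limb layout,
 * mirroring the d[] array of secp256k1_scalar_t in scalar_4x64_impl.h). */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

typedef struct { uint64_t d[4]; } scalar4; /* d[0] holds bits 0..63, d[1] bits 64..127, ... */

/* Fast path: the requested window lies entirely within one limb. */
static unsigned int get_bits(const scalar4 *a, unsigned int offset, unsigned int count) {
    assert((offset + count - 1) >> 6 == offset >> 6);
    return (a->d[offset >> 6] >> (offset & 0x3F)) & ((((uint64_t)1) << count) - 1);
}

/* Variable-time path: if the window straddles a limb boundary, combine the top
 * bits of the lower limb with the bottom bits of the next limb, then mask. */
static unsigned int get_bits_var(const scalar4 *a, unsigned int offset, unsigned int count) {
    assert(count < 32);
    assert(offset + count <= 256);
    if ((offset + count - 1) >> 6 == offset >> 6) {
        return get_bits(a, offset, count);
    }
    assert((offset >> 6) + 1 < 4);
    return ((a->d[offset >> 6] >> (offset & 0x3F)) |
            (a->d[(offset >> 6) + 1] << (64 - (offset & 0x3F)))) &
           ((((uint64_t)1) << count) - 1);
}

int main(void) {
    scalar4 s = {{0xFEDCBA9876543210ULL, 0x0123456789ABCDEFULL, 0, 0}};
    /* An 8-bit window at offset 60 crosses from d[0] into d[1]: bits 60..63 are
     * the top nibble of d[0] (0xF) and bits 64..67 are the low nibble of d[1]
     * (0xF), so the combined read is 0xFF. */
    printf("bits 60..67 = 0x%X\n", get_bits_var(&s, 60, 8));
    return 0;
}

The doubling loops in the new tests rely on the same idea from the other direction: since no multiply-by-small-constant helper is used here, n is shifted left by one bit per iteration via repeated secp256k1_scalar_add(&n, &n, &n) before the freshly read window is added back in.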