Prepare for manual memory management in preallocated memory

* Determine ALIGNMENT more cleverly and move it to util.h
* Implement manual_alloc() helper function
Tim Ruffing 2018-10-18 19:09:51 +02:00
parent 36698dcfee
commit 1bf7c056ba
2 changed files with 43 additions and 7 deletions

src/scratch_impl.h

@@ -7,14 +7,9 @@
#ifndef _SECP256K1_SCRATCH_IMPL_H_
#define _SECP256K1_SCRATCH_IMPL_H_
#include "util.h"
#include "scratch.h"
/* Using 16 bytes alignment because common architectures never have alignment
* requirements above 8 for any of the types we care about. In addition we
* leave some room because currently we don't care about a few bytes.
* TODO: Determine this at configure time. */
#define ALIGNMENT 16
static secp256k1_scratch* secp256k1_scratch_create(const secp256k1_callback* error_callback, size_t max_size) {
    secp256k1_scratch* ret = (secp256k1_scratch*)checked_malloc(error_callback, sizeof(*ret));
    if (ret != NULL) {
@@ -71,7 +66,7 @@ static void secp256k1_scratch_deallocate_frame(secp256k1_scratch* scratch) {
static void *secp256k1_scratch_alloc(secp256k1_scratch* scratch, size_t size) {
    void *ret;
    size_t frame = scratch->frame - 1;
    size = ((size + ALIGNMENT - 1) / ALIGNMENT) * ALIGNMENT;
    size = ROUND_TO_ALIGN(size);
    if (scratch->frame == 0 || size + scratch->offset[frame] > scratch->frame_size[frame]) {
        return NULL;
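
The change in secp256k1_scratch_alloc is a pure refactor: ROUND_TO_ALIGN, introduced in util.h below, expands to the same expression that was previously written inline, rounding the requested size up to the next multiple of ALIGNMENT. A minimal standalone check of that equivalence (a sketch, not part of the commit, using the 16-byte fallback value of ALIGNMENT):

#include <assert.h>
#include <stddef.h>

#define ALIGNMENT 16   /* fallback value, see util.h below */
#define ROUND_TO_ALIGN(size) (((size + ALIGNMENT - 1) / ALIGNMENT) * ALIGNMENT)

int main(void) {
    size_t size;
    for (size = 0; size <= 1000; size++) {
        /* The removed inline expression and the new macro agree for every size. */
        assert(((size + ALIGNMENT - 1) / ALIGNMENT) * ALIGNMENT == ROUND_TO_ALIGN(size));
    }
    /* For example, 1 and 16 both round to 16, while 17 rounds to 32. */
    assert(ROUND_TO_ALIGN(1) == 16 && ROUND_TO_ALIGN(16) == 16 && ROUND_TO_ALIGN(17) == 32);
    return 0;
}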

src/util.h

@@ -84,6 +84,47 @@ static SECP256K1_INLINE void *checked_realloc(const secp256k1_callback* cb, void
    return ret;
}
#if defined(__BIGGEST_ALIGNMENT__)
#define ALIGNMENT __BIGGEST_ALIGNMENT__
#else
/* Using 16 bytes alignment because common architectures never have alignment
* requirements above 8 for any of the types we care about. In addition we
* leave some room because currently we don't care about a few bytes. */
#define ALIGNMENT 16
#endif
#define ROUND_TO_ALIGN(size) (((size + ALIGNMENT - 1) / ALIGNMENT) * ALIGNMENT)
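
Which value ALIGNMENT resolves to depends on the compiler: GCC and Clang predefine __BIGGEST_ALIGNMENT__ as the largest alignment required by any fundamental type on the target, and the 16-byte fallback is only used when that macro is unavailable. A small probe of the selection logic (a sketch, not part of the commit):

#include <stdio.h>

/* Same selection logic as in util.h above. */
#if defined(__BIGGEST_ALIGNMENT__)
#define ALIGNMENT __BIGGEST_ALIGNMENT__
#else
#define ALIGNMENT 16
#endif

int main(void) {
    /* Commonly prints 16 on x86-64; larger when wide vector extensions raise the requirement. */
    printf("ALIGNMENT = %d\n", (int)ALIGNMENT);
    return 0;
}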
/* Assume there is a contiguous memory object with bounds [base, base + max_size)
* of which the memory range [base, *prealloc_ptr) is already allocated for usage,
* where *prealloc_ptr is an aligned pointer. In that setting, this function
* reserves the subobject [*prealloc_ptr, *prealloc_ptr + alloc_size) of
* alloc_size bytes by increasing *prealloc_ptr accordingly, taking into account
* alignment requirements.
*
* The function returns an aligned pointer to the newly allocated subobject.
*
* This is useful for manual memory management: if we're simply given a block
* [base, base + max_size), the caller can use this function to allocate memory
* in this block and keep track of the current allocation state with *prealloc_ptr.
*
* It is VERIFY_CHECKed that there is enough space left in the memory object and
* *prealloc_ptr is aligned relative to base.
*/
static SECP256K1_INLINE void *manual_alloc(void** prealloc_ptr, size_t alloc_size, void* base, size_t max_size) {
    size_t aligned_alloc_size = ROUND_TO_ALIGN(alloc_size);
    void* ret;
    VERIFY_CHECK(prealloc_ptr != NULL);
    VERIFY_CHECK(*prealloc_ptr != NULL);
    VERIFY_CHECK(base != NULL);
    VERIFY_CHECK((unsigned char*)*prealloc_ptr >= (unsigned char*)base);
    VERIFY_CHECK(((unsigned char*)*prealloc_ptr - (unsigned char*)base) % ALIGNMENT == 0);
    VERIFY_CHECK((unsigned char*)*prealloc_ptr - (unsigned char*)base + aligned_alloc_size <= max_size);
    ret = *prealloc_ptr;
    *((unsigned char**)prealloc_ptr) += aligned_alloc_size;
    return ret;
}
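
To illustrate how a caller is expected to use this helper, the sketch below (not part of the commit; it assumes manual_alloc from util.h above is in scope, and the block and allocation sizes are made up) carves one malloc'd block into two aligned sub-allocations and releases everything with a single free:

#include <stdlib.h>
#include <string.h>

static int example(void) {
    size_t max_size = 1024;
    void *base = malloc(max_size);   /* the contiguous object [base, base + max_size) */
    void *prealloc = base;           /* *prealloc_ptr starts out equal to base */
    unsigned char *part1;
    unsigned char *part2;
    if (base == NULL) {
        return 0;
    }

    /* Each call returns an aligned pointer into the block and advances prealloc
     * by the requested size rounded up to a multiple of ALIGNMENT. */
    part1 = (unsigned char *)manual_alloc(&prealloc, 100, base, max_size);
    part2 = (unsigned char *)manual_alloc(&prealloc, 200, base, max_size);

    memset(part1, 0x01, 100);
    memset(part2, 0x02, 200);

    free(base);                      /* the whole block is released at once */
    return 1;
}

This is the pattern that manual memory management in a preallocated block reduces to: the only allocation state the caller tracks is the single prealloc pointer.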
/* Macro for restrict, when available and not in a VERIFY build. */
#if defined(SECP256K1_BUILD) && defined(VERIFY)
# define SECP256K1_RESTRICT