
Commit 1bf7c05

Prepare for manual memory management in preallocated memory

* Determine ALIGNMENT more cleverly and move it to util.h
* Implement manual_alloc() helper function

1 parent 36698dc · commit 1bf7c05

2 files changed (+43, −7 lines)

src/scratch_impl.h (+2, −7)
@@ -7,14 +7,9 @@
 #ifndef _SECP256K1_SCRATCH_IMPL_H_
 #define _SECP256K1_SCRATCH_IMPL_H_
 
+#include "util.h"
 #include "scratch.h"
 
-/* Using 16 bytes alignment because common architectures never have alignment
- * requirements above 8 for any of the types we care about. In addition we
- * leave some room because currently we don't care about a few bytes.
- * TODO: Determine this at configure time. */
-#define ALIGNMENT 16
-
 static secp256k1_scratch* secp256k1_scratch_create(const secp256k1_callback* error_callback, size_t max_size) {
     secp256k1_scratch* ret = (secp256k1_scratch*)checked_malloc(error_callback, sizeof(*ret));
     if (ret != NULL) {
@@ -71,7 +66,7 @@ static void secp256k1_scratch_deallocate_frame(secp256k1_scratch* scratch) {
 static void *secp256k1_scratch_alloc(secp256k1_scratch* scratch, size_t size) {
     void *ret;
     size_t frame = scratch->frame - 1;
-    size = ((size + ALIGNMENT - 1) / ALIGNMENT) * ALIGNMENT;
+    size = ROUND_TO_ALIGN(size);
 
     if (scratch->frame == 0 || size + scratch->offset[frame] > scratch->frame_size[frame]) {
         return NULL;
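
For illustration only (not part of the commit): the new ROUND_TO_ALIGN macro computes the same rounding the removed inline expression did, padding a requested size up to the next multiple of ALIGNMENT. A minimal standalone sketch of the arithmetic, assuming the default ALIGNMENT of 16:

#include <assert.h>

#define ALIGNMENT 16
#define ROUND_TO_ALIGN(size) (((size + ALIGNMENT - 1) / ALIGNMENT) * ALIGNMENT)

int main(void) {
    assert(ROUND_TO_ALIGN(1) == 16);   /* small requests are padded up */
    assert(ROUND_TO_ALIGN(16) == 16);  /* exact multiples are unchanged */
    assert(ROUND_TO_ALIGN(17) == 32);  /* one byte over rounds to the next multiple */
    assert(ROUND_TO_ALIGN(0) == 0);    /* zero stays zero */
    return 0;
}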

src/util.h (+41, −0)
@@ -84,6 +84,47 @@ static SECP256K1_INLINE void *checked_realloc(const secp256k1_callback* cb, void
     return ret;
 }
 
+#if defined(__BIGGEST_ALIGNMENT__)
+#define ALIGNMENT __BIGGEST_ALIGNMENT__
+#else
+/* Using 16 bytes alignment because common architectures never have alignment
+ * requirements above 8 for any of the types we care about. In addition we
+ * leave some room because currently we don't care about a few bytes. */
+#define ALIGNMENT 16
+#endif
+
+#define ROUND_TO_ALIGN(size) (((size + ALIGNMENT - 1) / ALIGNMENT) * ALIGNMENT)
+
+/* Assume there is a contiguous memory object with bounds [base, base + max_size)
+ * of which the memory range [base, *prealloc_ptr) is already allocated for usage,
+ * where *prealloc_ptr is an aligned pointer. In that setting, this function
+ * reserves the subobject [*prealloc_ptr, *prealloc_ptr + alloc_size) of
+ * alloc_size bytes by increasing *prealloc_ptr accordingly, taking into account
+ * alignment requirements.
+ *
+ * The function returns an aligned pointer to the newly allocated subobject.
+ *
+ * This is useful for manual memory management: if we're simply given a block
+ * [base, base + max_size), the caller can use this function to allocate memory
+ * in this block and keep track of the current allocation state with *prealloc_ptr.
+ *
+ * It is VERIFY_CHECKed that there is enough space left in the memory object and
+ * that *prealloc_ptr is aligned relative to base.
+ */
+static SECP256K1_INLINE void *manual_alloc(void** prealloc_ptr, size_t alloc_size, void* base, size_t max_size) {
+    size_t aligned_alloc_size = ROUND_TO_ALIGN(alloc_size);
+    void* ret;
+    VERIFY_CHECK(prealloc_ptr != NULL);
+    VERIFY_CHECK(*prealloc_ptr != NULL);
+    VERIFY_CHECK(base != NULL);
+    VERIFY_CHECK((unsigned char*)*prealloc_ptr >= (unsigned char*)base);
+    VERIFY_CHECK(((unsigned char*)*prealloc_ptr - (unsigned char*)base) % ALIGNMENT == 0);
+    VERIFY_CHECK((unsigned char*)*prealloc_ptr - (unsigned char*)base + aligned_alloc_size <= max_size);
+    ret = *prealloc_ptr;
+    *((unsigned char**)prealloc_ptr) += aligned_alloc_size;
+    return ret;
+}
+
 /* Macro for restrict, when available and not in a VERIFY build. */
 #if defined(SECP256K1_BUILD) && defined(VERIFY)
 # define SECP256K1_RESTRICT
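
A usage sketch, not taken from the commit: GCC and Clang predefine __BIGGEST_ALIGNMENT__ as the largest alignment ever required by any data type on the target, which is why the commit prefers it over the hardcoded 16 when available. The snippet below shows how a caller might carve two sub-allocations out of one preallocated block with manual_alloc; the block size and pointer names are illustrative assumptions, not part of the library API.

/* Hypothetical caller, assuming the ALIGNMENT, ROUND_TO_ALIGN and manual_alloc
 * definitions from util.h above. In real use, base would typically come from
 * an allocator that returns suitably aligned memory. */
void example(void) {
    unsigned char block[256];
    void *base = block;
    void *prealloc_ptr = block;  /* allocation cursor; starts at base */

    /* First sub-allocation: 40 bytes requested, so the cursor advances by
     * ROUND_TO_ALIGN(40), i.e. 48 with ALIGNMENT == 16. */
    unsigned char *a = (unsigned char *)manual_alloc(&prealloc_ptr, 40, base, sizeof(block));

    /* Second sub-allocation starts at the advanced, still-aligned cursor. */
    unsigned char *b = (unsigned char *)manual_alloc(&prealloc_ptr, 100, base, sizeof(block));

    /* With ALIGNMENT == 16: b == a + 48, and the VERIFY_CHECKs pass because
     * 48 + ROUND_TO_ALIGN(100) == 160 <= 256. */
    (void)a;
    (void)b;
}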
