@@ -37,22 +37,22 @@ static mi_decl_restrict void* mi_heap_malloc_guarded_aligned(mi_heap_t* heap, si
   return p;
 }
 
-static void* mi_heap_malloc_zero_no_guarded(mi_heap_t* heap, size_t size, bool zero) {
+static void* mi_heap_malloc_zero_no_guarded(mi_heap_t* heap, size_t size, bool zero, size_t* usable) {
   const size_t rate = heap->guarded_sample_rate;
   // only write if `rate!=0` so we don't write to the constant `_mi_heap_empty`
   if (rate != 0) { heap->guarded_sample_rate = 0; }
-  void* p = _mi_heap_malloc_zero(heap, size, zero);
+  void* p = _mi_heap_malloc_zero_ex(heap, size, zero, 0, usable);
   if (rate != 0) { heap->guarded_sample_rate = rate; }
   return p;
 }
 #else
-static void* mi_heap_malloc_zero_no_guarded(mi_heap_t* heap, size_t size, bool zero) {
-  return _mi_heap_malloc_zero(heap, size, zero);
+static void* mi_heap_malloc_zero_no_guarded(mi_heap_t* heap, size_t size, bool zero, size_t* usable) {
+  return _mi_heap_malloc_zero_ex(heap, size, zero, 0, usable);
 }
 #endif
 
 // Fallback aligned allocation that over-allocates -- split out for better codegen
-static mi_decl_noinline void* mi_heap_malloc_zero_aligned_at_overalloc(mi_heap_t* const heap, const size_t size, const size_t alignment, const size_t offset, const bool zero) mi_attr_noexcept
+static mi_decl_noinline void* mi_heap_malloc_zero_aligned_at_overalloc(mi_heap_t* const heap, const size_t size, const size_t alignment, const size_t offset, const bool zero, size_t* usable) mi_attr_noexcept
 {
   mi_assert_internal(size <= (MI_MAX_ALLOC_SIZE - MI_PADDING_SIZE));
   mi_assert_internal(alignment != 0 && _mi_is_power_of_two(alignment));
@@ -72,14 +72,14 @@ static mi_decl_noinline void* mi_heap_malloc_zero_aligned_at_overalloc(mi_heap_t
     }
     oversize = (size <= MI_SMALL_SIZE_MAX ? MI_SMALL_SIZE_MAX + 1 /* ensure we use generic malloc path */ : size);
     // note: no guarded as alignment > 0
-    p = _mi_heap_malloc_zero_ex(heap, oversize, false, alignment);  // the page block size should be large enough to align in the single huge page block
+    p = _mi_heap_malloc_zero_ex(heap, oversize, false, alignment, usable);  // the page block size should be large enough to align in the single huge page block
     // zero afterwards as only the area from the aligned_p may be committed!
     if (p == NULL) return NULL;
   }
   else {
     // otherwise over-allocate
     oversize = (size < MI_MAX_ALIGN_SIZE ? MI_MAX_ALIGN_SIZE : size) + alignment - 1;  // adjust for size <= 16; with size 0 and alignment 64k, we would allocate a 64k block and point just beyond that.
-    p = mi_heap_malloc_zero_no_guarded(heap, oversize, zero);
+    p = mi_heap_malloc_zero_no_guarded(heap, oversize, zero, usable);
     if (p == NULL) return NULL;
   }
   mi_page_t* page = _mi_ptr_page(p);
@@ -132,7 +132,7 @@ static mi_decl_noinline void* mi_heap_malloc_zero_aligned_at_overalloc(mi_heap_t
 }
 
 // Generic primitive aligned allocation -- split out for better codegen
-static mi_decl_noinline void* mi_heap_malloc_zero_aligned_at_generic(mi_heap_t* const heap, const size_t size, const size_t alignment, const size_t offset, const bool zero) mi_attr_noexcept
+static mi_decl_noinline void* mi_heap_malloc_zero_aligned_at_generic(mi_heap_t* const heap, const size_t size, const size_t alignment, const size_t offset, const bool zero, size_t* usable) mi_attr_noexcept
 {
   mi_assert_internal(alignment != 0 && _mi_is_power_of_two(alignment));
   // we don't allocate more than MI_MAX_ALLOC_SIZE (see <https://sourceware.org/ml/libc-announce/2019/msg00001.html>)
@@ -147,7 +147,7 @@ static mi_decl_noinline void* mi_heap_malloc_zero_aligned_at_generic(mi_heap_t*
   // this is important to try as the fast path in `mi_heap_malloc_zero_aligned` only works when there exists
   // a page with the right block size, and if we always use the over-alloc fallback that would never happen.
   if (offset == 0 && mi_malloc_is_naturally_aligned(size, alignment)) {
-    void* p = mi_heap_malloc_zero_no_guarded(heap, size, zero);
+    void* p = mi_heap_malloc_zero_no_guarded(heap, size, zero, usable);
     mi_assert_internal(p == NULL || ((uintptr_t)p % alignment) == 0);
     const bool is_aligned_or_null = (((uintptr_t)p) & (alignment - 1)) == 0;
     if mi_likely(is_aligned_or_null) {
@@ -161,12 +161,14 @@ static mi_decl_noinline void* mi_heap_malloc_zero_aligned_at_generic(mi_heap_t*
   }
 
   // fall back to over-allocation
-  return mi_heap_malloc_zero_aligned_at_overalloc(heap, size, alignment, offset, zero);
+  return mi_heap_malloc_zero_aligned_at_overalloc(heap, size, alignment, offset, zero, usable);
 }
 
 
 // Primitive aligned allocation
-static void* mi_heap_malloc_zero_aligned_at(mi_heap_t* const heap, const size_t size, const size_t alignment, const size_t offset, const bool zero) mi_attr_noexcept
+static void* mi_heap_malloc_zero_aligned_at(mi_heap_t* const heap, const size_t size,
+                                            const size_t alignment, const size_t offset, const bool zero,
+                                            size_t* usable) mi_attr_noexcept
 {
   // note: we don't require `size > offset`, we just guarantee that the address at offset is aligned regardless of the allocated size.
   if mi_unlikely(alignment == 0 || !_mi_is_power_of_two(alignment)) {  // require power-of-two (see <https://en.cppreference.com/w/c/memory/aligned_alloc>)
@@ -191,6 +193,7 @@ static void* mi_heap_malloc_zero_aligned_at(mi_heap_t* const heap, const size_t
     const bool is_aligned = (((uintptr_t)page->free + offset) & align_mask) == 0;
     if mi_likely(is_aligned)
     {
+      if (usable != NULL) { *usable = mi_page_usable_block_size(page); }
       void* p = (zero ? _mi_page_malloc_zeroed(heap, page, padsize) : _mi_page_malloc(heap, page, padsize));  // call specific page malloc for better codegen
       mi_assert_internal(p != NULL);
       mi_assert_internal(((uintptr_t)p + offset) % alignment == 0);
@@ -201,7 +204,7 @@ static void* mi_heap_malloc_zero_aligned_at(mi_heap_t* const heap, const size_t
   }
 
   // fallback to generic aligned allocation
-  return mi_heap_malloc_zero_aligned_at_generic(heap, size, alignment, offset, zero);
+  return mi_heap_malloc_zero_aligned_at_generic(heap, size, alignment, offset, zero, usable);
 }
 
 
@@ -210,7 +213,7 @@ static void* mi_heap_malloc_zero_aligned_at(mi_heap_t* const heap, const size_t
 // ------------------------------------------------------
 
 mi_decl_nodiscard mi_decl_restrict void* mi_heap_malloc_aligned_at(mi_heap_t* heap, size_t size, size_t alignment, size_t offset) mi_attr_noexcept {
-  return mi_heap_malloc_zero_aligned_at(heap, size, alignment, offset, false);
+  return mi_heap_malloc_zero_aligned_at(heap, size, alignment, offset, false, NULL);
 }
 
 mi_decl_nodiscard mi_decl_restrict void* mi_heap_malloc_aligned(mi_heap_t* heap, size_t size, size_t alignment) mi_attr_noexcept {
@@ -227,7 +230,7 @@ void* _mi_extern_heap_malloc_aligned = (void*)&mi_heap_malloc_aligned;
 // ------------------------------------------------------
 
 mi_decl_nodiscard mi_decl_restrict void* mi_heap_zalloc_aligned_at(mi_heap_t* heap, size_t size, size_t alignment, size_t offset) mi_attr_noexcept {
-  return mi_heap_malloc_zero_aligned_at(heap, size, alignment, offset, true);
+  return mi_heap_malloc_zero_aligned_at(heap, size, alignment, offset, true, NULL);
 }
 
 mi_decl_nodiscard mi_decl_restrict void* mi_heap_zalloc_aligned(mi_heap_t* heap, size_t size, size_t alignment) mi_attr_noexcept {
@@ -252,6 +255,10 @@ mi_decl_nodiscard mi_decl_restrict void* mi_malloc_aligned(size_t size, size_t a
   return mi_heap_malloc_aligned(mi_prim_get_default_heap(), size, alignment);
 }
 
+mi_decl_nodiscard mi_decl_restrict void* mi_umalloc_aligned(size_t size, size_t alignment, size_t* block_size) mi_attr_noexcept {
+  return mi_heap_malloc_zero_aligned_at(mi_prim_get_default_heap(), size, alignment, 0, false, block_size);
+}
+
 mi_decl_nodiscard mi_decl_restrict void* mi_zalloc_aligned_at(size_t size, size_t alignment, size_t offset) mi_attr_noexcept {
   return mi_heap_zalloc_aligned_at(mi_prim_get_default_heap(), size, alignment, offset);
 }
@@ -260,6 +267,10 @@ mi_decl_nodiscard mi_decl_restrict void* mi_zalloc_aligned(size_t size, size_t a
   return mi_heap_zalloc_aligned(mi_prim_get_default_heap(), size, alignment);
 }
 
+mi_decl_nodiscard mi_decl_restrict void* mi_uzalloc_aligned(size_t size, size_t alignment, size_t* block_size) mi_attr_noexcept {
+  return mi_heap_malloc_zero_aligned_at(mi_prim_get_default_heap(), size, alignment, 0, true, block_size);
+}
+
 mi_decl_nodiscard mi_decl_restrict void* mi_calloc_aligned_at(size_t count, size_t size, size_t alignment, size_t offset) mi_attr_noexcept {
   return mi_heap_calloc_aligned_at(mi_prim_get_default_heap(), count, size, alignment, offset);
 }
@@ -275,8 +286,8 @@ mi_decl_nodiscard mi_decl_restrict void* mi_calloc_aligned(size_t count, size_t
 
 static void* mi_heap_realloc_zero_aligned_at(mi_heap_t* heap, void* p, size_t newsize, size_t alignment, size_t offset, bool zero) mi_attr_noexcept {
   mi_assert(alignment > 0);
-  if (alignment <= sizeof(uintptr_t)) return _mi_heap_realloc_zero(heap, p, newsize, zero);
-  if (p == NULL) return mi_heap_malloc_zero_aligned_at(heap, newsize, alignment, offset, zero);
+  if (alignment <= sizeof(uintptr_t)) return _mi_heap_realloc_zero(heap, p, newsize, zero, NULL, NULL);
+  if (p == NULL) return mi_heap_malloc_zero_aligned_at(heap, newsize, alignment, offset, zero, NULL);
   size_t size = mi_usable_size(p);
   if (newsize <= size && newsize >= (size - (size / 2))
       && (((uintptr_t)p + offset) % alignment) == 0) {
@@ -300,7 +311,7 @@ static void* mi_heap_realloc_zero_aligned_at(mi_heap_t* heap, void* p, size_t ne
 
 static void* mi_heap_realloc_zero_aligned(mi_heap_t* heap, void* p, size_t newsize, size_t alignment, bool zero) mi_attr_noexcept {
   mi_assert(alignment > 0);
-  if (alignment <= sizeof(uintptr_t)) return _mi_heap_realloc_zero(heap, p, newsize, zero);
+  if (alignment <= sizeof(uintptr_t)) return _mi_heap_realloc_zero(heap, p, newsize, zero, NULL, NULL);
   size_t offset = ((uintptr_t)p % alignment);  // use offset of previous allocation (p can be NULL)
   return mi_heap_realloc_zero_aligned_at(heap, p, newsize, alignment, offset, zero);
 }
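
Usage note: below is a minimal sketch of how the new `mi_umalloc_aligned` entry point introduced in this commit could be called; `mi_uzalloc_aligned` works the same way but zero-initializes. This assumes the matching declarations are exported from `mimalloc.h` (the header change is not part of this diff); `mi_free` and `mi_usable_size` are existing mimalloc API.

#include <stdio.h>
#include <mimalloc.h>

int main(void) {
  size_t block_size = 0;
  // Allocate 100 bytes at 64-byte alignment; on success, `block_size`
  // receives the usable size of the underlying block (at least 100).
  void* p = mi_umalloc_aligned(100, 64, &block_size);
  if (p == NULL) return 1;
  printf("requested 100 bytes, usable block size: %zu\n", block_size);
  // The reported size should agree with a separate mi_usable_size(p) call,
  // but is obtained during allocation without an extra page lookup.
  mi_free(p);
  return 0;
}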