From cf449b3df4d1c611f168a6b47687091a14da2494 Mon Sep 17 00:00:00 2001
From: Daniel Micay
Date: Tue, 18 Sep 2018 17:28:52 -0400
Subject: [PATCH] add sized deallocation support

---
 malloc.c | 32 ++++++++++++++++++++++++++------
 malloc.h |  3 +++
 2 files changed, 29 insertions(+), 6 deletions(-)

diff --git a/malloc.c b/malloc.c
index ef5cdbb..a000acd 100644
--- a/malloc.c
+++ b/malloc.c
@@ -402,11 +402,14 @@ static void enqueue_free_slab(struct size_class *c, struct slab_metadata *metada
     c->free_slabs_tail = metadata;
 }
 
-static inline void deallocate_small(void *p) {
+static inline void deallocate_small(void *p, size_t *expected_size) {
     size_t class = slab_size_class(p);
 
     struct size_class *c = &size_class_metadata[class];
     size_t size = size_classes[class];
+    if (expected_size && size != *expected_size) {
+        fatal_error("sized deallocation mismatch");
+    }
     bool is_zero_size = size == 0;
     if (is_zero_size) {
         size = 16;
@@ -751,7 +754,7 @@ static void *allocate(size_t size) {
     return p;
 }
 
-static void deallocate_large(void *p) {
+static void deallocate_large(void *p, size_t *expected_size) {
     enforce_init();
 
     mutex_lock(&regions_lock);
@@ -760,6 +763,9 @@ static void deallocate_large(void *p) {
         fatal_error("invalid free");
     }
     size_t size = region->size;
+    if (expected_size && size != *expected_size) {
+        fatal_error("sized deallocation mismatch");
+    }
     size_t guard_size = region->guard_size;
     regions_delete(region);
     mutex_unlock(&regions_lock);
@@ -894,9 +900,9 @@ EXPORT void *h_realloc(void *old, size_t size) {
     }
     memcpy(new, old, copy_size);
     if (old_size <= max_slab_size_class) {
-        deallocate_small(old);
+        deallocate_small(old, NULL);
     } else {
-        deallocate_large(old);
+        deallocate_large(old, NULL);
     }
     return new;
 }
@@ -987,15 +993,29 @@ EXPORT void h_free(void *p) {
     }
 
     if (p >= ro.slab_region_start && p < ro.slab_region_end) {
-        deallocate_small(p);
+        deallocate_small(p, NULL);
         return;
     }
 
-    deallocate_large(p);
+    deallocate_large(p, NULL);
 }
 
 EXPORT void h_cfree(void *ptr) ALIAS(h_free);
 
+EXPORT void h_free_sized(void *p, size_t expected_size) {
+    if (p == NULL) {
+        return;
+    }
+
+    if (p >= ro.slab_region_start && p < ro.slab_region_end) {
+        expected_size = get_size_info(adjust_size_for_canaries(expected_size)).size;
+        deallocate_small(p, &expected_size);
+        return;
+    }
+
+    deallocate_large(p, &expected_size);
+}
+
 EXPORT size_t h_malloc_usable_size(void *p) {
     if (p == NULL) {
         return 0;
diff --git a/malloc.h b/malloc.h
index 9073c29..402f95b 100644
--- a/malloc.h
+++ b/malloc.h
@@ -34,6 +34,7 @@
 #define h_malloc_object_size malloc_object_size
 #define h_malloc_object_size_fast malloc_object_size_fast
+#define h_free_sized free_sized
 #endif
 
 // C standard
@@ -84,4 +85,6 @@
 size_t h_malloc_object_size(void *ptr);
 // similar to malloc_object_size, but avoiding locking so the results are much more limited
 size_t h_malloc_object_size_fast(void *ptr);
+void h_free_sized(void *ptr, size_t expected_size);
+
 #endif
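
Not part of the patch: a minimal caller-side sketch of the new interface. It assumes the allocator is linked in place of the system malloc and built with the unprefixed names, so the h_free_sized -> free_sized mapping from malloc.h applies; the prototype is repeated locally since pre-C23 <stdlib.h> does not declare free_sized.

    #include <stdlib.h>
    #include <string.h>

    /* Provided by the patch's malloc.h when the unprefixed names are in use;
     * repeated here since pre-C23 <stdlib.h> has no free_sized prototype. */
    void free_sized(void *ptr, size_t expected_size);

    int main(void) {
        size_t n = 64;
        char *buf = malloc(n);
        if (buf == NULL) {
            return 1;
        }
        memset(buf, 'x', n);

        /* Pass the size originally requested from malloc. The allocator rounds
         * it to the matching size class for small allocations, or compares it
         * against the recorded region size for large ones, and calls
         * fatal_error("sized deallocation mismatch") on disagreement. */
        free_sized(buf, n);

        /* A size that resolves to a different size class or region size,
         * e.g. free_sized(buf, 4096) here, would be expected to abort the
         * process instead of silently freeing. */
        return 0;
    }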