treat zero size malloc as unlikely

Calls to malloc with a zero size are extremely rare relative to normal
usage of the API. They generally only come from inefficient C code with
open-coded dynamic array implementations that don't handle zero size as
a special case in their use of malloc/realloc. Efficient code wouldn't
make these allocations, so it doesn't make sense to optimize for the
performance of rare edge cases caused by inefficient code.
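For context, likely() and unlikely() are the conventional GCC/Clang
branch-prediction hint macros built on __builtin_expect: they tell the
compiler which way a branch usually goes so it can lay out the hot path
fall-through. A minimal sketch of how such macros are typically defined
(the exact header and definitions hardened_malloc uses are assumed here,
not shown in this diff):

    /* Conventional GCC/Clang branch hints (assumed definitions; the
     * project keeps macros like these in a utility header).
     * !!(x) normalizes the condition to 0 or 1 before comparison. */
    #define likely(x)   __builtin_expect(!!(x), 1)
    #define unlikely(x) __builtin_expect(!!(x), 0)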
Daniel Micay 2022-01-21 18:21:38 -05:00
parent ae2524bf88
commit 3cffc1e1af


@@ -190,7 +190,7 @@ struct size_info {
 };
 
 static inline struct size_info get_size_info(size_t size) {
-    if (size == 0) {
+    if (unlikely(size == 0)) {
         return (struct size_info){0, 0};
     }
     // size <= 64 is needed for correctness and raising it to size <= 128 is an optimization
@@ -510,7 +510,7 @@ static inline void stats_slab_deallocate(UNUSED struct size_class *c, UNUSED siz
 static inline void *allocate_small(unsigned arena, size_t requested_size) {
     struct size_info info = get_size_info(requested_size);
-    size_t size = info.size ? info.size : 16;
+    size_t size = likely(info.size) ? info.size : 16;
     struct size_class *c = &ro.size_class_metadata[arena][info.class];
     size_t slots = get_slots(info.class);
@@ -670,7 +670,7 @@ static inline void deallocate_small(void *p, const size_t *expected_size) {
         fatal_error("sized deallocation mismatch (small)");
     }
     bool is_zero_size = size == 0;
-    if (is_zero_size) {
+    if (unlikely(is_zero_size)) {
         size = 16;
     }
     size_t slots = get_slots(class);
@@ -692,7 +692,7 @@ static inline void deallocate_small(void *p, const size_t *expected_size) {
         fatal_error("double free");
     }
-    if (!is_zero_size) {
+    if (likely(!is_zero_size)) {
         check_canary(metadata, p, size);
         if (ZERO_ON_FREE) {
@@ -1581,7 +1581,7 @@ static inline void memory_corruption_check_small(const void *p) {
     struct size_class *c = &ro.size_class_metadata[size_class_info.arena][class];
     size_t size = size_classes[class];
     bool is_zero_size = size == 0;
-    if (is_zero_size) {
+    if (unlikely(is_zero_size)) {
         size = 16;
     }
     size_t slab_size = get_slab_size(get_slots(class), size);
@@ -1600,7 +1600,7 @@ static inline void memory_corruption_check_small(const void *p) {
         fatal_error("invalid malloc_usable_size");
     }
-    if (!is_zero_size) {
+    if (likely(!is_zero_size)) {
         check_canary(metadata, p, size);
     }