micro-optimize initialization with arenas

Daniel Micay 2019-04-10 08:06:56 -04:00
parent 62c73d8b41
commit d5f18c47b3
1 changed file with 9 additions and 6 deletions

@@ -468,12 +468,6 @@ static inline void *allocate_small(size_t requested_size) {
     struct size_info info = get_size_info(requested_size);
     size_t size = info.size ? info.size : 16;
 
-#if N_ARENA > 1
-    if (unlikely(thread_arena >= N_ARENA)) {
-        thread_arena = thread_arena_counter++ % N_ARENA;
-    }
-#endif
-
     struct size_class *c = &ro.size_class_metadata[thread_arena][info.class];
     size_t slots = size_class_slots[info.class];
     size_t slab_size = get_slab_size(slots, size);
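
The deletion above is safe only because of an ordering invariant the hunk itself doesn't show: the public entry points run init() before any slab work, so thread_arena is already assigned by the time allocate_small() executes. A minimal standalone sketch of that ordering, assuming a hypothetical simplified h_malloc() entry point and eliding the actual slab allocation:

#include <stdatomic.h>
#include <stddef.h>
#include <stdio.h>

#define N_ARENA 4

static _Thread_local unsigned thread_arena = N_ARENA; /* sentinel: no arena yet */
static _Atomic unsigned thread_arena_counter;

static inline void init(void) {
    if (thread_arena >= N_ARENA) {
        /* first call on this thread: assign an arena round-robin */
        thread_arena = thread_arena_counter++ % N_ARENA;
    }
}

static void *allocate_small(size_t size) {
    /* the arena check removed by this commit is no longer needed here:
       init() has always run by this point */
    printf("size %zu -> arena %u\n", size, thread_arena);
    return NULL; /* real slab allocation elided in this sketch */
}

void *h_malloc(size_t size) { /* hypothetical, simplified entry point */
    init();
    return allocate_small(size);
}

int main(void) {
    h_malloc(32);
    return 0;
}
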
@@ -1136,9 +1130,18 @@ COLD static void init_slow_path(void) {
 }
 
 static inline void init(void) {
+#if N_ARENA > 1
+    if (unlikely(thread_arena >= N_ARENA)) {
+        thread_arena = thread_arena_counter++ % N_ARENA;
+        if (unlikely(!is_init())) {
+            init_slow_path();
+        }
+    }
+#else
     if (unlikely(!is_init())) {
         init_slow_path();
     }
+#endif
 }
 
 // trigger early initialization to set up pthread_atfork and protect state as soon as possible
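
The rewritten init() folds the process-wide is_init() check under the per-thread arena check. Since thread_arena starts at the out-of-range sentinel N_ARENA in every thread, the outer branch fires exactly once per thread, and only a thread without an arena can be the first caller in the process; after that, the steady-state path through init() costs a single well-predicted branch instead of two. A runnable sketch of the pattern, with simplified stand-ins for is_init() and init_slow_path() and the unlikely() hints dropped (the real slow path also serializes concurrent initializers, which this sketch does not):

#include <pthread.h>
#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

#define N_ARENA 4

static _Thread_local unsigned thread_arena = N_ARENA; /* sentinel */
static _Atomic unsigned thread_arena_counter;
static atomic_bool initialized;

static bool is_init(void) {
    return atomic_load(&initialized);
}

static void init_slow_path(void) {
    /* process-wide setup would happen here; the real version
       takes a lock so concurrent first callers are serialized */
    atomic_store(&initialized, true);
}

static inline void init(void) {
    if (thread_arena >= N_ARENA) {          /* first call on this thread */
        thread_arena = thread_arena_counter++ % N_ARENA;
        if (!is_init()) {                    /* reached at most once per thread */
            init_slow_path();
        }
    }
}

static void *worker(void *arg) {
    (void)arg;
    init();
    printf("thread got arena %u\n", thread_arena);
    return NULL;
}

int main(void) {
    pthread_t t[2];
    for (int i = 0; i < 2; i++) pthread_create(&t[i], NULL, worker, NULL);
    for (int i = 0; i < 2; i++) pthread_join(t[i], NULL);
    return 0;
}
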