initialize size class CSPRNGs from init CSPRNG

This avoids making a huge number of getrandom system calls during
initialization. The init CSPRNG is unmapped before initialization
finishes, and the size class CSPRNGs are still reseeded from the OS
afterwards. The purpose of the independent CSPRNGs is simply to avoid
the massive performance hit of synchronization, so there's no harm in
seeding them this way.

Keeping the init CSPRNG around and reseeding from it would defeat the
purpose of reseeding, and reseeding from the OS isn't a measurable
performance issue since it can simply be tuned to happen less often.
Daniel Micay 2019-04-15 06:29:57 -04:00
parent c7e2cb82f4
commit a13db3fc68
3 changed files with 13 additions and 2 deletions
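
As a rough, stand-alone illustration of the seeding flow described in the
commit message above (a toy sketch, not the allocator's code: a splitmix64
stand-in replaces the ChaCha-based random_state from the diff below, and the
arena/size class counts are made-up placeholders), initialization needs a
single getrandom() call for the init generator, and every per-size-class
generator is then seeded from that generator's output:

#include <stdint.h>
#include <stdio.h>
#include <sys/random.h>

// Toy stand-in for struct random_state; the real one is ChaCha-based.
struct toy_rng {
    uint64_t state;
};

// Stand-in for random_state_init(): seed directly from the OS.
static void toy_rng_init_from_os(struct toy_rng *rng) {
    if (getrandom(&rng->state, sizeof(rng->state), 0) < 0) {
        rng->state = 1; // real code handles failure properly; keep the sketch simple
    }
}

// splitmix64 step, a placeholder for drawing ChaCha keystream output.
static uint64_t toy_rng_next(struct toy_rng *rng) {
    uint64_t z = (rng->state += 0x9e3779b97f4a7c15ull);
    z = (z ^ (z >> 30)) * 0xbf58476d1ce4e5b9ull;
    z = (z ^ (z >> 27)) * 0x94d049bb133111ebull;
    return z ^ (z >> 31);
}

// Stand-in for random_state_init_from_random_state(): seed a child
// generator from the parent generator's output, not from getrandom().
static void toy_rng_init_from_rng(struct toy_rng *child, struct toy_rng *parent) {
    child->state = toy_rng_next(parent);
}

#define TOY_ARENAS 4        // placeholder count, not the real arena count
#define TOY_SIZE_CLASSES 33 // placeholder count, not the real size class count

int main(void) {
    struct toy_rng init_rng;
    toy_rng_init_from_os(&init_rng); // the only syscall needed for seeding

    struct toy_rng size_class_rng[TOY_ARENAS][TOY_SIZE_CLASSES];
    for (int arena = 0; arena < TOY_ARENAS; arena++) {
        for (int class = 0; class < TOY_SIZE_CLASSES; class++) {
            toy_rng_init_from_rng(&size_class_rng[arena][class], &init_rng);
        }
    }

    printf("seeded %d generators with one getrandom() call instead of %d\n",
           TOY_ARENAS * TOY_SIZE_CLASSES, TOY_ARENAS * TOY_SIZE_CLASSES);
    return 0;
}

The diff below does the same with the real API: random_state_init()
presumably continues to seed the single init CSPRNG from the OS, while the
new random_state_init_from_random_state() replaces the per-size-class
random_state_init() calls.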

@@ -1093,7 +1093,7 @@ COLD static void init_slow_path(void) {
     struct region_allocator *ra = ro.region_allocator;
     mutex_init(&ra->lock);
-    random_state_init(&ra->rng);
+    random_state_init_from_random_state(&ra->rng, rng);
     ro.regions[0] = allocator_state->regions_a;
     ro.regions[1] = allocator_state->regions_b;
     ra->regions = ro.regions[0];
@@ -1116,7 +1116,7 @@ COLD static void init_slow_path(void) {
             struct size_class *c = &ro.size_class_metadata[arena][class];
             mutex_init(&c->lock);
-            random_state_init(&c->rng);
+            random_state_init_from_random_state(&c->rng, rng);
             size_t bound = (REAL_CLASS_REGION_SIZE - CLASS_REGION_SIZE) / PAGE_SIZE - 1;
             size_t gap = (get_random_u64_uniform(rng, bound) + 1) * PAGE_SIZE;

@@ -44,6 +44,16 @@ void random_state_init(struct random_state *state) {
     state->reseed = 0;
 }
 
+void random_state_init_from_random_state(struct random_state *state, struct random_state *source) {
+    u8 rnd[CHACHA_KEY_SIZE + CHACHA_IV_SIZE];
+    get_random_bytes(source, rnd, sizeof(rnd));
+    chacha_keysetup(&state->ctx, rnd);
+    chacha_ivsetup(&state->ctx, rnd + CHACHA_KEY_SIZE);
+    chacha_keystream_bytes(&state->ctx, state->cache, RANDOM_CACHE_SIZE);
+    state->index = 0;
+    state->reseed = 0;
+}
+
 static void refill(struct random_state *state) {
     if (state->reseed < RANDOM_RESEED_SIZE) {
         chacha_keystream_bytes(&state->ctx, state->cache, RANDOM_CACHE_SIZE);
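
The hunk above cuts off before the rest of refill(); judging from the
counters it uses, the remainder presumably looks roughly like the sketch
below (the exact accounting is an assumption). This is what backs the
statement in the commit message that the size class CSPRNGs are still
reseeded from the OS: once enough keystream has been handed out,
random_state_init() runs again and seeds from the OS, regardless of how
the state was first initialized.

static void refill(struct random_state *state) {
    if (state->reseed < RANDOM_RESEED_SIZE) {
        // Refill the output cache from the current ChaCha keystream and
        // account for the bytes handed out since the last (re)seed.
        chacha_keystream_bytes(&state->ctx, state->cache, RANDOM_CACHE_SIZE);
        state->index = 0;
        state->reseed += RANDOM_CACHE_SIZE;
    } else {
        // Past the reseed threshold: reinitialize from the OS.
        random_state_init(state);
    }
}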

@@ -15,6 +15,7 @@ struct random_state {
 };
 
 void random_state_init(struct random_state *state);
+void random_state_init_from_random_state(struct random_state *state, struct random_state *source);
 void get_random_bytes(struct random_state *state, void *buf, size_t size);
 u16 get_random_u16(struct random_state *state);
 u16 get_random_u16_uniform(struct random_state *state, u16 bound);
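
For reference, the body of struct random_state falls outside the hunk
above. Based only on the fields random.c touches, it presumably holds
roughly the following (field order and exact types are guesses; chacha_ctx
stands for whatever context type chacha_keysetup()/chacha_ivsetup()
operate on):

struct random_state {
    size_t index;                // read position within the keystream cache
    size_t reseed;               // bytes served since the last OS (re)seed
    chacha_ctx ctx;              // ChaCha cipher state (key + nonce)
    u8 cache[RANDOM_CACHE_SIZE]; // buffered keystream served by get_random_*()
};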