hardened_malloc/random.c

#include <errno.h>
#include <string.h>

#include "chacha.h"
#include "random.h"
#include "util.h"

#if __has_include(<sys/random.h>)
// glibc 2.25 and later
#include <sys/random.h>
#else
#include <unistd.h>
#include <sys/syscall.h>

static ssize_t getrandom(void *buf, size_t buflen, unsigned int flags) {
    return syscall(SYS_getrandom, buf, buflen, flags);
}
#endif
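
// Fill buf with size bytes from the kernel, looping over short reads and
// retrying when getrandom is interrupted by a signal; any other failure is fatal.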
static void get_random_seed(void *buf, size_t size) {
    while (size) {
        ssize_t r;

        do {
            r = getrandom(buf, size, 0);
        } while (r == -1 && errno == EINTR);

        if (r <= 0) {
            fatal_error("getrandom failed");
        }

        buf = (char *)buf + r;
        size -= r;
    }
}

void random_state_init(struct random_state *state) {
    u8 rnd[CHACHA_KEY_SIZE + CHACHA_IV_SIZE];
    get_random_seed(rnd, sizeof(rnd));
    chacha_keysetup(&state->ctx, rnd);
    chacha_ivsetup(&state->ctx, rnd + CHACHA_KEY_SIZE);
    state->index = RANDOM_CACHE_SIZE;
    state->reseed = 0;
}
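
// Derive a fresh state from an existing one: the key and IV come from the
// source state's keystream rather than the kernel (presumably to avoid an
// extra getrandom call for each derived state).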
void random_state_init_from_random_state(struct random_state *state, struct random_state *source) {
    u8 rnd[CHACHA_KEY_SIZE + CHACHA_IV_SIZE];
    get_random_bytes(source, rnd, sizeof(rnd));
    chacha_keysetup(&state->ctx, rnd);
    chacha_ivsetup(&state->ctx, rnd + CHACHA_KEY_SIZE);
    state->index = RANDOM_CACHE_SIZE;
    state->reseed = 0;
}
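
// Refill the keystream cache and reset the read index; once RANDOM_RESEED_SIZE
// bytes have been produced from the current key, reseed from the kernel first
// (presumably to bound how much output is ever derived from a single ChaCha key).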
static void refill(struct random_state *state) {
    if (state->reseed >= RANDOM_RESEED_SIZE) {
        random_state_init(state);
    }
    chacha_keystream_bytes(&state->ctx, state->cache, RANDOM_CACHE_SIZE);
    state->index = 0;
    state->reseed += RANDOM_CACHE_SIZE;
}

void get_random_bytes(struct random_state *state, void *buf, size_t size) {
    // avoid needless copying to and from the cache as an optimization
    if (size > RANDOM_CACHE_SIZE / 2) {
        chacha_keystream_bytes(&state->ctx, buf, size);
        return;
    }

    while (size) {
        if (state->index == RANDOM_CACHE_SIZE) {
            refill(state);
        }

        size_t remaining = RANDOM_CACHE_SIZE - state->index;
        size_t copy_size = min(size, remaining);
        memcpy(buf, state->cache + state->index, copy_size);
        state->index += copy_size;

        buf = (char *)buf + copy_size;
        size -= copy_size;
    }
}

u16 get_random_u16(struct random_state *state) {
    u16 value;

    unsigned remaining = RANDOM_CACHE_SIZE - state->index;
    if (remaining < sizeof(value)) {
        refill(state);
    }

    memcpy(&value, state->cache + state->index, sizeof(value));
    state->index += sizeof(value);
    return value;
}

// See Fast Random Integer Generation in an Interval by Daniel Lemire
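//
// The idea, briefly: for a uniform 16-bit x, the product x * bound lies in
// [0, bound * 2^16), so its top 16 bits give a nearly uniform value in
// [0, bound). The small residual bias is removed by rejecting draws whose low
// 16 bits ("leftover") fall below 2^16 % bound; the outer leftover < bound
// test is just a fast path, since the threshold is always smaller than bound.
// Example: bound = 3 gives threshold 65536 % 3 = 1, so only a leftover of 0
// forces a retry.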
u16 get_random_u16_uniform(struct random_state *state, u16 bound) {
    u32 random = get_random_u16(state);
    u32 multiresult = random * bound;
    u16 leftover = multiresult;
    if (leftover < bound) {
        // negate in 16-bit arithmetic so the threshold works out to 2^16 % bound;
        // with plain -bound, integer promotion makes the remainder 0 and the
        // debiasing loop below would never run
        u16 threshold = (u16)-bound % bound;
        while (leftover < threshold) {
            random = get_random_u16(state);
            multiresult = random * bound;
            leftover = (u16)multiresult;
        }
    }
    return multiresult >> 16;
}

u64 get_random_u64(struct random_state *state) {
    u64 value;

    unsigned remaining = RANDOM_CACHE_SIZE - state->index;
    if (remaining < sizeof(value)) {
        refill(state);
    }

    memcpy(&value, state->cache + state->index, sizeof(value));
    state->index += sizeof(value);
    return value;
}

// See Fast Random Integer Generation in an Interval by Daniel Lemire
u64 get_random_u64_uniform(struct random_state *state, u64 bound) {
    u128 random = get_random_u64(state);
    u128 multiresult = random * bound;
    u64 leftover = multiresult;
    if (leftover < bound) {
        // u64 is not subject to integer promotion, so -bound wraps to
        // 2^64 - bound and the threshold comes out as 2^64 % bound
        u64 threshold = -bound % bound;
        while (leftover < threshold) {
            random = get_random_u64(state);
            multiresult = random * bound;
            leftover = multiresult;
        }
    }
    return multiresult >> 64;
}
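
// Example usage (a sketch, assuming the struct random_state definition and
// the u16/u64 typedefs and constants from random.h):
//
//     struct random_state rng;
//     random_state_init(&rng);
//
//     u64 raw = get_random_u64(&rng);                // raw 64-bit output
//     u16 slot = get_random_u16_uniform(&rng, 256);  // unbiased value in [0, 256)
//
//     struct random_state child;
//     random_state_init_from_random_state(&child, &rng);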