#include <errno.h>

#include "memory.h"
#include "pages.h"
#include "util.h"
static uintptr_t alignment_ceiling(uintptr_t s, uintptr_t alignment) {
    return ((s) + (alignment - 1)) & ((~alignment) + 1);
}
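
// Map usable_size bytes bracketed by a guard_size-byte guard region on each
// side. memory_map is presumed to return an inaccessible (PROT_NONE)
// mapping, so the usable region is only made readable/writable when
// unprotect is true; the guards stay inaccessible either way. The mapping
// is labelled with name via memory_set_name.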
void *allocate_pages(size_t usable_size, size_t guard_size, bool unprotect, const char *name) {
    // total mapping size: the usable region plus a guard region on each side
    size_t real_size;
    if (unlikely(__builtin_add_overflow(usable_size, guard_size * 2, &real_size))) {
        errno = ENOMEM;
        return NULL;
    }

    void *real = memory_map(real_size);
    if (unlikely(real == NULL)) {
        return NULL;
    }
    memory_set_name(real, real_size, name);

    // the usable region starts after the leading guard region
    void *usable = (char *)real + guard_size;
    if (unprotect && unlikely(memory_protect_rw(usable, usable_size))) {
        memory_unmap(real, real_size);
        return NULL;
    }

    return usable;
}
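
// Release a region returned by allocate_pages, including the guard region
// on each side. usable_size and guard_size must match the values passed at
// allocation time.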
void deallocate_pages(void *usable, size_t usable_size, size_t guard_size) {
    memory_unmap((char *)usable - guard_size, usable_size + guard_size * 2);
}
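
// A minimal usage sketch, assuming PAGE_SIZE-granular sizes and the memory.h
// helpers behaving as described above (the sizes and the "slab" label are
// only illustrative):
//
//     void *p = allocate_pages(4 * PAGE_SIZE, PAGE_SIZE, true, "slab");
//     if (p != NULL) {
//         // p points to 4 readable/writable pages with an inaccessible
//         // guard region on each side
//         deallocate_pages(p, 4 * PAGE_SIZE, PAGE_SIZE);
//     }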

// Map usable_size bytes aligned to the requested alignment (expected to be a
// power of two and a multiple of the page size), again bracketed by
// guard_size-byte guard regions. The aligned region is made
// readable/writable before being returned.
void *allocate_pages_aligned(size_t usable_size, size_t alignment, size_t guard_size) {
    usable_size = PAGE_CEILING(usable_size);
    if (unlikely(!usable_size)) {
        // a zero result means the request was zero or the rounding wrapped around
        errno = ENOMEM;
        return NULL;
    }

    // over-allocate by alignment - PAGE_SIZE so that some page-aligned offset
    // inside the mapping is guaranteed to sit on an alignment boundary
    size_t alloc_size;
    if (unlikely(__builtin_add_overflow(usable_size, alignment - PAGE_SIZE, &alloc_size))) {
        errno = ENOMEM;
        return NULL;
    }

    // add the two guard regions
    size_t real_alloc_size;
    if (unlikely(__builtin_add_overflow(alloc_size, guard_size * 2, &real_alloc_size))) {
        errno = ENOMEM;
        return NULL;
    }

    void *real = memory_map(real_alloc_size);
    if (unlikely(real == NULL)) {
        return NULL;
    }
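
    // Layout before trimming (not to scale):
    //
    //   real                                           real + real_alloc_size
    //   | guard | lead |  usable_size (starting at base)  | trail | guard |
    //
    // lead_size advances usable to the next alignment boundary; lead_size
    // bytes are then unmapped from the front of the mapping and trail_size
    // bytes from the back, leaving exactly guard_size bytes of inaccessible
    // memory on each side of base.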
    void *usable = (char *)real + guard_size;

    size_t lead_size = alignment_ceiling((uintptr_t)usable, alignment) - (uintptr_t)usable;
    size_t trail_size = alloc_size - lead_size - usable_size;
    void *base = (char *)usable + lead_size;

    if (unlikely(memory_protect_rw(base, usable_size))) {
        memory_unmap(real, real_alloc_size);
        return NULL;
    }

    // trim the excess lead pages from the front of the mapping
    if (lead_size) {
        if (unlikely(memory_unmap(real, lead_size))) {
            memory_unmap(real, real_alloc_size);
            return NULL;
        }
    }

    // trim the excess trail pages from the back of the mapping
    if (trail_size) {
        if (unlikely(memory_unmap((char *)base + usable_size + guard_size, trail_size))) {
            memory_unmap(real, real_alloc_size);
            return NULL;
        }
    }

    return base;
}
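
// A sketch of an aligned allocation (illustrative sizes; base comes back
// readable/writable, and deallocate_pages is assumed to be the matching
// release path since the trimmed mapping is guard | usable | guard):
//
//     void *base = allocate_pages_aligned(8 * PAGE_SIZE, 4 * PAGE_SIZE, PAGE_SIZE);
//     if (base != NULL) {
//         // ((uintptr_t)base % (4 * PAGE_SIZE)) == 0
//         deallocate_pages(base, 8 * PAGE_SIZE, PAGE_SIZE);
//     }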