#include <assert.h>
#include <stddef.h>
#include <stdio.h>

#include <sys/mman.h>

#include "alignment.h"
#include "arena.h"
  6
  7/**********************************************************
  8 * General purpose helper functions and macros
  9 */
 10
 11#define is_power_of_two(v) (v && !((v & (v - 1))))
 12// http://www.graphics.stanford.edu/~seander/bithacks.html#DetermineIfPowerOf2
 13
 14#define max(a, b) (((a) > (b))? (a) : (b))
 15
 16static void* alloc_mem(unsigned size)
 17{
 18    void* result = mmap(NULL, size, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
 19    if (result == MAP_FAILED) {
 20        perror("Arena allocation");
 21        return nullptr;
 22    }
 23    return result;
 24}
 25
 26static inline void free_mem(void* ptr, unsigned size)
 27{
 28    munmap(ptr, size);
 29}
 30
 31/**********************************************************
 32 * Arena and Region structures
 33 */
 34
// A region is one contiguous mmap'd chunk of storage; regions form a
// singly linked list hanging off the arena.
// NOTE(review): the `_Region` tag is a reserved identifier (leading
// underscore + capital); renaming would also touch arena.h — confirm.
typedef struct _Region Region;

struct _Region {
    Region* next;       // next region in the arena's list, or nullptr
    unsigned tail;      // offset into `data` of the first free byte
    unsigned capacity;  // usable bytes in `data` (set from the mmap size)
    char data[1];       // allocation storage; real extent decided at mmap time
};

// Bytes of bookkeeping that precede the usable `data` storage.
#define REGION_HEADER_SIZE offsetof(Region, data)
 45
// Arena handle. The first region is embedded so a single mmap serves
// both the bookkeeping and the initial storage; `last` always points
// at the region new allocations are tried against first.
struct _Arena {
    Region* last;                  // tail of the region list (never null)
    unsigned new_region_capacity;  // minimum capacity for overflow regions
    Region  first;  // arena embeds the first region
};

// Bytes of the arena mapping consumed before first.data begins.
#define ARENA_HEADER_SIZE (offsetof(Arena, first) + REGION_HEADER_SIZE)
 53
 54
 55/**********************************************************
 56 * Internal functions
 57 */
 58
 59static inline void free_arena(Arena* arena)
 60{
 61    free_mem(arena, arena->first.capacity + ARENA_HEADER_SIZE);
 62}
 63
 64static inline void free_region(Region* region)
 65{
 66    free_mem(region, region->capacity + REGION_HEADER_SIZE);
 67}
 68
 69static Region* create_region(unsigned capacity)
 70{
 71    unsigned mem_size = align_unsigned_to_page(capacity + REGION_HEADER_SIZE);
 72    Region* region = alloc_mem(mem_size);
 73    if (region) {
 74        region->next = nullptr;
 75        region->tail = 0;
 76        region->capacity = mem_size - REGION_HEADER_SIZE;
 77    }
 78    return region;
 79}
 80
 81static void* region_alloc(Region* region, unsigned size, unsigned alignment)
 82/*
 83 * Allocate aligned `size` bytes from `region`.
 84 * Return nullptr if `region` has no space available.
 85 */
 86{
 87    assert(size > 0);
 88    assert(alignment <= alignof(max_align_t) && is_power_of_two(alignment));
 89
 90    unsigned start = align_unsigned(region->tail + offsetof(Region, data), alignment) - offsetof(Region, data);
 91    if (start >= region->capacity) {
 92        return nullptr;
 93    }
 94
 95    unsigned bytes_available = region->capacity - start;
 96    if (size > bytes_available) {
 97        return nullptr;
 98    }
 99
100    void* result = &region->data[start];
101    region->tail = start + size;
102    return result;
103}
104
105static void* new_region_alloc(Arena* arena, unsigned size, unsigned alignment)
106/*
107 * Create new region and allocate aligned `size` bytes from it.
108 */
109{
110    Region* new_region = create_region(max(size, arena->new_region_capacity));
111    if (!new_region) {
112        return nullptr;
113    }
114    arena->last->next = new_region;
115    arena->last = new_region;
116    return region_alloc(new_region, size, alignment);
117}
118
119/**********************************************************
120 * Public API
121 */
122
123Arena* create_arena(unsigned capacity)
124{
125    unsigned mem_size = align_unsigned_to_page(capacity + ARENA_HEADER_SIZE);
126    Arena* arena = (Arena*) alloc_mem(mem_size);
127    if (arena) {
128        arena->last = &arena->first;
129
130        arena->first.next = nullptr;
131        arena->first.tail = 0;
132        arena->first.capacity = mem_size - ARENA_HEADER_SIZE;
133        arena->new_region_capacity = capacity;
134    }
135    return arena;
136}
137
138void delete_arena(Arena* arena)
139{
140    for (Region* region = arena->first.next; region != nullptr;) {
141
142        assert(region->tail <= region->capacity);
143
144        Region* next_region = region->next;
145        free_region(region);
146        region = next_region;
147    }
148
149    assert(arena->first.tail <= arena->first.capacity);
150
151    free_arena(arena);
152}
153
154void set_region_capacity(Arena* arena, unsigned capacity)
155{
156    arena->new_region_capacity = capacity;
157}
158
159void* _arena_alloc(Arena* arena, unsigned size, unsigned alignment)
160{
161    void* result = region_alloc(arena->last, size, alignment);
162    if (result) {
163        return result;
164    } else {
165        return new_region_alloc(arena, size, alignment);
166    }
167}
168
169void* _arena_fit(Arena* arena, unsigned size, unsigned alignment)
170{
171    for (Region* region = &arena->first; region != nullptr; region = region->next) {
172        void* result = region_alloc(region, size, alignment);
173        if (result) {
174            return result;
175        }
176    }
177    return new_region_alloc(arena, size, alignment);
178}
179
180void arena_print(FILE* fp, Arena* arena)
181{
182    fprintf(fp, "Arena at %p\n", (void*) arena);
183    fprintf(fp, "last region: %p\n", (void*) arena->last);
184    fprintf(fp, "new_region_capacity: %u\n", arena->new_region_capacity);
185    fprintf(fp, "first region -> next: %p\n", (void*) arena->first.next);
186    fprintf(fp, "first region -> tail: %u\n", arena->first.tail);
187    fprintf(fp, "first region -> capacity: %u\n", arena->first.capacity);
188    for (Region* region = arena->first.next; region != nullptr; region = region->next) {
189        fprintf(fp, "\nRegion %p\n", (void*) region);
190        fprintf(fp, "next region: %p\n", (void*) region->next);
191        fprintf(fp, "tail: %u\n", region->tail);
192        fprintf(fp, "capacity: %u\n", region->capacity);
193    }
194}