const std = @import("index.zig");
const debug = std.debug;
const assert = debug.assert;
const mem = std.mem;
const os = std.os;
const builtin = @import("builtin");
const Os = builtin.Os;
const c = std.c;
const maxInt = std.math.maxInt;

const Allocator = mem.Allocator;
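
/// Allocator backed by the libc heap: alloc/realloc/free forward to c.malloc,
/// c.realloc and c.free, so it is only usable when the program links libc.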
pub const c_allocator = &c_allocator_state;
var c_allocator_state = Allocator{
    .allocFn = cAlloc,
    .reallocFn = cRealloc,
    .freeFn = cFree,
};

fn cAlloc(self: *Allocator, n: usize, alignment: u29) ![]u8 {
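    // c.malloc only guarantees alignment suitable for the largest fundamental C type,
    // so anything stricter cannot be honored here.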
    assert(alignment <= @alignOf(c_longdouble));
    return if (c.malloc(n)) |buf| @ptrCast([*]u8, buf)[0..n] else error.OutOfMemory;
}

fn cRealloc(self: *Allocator, old_mem: []u8, new_size: usize, alignment: u29) ![]u8 {
    const old_ptr = @ptrCast(*c_void, old_mem.ptr);
    if (c.realloc(old_ptr, new_size)) |buf| {
        return @ptrCast([*]u8, buf)[0..new_size];
    } else if (new_size <= old_mem.len) {
        return old_mem[0..new_size];
    } else {
        return error.OutOfMemory;
    }
}

fn cFree(self: *Allocator, old_mem: []u8) void {
    const old_ptr = @ptrCast(*c_void, old_mem.ptr);
    c.free(old_ptr);
}

/// This allocator makes a syscall directly for every allocation and free.
/// Thread-safe and lock-free.
pub const DirectAllocator = struct {
    allocator: Allocator,
    heap_handle: ?HeapHandle,

    const HeapHandle = if (builtin.os == Os.windows) os.windows.HANDLE else void;

    pub fn init() DirectAllocator {
        return DirectAllocator{
            .allocator = Allocator{
                .allocFn = alloc,
                .reallocFn = realloc,
                .freeFn = free,
            },
            .heap_handle = if (builtin.os == Os.windows) null else {},
        };
    }

    pub fn deinit(self: *DirectAllocator) void {
        switch (builtin.os) {
            Os.windows => if (self.heap_handle) |heap_handle| {
                _ = os.windows.HeapDestroy(heap_handle);
            },
            else => {},
        }
    }

    fn alloc(allocator: *Allocator, n: usize, alignment: u29) error{OutOfMemory}![]u8 {
        const self = @fieldParentPtr(DirectAllocator, "allocator", allocator);

        switch (builtin.os) {
            Os.linux, Os.macosx, Os.ios, Os.freebsd => {
                const p = os.posix;
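                // If the requested alignment is satisfied by a page boundary, mmap alone
                // is enough; otherwise over-allocate by `alignment` bytes so an aligned
                // address is guaranteed to exist somewhere inside the mapping.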
                const alloc_size = if (alignment <= os.page_size) n else n + alignment;
                const addr = p.mmap(null, alloc_size, p.PROT_READ | p.PROT_WRITE, p.MAP_PRIVATE | p.MAP_ANONYMOUS, -1, 0);
                if (addr == p.MAP_FAILED) return error.OutOfMemory;
                if (alloc_size == n) return @intToPtr([*]u8, addr)[0..n];

                const aligned_addr = (addr & ~usize(alignment - 1)) + alignment;

                // We can unmap the unused portions of our mmap, but we must only
                // pass munmap bytes that exist outside our allocated pages or it
                // will happily eat us too.

                // Since alignment > page_size, we are by definition on a page boundary.
                const unused_start = addr;
                const unused_len = aligned_addr - 1 - unused_start;

                const err = p.munmap(unused_start, unused_len);
                assert(p.getErrno(err) == 0);

                // It is impossible that there is an unoccupied page at the top of our
                // mmap.

                return @intToPtr([*]u8, aligned_addr)[0..n];
            },
            Os.windows => {
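                // Over-allocate so we can both bump the returned address up to the requested
                // alignment and stash the original heap pointer in a usize record placed
                // right after the caller's n bytes; realloc and free read that record back
                // to recover the pointer HeapAlloc actually returned.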
                const amt = n + alignment + @sizeOf(usize);
                const optional_heap_handle = @atomicLoad(?HeapHandle, &self.heap_handle, builtin.AtomicOrder.SeqCst);
                const heap_handle = optional_heap_handle orelse blk: {
                    const hh = os.windows.HeapCreate(0, amt, 0) orelse return error.OutOfMemory;
                    const other_hh = @cmpxchgStrong(?HeapHandle, &self.heap_handle, null, hh, builtin.AtomicOrder.SeqCst, builtin.AtomicOrder.SeqCst) orelse break :blk hh;
                    _ = os.windows.HeapDestroy(hh);
                    break :blk other_hh.?; // can't be null because of the cmpxchg
                };
                const ptr = os.windows.HeapAlloc(heap_handle, 0, amt) orelse return error.OutOfMemory;
                const root_addr = @ptrToInt(ptr);
                const adjusted_addr = mem.alignForward(root_addr, alignment);
                const record_addr = adjusted_addr + n;
                @intToPtr(*align(1) usize, record_addr).* = root_addr;
                return @intToPtr([*]u8, adjusted_addr)[0..n];
            },
            else => @compileError("Unsupported OS"),
        }
    }

    fn realloc(allocator: *Allocator, old_mem: []u8, new_size: usize, alignment: u29) ![]u8 {
        const self = @fieldParentPtr(DirectAllocator, "allocator", allocator);

        switch (builtin.os) {
            Os.linux, Os.macosx, Os.ios, Os.freebsd => {
                if (new_size <= old_mem.len) {
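                    // Shrinking in place: return whole pages past the new end to the OS,
                    // keeping the partial page that still holds live bytes.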
                    const base_addr = @ptrToInt(old_mem.ptr);
                    const old_addr_end = base_addr + old_mem.len;
                    const new_addr_end = base_addr + new_size;
                    const new_addr_end_rounded = mem.alignForward(new_addr_end, os.page_size);
                    if (old_addr_end > new_addr_end_rounded) {
                        _ = os.posix.munmap(new_addr_end_rounded, old_addr_end - new_addr_end_rounded);
                    }
                    return old_mem[0..new_size];
                }

                const result = try alloc(allocator, new_size, alignment);
                mem.copy(u8, result, old_mem);
                return result;
            },
            Os.windows => {
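                // Recover the pointer originally returned by HeapAlloc from the usize
                // record stored just past the caller's bytes, then write a fresh record
                // after reallocating.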
                const old_adjusted_addr = @ptrToInt(old_mem.ptr);
                const old_record_addr = old_adjusted_addr + old_mem.len;
                const root_addr = @intToPtr(*align(1) usize, old_record_addr).*;
                const old_ptr = @intToPtr(*c_void, root_addr);
                const amt = new_size + alignment + @sizeOf(usize);
                const new_ptr = os.windows.HeapReAlloc(self.heap_handle.?, 0, old_ptr, amt) orelse blk: {
                    if (new_size > old_mem.len) return error.OutOfMemory;
                    const new_record_addr = old_record_addr - new_size + old_mem.len;
                    @intToPtr(*align(1) usize, new_record_addr).* = root_addr;
                    return old_mem[0..new_size];
                };
                const offset = old_adjusted_addr - root_addr;
                const new_root_addr = @ptrToInt(new_ptr);
                const new_adjusted_addr = new_root_addr + offset;
                assert(new_adjusted_addr % alignment == 0);
                const new_record_addr = new_adjusted_addr + new_size;
                @intToPtr(*align(1) usize, new_record_addr).* = new_root_addr;
                return @intToPtr([*]u8, new_adjusted_addr)[0..new_size];
            },
            else => @compileError("Unsupported OS"),
        }
    }

    fn free(allocator: *Allocator, bytes: []u8) void {
        const self = @fieldParentPtr(DirectAllocator, "allocator", allocator);

        switch (builtin.os) {
            Os.linux, Os.macosx, Os.ios, Os.freebsd => {
                _ = os.posix.munmap(@ptrToInt(bytes.ptr), bytes.len);
            },
            Os.windows => {
                const record_addr = @ptrToInt(bytes.ptr) + bytes.len;
                const root_addr = @intToPtr(*align(1) usize, record_addr).*;
                const ptr = @intToPtr(*c_void, root_addr);
                _ = os.windows.HeapFree(self.heap_handle.?, 0, ptr);
            },
            else => @compileError("Unsupported OS"),
        }
    }
};

/// This allocator takes an existing allocator, wraps it, and provides an interface
/// where you can allocate without freeing, and then free it all together.
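/// Typical use (a sketch, given any backing `child_allocator: *Allocator`):
///
///     var arena = ArenaAllocator.init(child_allocator);
///     defer arena.deinit();
///     const bytes = try arena.allocator.alloc(u8, 100);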
pub const ArenaAllocator = struct {
    pub allocator: Allocator,

    child_allocator: *Allocator,
    buffer_list: std.LinkedList([]u8),
    end_index: usize,

    const BufNode = std.LinkedList([]u8).Node;

    pub fn init(child_allocator: *Allocator) ArenaAllocator {
        return ArenaAllocator{
            .allocator = Allocator{
                .allocFn = alloc,
                .reallocFn = realloc,
                .freeFn = free,
            },
            .child_allocator = child_allocator,
            .buffer_list = std.LinkedList([]u8).init(),
            .end_index = 0,
        };
    }

    pub fn deinit(self: *ArenaAllocator) void {
        var it = self.buffer_list.first;
        while (it) |node| {
            // this has to occur before the free because the free frees node
            it = node.next;

            self.child_allocator.free(node.data);
        }
    }

    fn createNode(self: *ArenaAllocator, prev_len: usize, minimum_size: usize) !*BufNode {
        const actual_min_size = minimum_size + @sizeOf(BufNode);
        var len = prev_len;
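        // Grow the buffer size roughly 1.5x per iteration and bump it to the next
        // page boundary, until it can hold the BufNode header plus the request.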
        while (true) {
            len += len / 2;
            len += os.page_size - @rem(len, os.page_size);
            if (len >= actual_min_size) break;
        }
        const buf = try self.child_allocator.alignedAlloc(u8, @alignOf(BufNode), len);
        const buf_node_slice = @bytesToSlice(BufNode, buf[0..@sizeOf(BufNode)]);
        const buf_node = &buf_node_slice[0];
        buf_node.* = BufNode{
            .data = buf,
            .prev = null,
            .next = null,
        };
        self.buffer_list.append(buf_node);
        self.end_index = 0;
        return buf_node;
    }

    fn alloc(allocator: *Allocator, n: usize, alignment: u29) ![]u8 {
        const self = @fieldParentPtr(ArenaAllocator, "allocator", allocator);

        var cur_node = if (self.buffer_list.last) |last_node| last_node else try self.createNode(0, n + alignment);
        while (true) {
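            // Each buffer begins with its BufNode header, so skip it and bump-allocate
            // out of the remaining bytes, aligning the current position as needed.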
            const cur_buf = cur_node.data[@sizeOf(BufNode)..];
            const addr = @ptrToInt(cur_buf.ptr) + self.end_index;
            const rem = @rem(addr, alignment);
            const march_forward_bytes = if (rem == 0) 0 else (alignment - rem);
            const adjusted_index = self.end_index + march_forward_bytes;
            const new_end_index = adjusted_index + n;
            if (new_end_index > cur_buf.len) {
                cur_node = try self.createNode(cur_buf.len, n + alignment);
                continue;
            }
            const result = cur_buf[adjusted_index..new_end_index];
            self.end_index = new_end_index;
            return result;
        }
    }

    fn realloc(allocator: *Allocator, old_mem: []u8, new_size: usize, alignment: u29) ![]u8 {
        if (new_size <= old_mem.len) {
            return old_mem[0..new_size];
        } else {
            const result = try alloc(allocator, new_size, alignment);
            mem.copy(u8, result, old_mem);
            return result;
        }
    }

    fn free(allocator: *Allocator, bytes: []u8) void {}
};
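
/// A bump allocator over a caller-provided byte buffer. Individual frees are no-ops;
/// only the most recent allocation can be grown in place by realloc. A usage sketch:
///
///     var buf: [1024]u8 = undefined;
///     var fba = FixedBufferAllocator.init(buf[0..]);
///     const slice = try fba.allocator.alloc(u8, 100);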
pub const FixedBufferAllocator = struct {
    allocator: Allocator,
    end_index: usize,
    buffer: []u8,

    pub fn init(buffer: []u8) FixedBufferAllocator {
        return FixedBufferAllocator{
            .allocator = Allocator{
                .allocFn = alloc,
                .reallocFn = realloc,
                .freeFn = free,
            },
            .buffer = buffer,
            .end_index = 0,
        };
    }

    fn alloc(allocator: *Allocator, n: usize, alignment: u29) ![]u8 {
        const self = @fieldParentPtr(FixedBufferAllocator, "allocator", allocator);
        const addr = @ptrToInt(self.buffer.ptr) + self.end_index;
        const rem = @rem(addr, alignment);
        const march_forward_bytes = if (rem == 0) 0 else (alignment - rem);
        const adjusted_index = self.end_index + march_forward_bytes;
        const new_end_index = adjusted_index + n;
        if (new_end_index > self.buffer.len) {
            return error.OutOfMemory;
        }
        const result = self.buffer[adjusted_index..new_end_index];
        self.end_index = new_end_index;

        return result;
    }

    fn realloc(allocator: *Allocator, old_mem: []u8, new_size: usize, alignment: u29) ![]u8 {
        const self = @fieldParentPtr(FixedBufferAllocator, "allocator", allocator);
        assert(old_mem.len <= self.end_index);
        if (new_size <= old_mem.len) {
            return old_mem[0..new_size];
        } else if (old_mem.ptr == self.buffer.ptr + self.end_index - old_mem.len) {
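            // old_mem is the most recent allocation, so grow it in place when the
            // buffer has room; otherwise the request cannot be satisfied.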
            const start_index = self.end_index - old_mem.len;
            const new_end_index = start_index + new_size;
            if (new_end_index > self.buffer.len) return error.OutOfMemory;
            const result = self.buffer[start_index..new_end_index];
            self.end_index = new_end_index;
            return result;
        } else {
            const result = try alloc(allocator, new_size, alignment);
            mem.copy(u8, result, old_mem);
            return result;
        }
    }

    fn free(allocator: *Allocator, bytes: []u8) void {}
};

pub const ThreadSafeFixedBufferAllocator = blk: {
    if (builtin.single_threaded) {
        break :blk FixedBufferAllocator;
    } else {
        // lock free
        break :blk struct {
            allocator: Allocator,
            end_index: usize,
            buffer: []u8,

            pub fn init(buffer: []u8) ThreadSafeFixedBufferAllocator {
                return ThreadSafeFixedBufferAllocator{
                    .allocator = Allocator{
                        .allocFn = alloc,
                        .reallocFn = realloc,
                        .freeFn = free,
                    },
                    .buffer = buffer,
                    .end_index = 0,
                };
            }

            fn alloc(allocator: *Allocator, n: usize, alignment: u29) ![]u8 {
                const self = @fieldParentPtr(ThreadSafeFixedBufferAllocator, "allocator", allocator);
                var end_index = @atomicLoad(usize, &self.end_index, builtin.AtomicOrder.SeqCst);
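                // Lock-free bump allocation: compute the aligned slice from the loaded
                // end_index and try to publish the new end with cmpxchgWeak, retrying
                // with the updated value whenever another thread won the race.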
                while (true) {
                    const addr = @ptrToInt(self.buffer.ptr) + end_index;
                    const rem = @rem(addr, alignment);
                    const march_forward_bytes = if (rem == 0) 0 else (alignment - rem);
                    const adjusted_index = end_index + march_forward_bytes;
                    const new_end_index = adjusted_index + n;
                    if (new_end_index > self.buffer.len) {
                        return error.OutOfMemory;
                    }
                    end_index = @cmpxchgWeak(usize, &self.end_index, end_index, new_end_index, builtin.AtomicOrder.SeqCst, builtin.AtomicOrder.SeqCst) orelse return self.buffer[adjusted_index..new_end_index];
                }
            }

            fn realloc(allocator: *Allocator, old_mem: []u8, new_size: usize, alignment: u29) ![]u8 {
                if (new_size <= old_mem.len) {
                    return old_mem[0..new_size];
                } else {
                    const result = try alloc(allocator, new_size, alignment);
                    mem.copy(u8, result, old_mem);
                    return result;
                }
            }

            fn free(allocator: *Allocator, bytes: []u8) void {}
        };
    }
};
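
/// Returns an allocator that serves requests from a `size`-byte buffer embedded in the
/// returned struct and falls back to `fallback_allocator` once that buffer is exhausted.
/// A usage sketch (given some `fallback: *Allocator`):
///
///     var stack_alloc = stackFallback(256, fallback);
///     const allocator = stack_alloc.get();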
pub fn stackFallback(comptime size: usize, fallback_allocator: *Allocator) StackFallbackAllocator(size) {
    return StackFallbackAllocator(size){
        .buffer = undefined,
        .fallback_allocator = fallback_allocator,
        .fixed_buffer_allocator = undefined,
        .allocator = Allocator{
            .allocFn = StackFallbackAllocator(size).alloc,
            .reallocFn = StackFallbackAllocator(size).realloc,
            .freeFn = StackFallbackAllocator(size).free,
        },
    };
}

pub fn StackFallbackAllocator(comptime size: usize) type {
    return struct {
        const Self = @This();

        buffer: [size]u8,
        allocator: Allocator,
        fallback_allocator: *Allocator,
        fixed_buffer_allocator: FixedBufferAllocator,

        pub fn get(self: *Self) *Allocator {
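            // (Re)initialize the fixed buffer allocator over this struct's buffer; the
            // returned Allocator tries it first and spills to the fallback on failure.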
            self.fixed_buffer_allocator = FixedBufferAllocator.init(self.buffer[0..]);
            return &self.allocator;
        }

        fn alloc(allocator: *Allocator, n: usize, alignment: u29) ![]u8 {
            const self = @fieldParentPtr(Self, "allocator", allocator);
            return FixedBufferAllocator.alloc(&self.fixed_buffer_allocator.allocator, n, alignment) catch
                self.fallback_allocator.allocFn(self.fallback_allocator, n, alignment);
        }

        fn realloc(allocator: *Allocator, old_mem: []u8, new_size: usize, alignment: u29) ![]u8 {
            const self = @fieldParentPtr(Self, "allocator", allocator);
            const in_buffer = @ptrToInt(old_mem.ptr) >= @ptrToInt(&self.buffer) and
                @ptrToInt(old_mem.ptr) < @ptrToInt(&self.buffer) + self.buffer.len;
            if (in_buffer) {
                return FixedBufferAllocator.realloc(
                    &self.fixed_buffer_allocator.allocator,
                    old_mem,
                    new_size,
                    alignment,
                ) catch {
                    const result = try self.fallback_allocator.allocFn(
                        self.fallback_allocator,
                        new_size,
                        alignment,
                    );
                    mem.copy(u8, result, old_mem);
                    return result;
                };
            }
            return self.fallback_allocator.reallocFn(self.fallback_allocator, old_mem, new_size, alignment);
        }

        fn free(allocator: *Allocator, bytes: []u8) void {
            const self = @fieldParentPtr(Self, "allocator", allocator);
            const in_buffer = @ptrToInt(bytes.ptr) >= @ptrToInt(&self.buffer) and
                @ptrToInt(bytes.ptr) < @ptrToInt(&self.buffer) + self.buffer.len;
            if (!in_buffer) {
                return self.fallback_allocator.freeFn(self.fallback_allocator, bytes);
            }
        }
    };
}

test "c_allocator" {
    if (builtin.link_libc) {
        var slice = c_allocator.alloc(u8, 50) catch return;
        defer c_allocator.free(slice);
        slice = c_allocator.realloc(u8, slice, 100) catch return;
    }
}

test "DirectAllocator" {
    var direct_allocator = DirectAllocator.init();
    defer direct_allocator.deinit();

    const allocator = &direct_allocator.allocator;
    try testAllocator(allocator);
    try testAllocatorAligned(allocator, 16);
    try testAllocatorLargeAlignment(allocator);
}

test "ArenaAllocator" {
    var direct_allocator = DirectAllocator.init();
    defer direct_allocator.deinit();

    var arena_allocator = ArenaAllocator.init(&direct_allocator.allocator);
    defer arena_allocator.deinit();

    try testAllocator(&arena_allocator.allocator);
    try testAllocatorAligned(&arena_allocator.allocator, 16);
    try testAllocatorLargeAlignment(&arena_allocator.allocator);
}

var test_fixed_buffer_allocator_memory: [30000 * @sizeOf(usize)]u8 = undefined;
test "FixedBufferAllocator" {
    var fixed_buffer_allocator = FixedBufferAllocator.init(test_fixed_buffer_allocator_memory[0..]);

    try testAllocator(&fixed_buffer_allocator.allocator);
    try testAllocatorAligned(&fixed_buffer_allocator.allocator, 16);
    try testAllocatorLargeAlignment(&fixed_buffer_allocator.allocator);
}

test "FixedBufferAllocator Reuse memory on realloc" {
    var small_fixed_buffer: [10]u8 = undefined;
    // check if we re-use the memory
    {
        var fixed_buffer_allocator = FixedBufferAllocator.init(small_fixed_buffer[0..]);

        var slice0 = try fixed_buffer_allocator.allocator.alloc(u8, 5);
        assert(slice0.len == 5);
        var slice1 = try fixed_buffer_allocator.allocator.realloc(u8, slice0, 10);
        assert(slice1.ptr == slice0.ptr);
        assert(slice1.len == 10);
        debug.assertError(fixed_buffer_allocator.allocator.realloc(u8, slice1, 11), error.OutOfMemory);
    }
    // check that we don't re-use the memory if it's not the most recent block
    {
        var fixed_buffer_allocator = FixedBufferAllocator.init(small_fixed_buffer[0..]);

        var slice0 = try fixed_buffer_allocator.allocator.alloc(u8, 2);
        slice0[0] = 1;
        slice0[1] = 2;
        var slice1 = try fixed_buffer_allocator.allocator.alloc(u8, 2);
        var slice2 = try fixed_buffer_allocator.allocator.realloc(u8, slice0, 4);
        assert(slice0.ptr != slice2.ptr);
        assert(slice1.ptr != slice2.ptr);
        assert(slice2[0] == 1);
        assert(slice2[1] == 2);
    }
}

test "ThreadSafeFixedBufferAllocator" {
    var fixed_buffer_allocator = ThreadSafeFixedBufferAllocator.init(test_fixed_buffer_allocator_memory[0..]);

    try testAllocator(&fixed_buffer_allocator.allocator);
    try testAllocatorAligned(&fixed_buffer_allocator.allocator, 16);
    try testAllocatorLargeAlignment(&fixed_buffer_allocator.allocator);
}

fn testAllocator(allocator: *mem.Allocator) !void {
    var slice = try allocator.alloc(*i32, 100);
    assert(slice.len == 100);
    for (slice) |*item, i| {
        item.* = try allocator.create(i32);
        item.*.* = @intCast(i32, i);
    }

    slice = try allocator.realloc(*i32, slice, 20000);
    assert(slice.len == 20000);

    for (slice[0..100]) |item, i| {
        assert(item.* == @intCast(i32, i));
        allocator.destroy(item);
    }

    slice = try allocator.realloc(*i32, slice, 50);
    assert(slice.len == 50);
    slice = try allocator.realloc(*i32, slice, 25);
    assert(slice.len == 25);
    slice = try allocator.realloc(*i32, slice, 0);
    assert(slice.len == 0);
    slice = try allocator.realloc(*i32, slice, 10);
    assert(slice.len == 10);

    allocator.free(slice);
}

fn testAllocatorAligned(allocator: *mem.Allocator, comptime alignment: u29) !void {
    // initial
    var slice = try allocator.alignedAlloc(u8, alignment, 10);
    assert(slice.len == 10);
    // grow
    slice = try allocator.alignedRealloc(u8, alignment, slice, 100);
    assert(slice.len == 100);
    // shrink
    slice = try allocator.alignedRealloc(u8, alignment, slice, 10);
    assert(slice.len == 10);
    // go to zero
    slice = try allocator.alignedRealloc(u8, alignment, slice, 0);
    assert(slice.len == 0);
    // realloc from zero
    slice = try allocator.alignedRealloc(u8, alignment, slice, 100);
    assert(slice.len == 100);
    // shrink with shrink
    slice = allocator.alignedShrink(u8, alignment, slice, 10);
    assert(slice.len == 10);
    // shrink to zero
    slice = allocator.alignedShrink(u8, alignment, slice, 0);
    assert(slice.len == 0);
}

fn testAllocatorLargeAlignment(allocator: *mem.Allocator) mem.Allocator.Error!void {
    // Maybe a platform's page_size is actually the same as or
    // very near usize?
    if (os.page_size << 2 > maxInt(usize)) return;

    const USizeShift = @IntType(false, std.math.log2(usize.bit_count));
    const large_align = u29(os.page_size << 2);

    var align_mask: usize = undefined;
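    // align_mask keeps only the bits at or above large_align, so
    // `ptr & align_mask == ptr` holds exactly when ptr is aligned to large_align.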
    _ = @shlWithOverflow(usize, ~usize(0), USizeShift(@ctz(large_align)), &align_mask);

    var slice = try allocator.allocFn(allocator, 500, large_align);
    debug.assert(@ptrToInt(slice.ptr) & align_mask == @ptrToInt(slice.ptr));

    slice = try allocator.reallocFn(allocator, slice, 100, large_align);
    debug.assert(@ptrToInt(slice.ptr) & align_mask == @ptrToInt(slice.ptr));

    slice = try allocator.reallocFn(allocator, slice, 5000, large_align);
    debug.assert(@ptrToInt(slice.ptr) & align_mask == @ptrToInt(slice.ptr));

    slice = try allocator.reallocFn(allocator, slice, 10, large_align);
    debug.assert(@ptrToInt(slice.ptr) & align_mask == @ptrToInt(slice.ptr));

    slice = try allocator.reallocFn(allocator, slice, 20000, large_align);
    debug.assert(@ptrToInt(slice.ptr) & align_mask == @ptrToInt(slice.ptr));

    allocator.free(slice);
}