// zig/src-self-hosted/value.zig

const std = @import("std");
const Type = @import("type.zig").Type;
const log2 = std.math.log2;
const assert = std.debug.assert;
const BigIntConst = std.math.big.int.Const;
const BigIntMutable = std.math.big.int.Mutable;
const Target = std.Target;
const Allocator = std.mem.Allocator;
const Module = @import("Module.zig");
/// This is the raw data, with no bookkeeping, no memory awareness,
/// no de-duplication, and no type system awareness.
/// It's important for this type to be small.
/// This union takes advantage of the fact that the first page of memory
/// is unmapped, giving us 4096 possible enum tags that have no payload.
pub const Value = extern union {
/// If the tag value is less than Tag.no_payload_count, then no pointer
/// dereference is needed.
tag_if_small_enough: usize,
ptr_otherwise: *Payload,
pub const Tag = enum {
// The first section of this enum are tags that require no payload.
u8_type,
i8_type,
isize_type,
usize_type,
c_short_type,
c_ushort_type,
c_int_type,
c_uint_type,
c_long_type,
c_ulong_type,
c_longlong_type,
c_ulonglong_type,
c_longdouble_type,
f16_type,
f32_type,
f64_type,
f128_type,
c_void_type,
bool_type,
void_type,
type_type,
anyerror_type,
comptime_int_type,
comptime_float_type,
noreturn_type,
null_type,
undefined_type,
fn_noreturn_no_args_type,
fn_naked_noreturn_no_args_type,
fn_ccc_void_no_args_type,
single_const_pointer_to_comptime_int_type,
const_slice_u8_type,
undef,
zero,
the_one_possible_value, // when the type only has one possible value
null_value,
bool_true,
bool_false, // See last_no_payload_tag below.
// After this, the tag requires a payload.
ty,
int_u64,
int_i64,
int_big_positive,
int_big_negative,
function,
ref_val,
decl_ref,
elem_ptr,
bytes,
repeated, // the value is a value repeated some number of times
pub const last_no_payload_tag = Tag.bool_false;
pub const no_payload_count = @enumToInt(last_no_payload_tag) + 1;
};
pub fn initTag(comptime small_tag: Tag) Value {
comptime assert(@enumToInt(small_tag) < Tag.no_payload_count);
return .{ .tag_if_small_enough = @enumToInt(small_tag) };
}
pub fn initPayload(payload: *Payload) Value {
assert(@enumToInt(payload.tag) >= Tag.no_payload_count);
return .{ .ptr_otherwise = payload };
}
pub fn tag(self: Value) Tag {
if (self.tag_if_small_enough < Tag.no_payload_count) {
return @intToEnum(Tag, @intCast(@TagType(Tag), self.tag_if_small_enough));
} else {
return self.ptr_otherwise.tag;
}
}
pub fn cast(self: Value, comptime T: type) ?*T {
if (self.tag_if_small_enough < Tag.no_payload_count)
return null;
const expected_tag = std.meta.fieldInfo(T, "base").default_value.?.tag;
if (self.ptr_otherwise.tag != expected_tag)
return null;
return @fieldParentPtr(T, "base", self.ptr_otherwise);
}
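// Sketch of the tag-packing scheme described above, assuming Payload.Int_u64
// is the payload struct declared later in this file: a no-payload tag
// round-trips through initTag/tag, and cast returns null because there is
// no payload to point at.
test "no-payload tag round-trip" {
    const val = initTag(.bool_true);
    std.testing.expect(val.tag() == .bool_true);
    std.testing.expect(val.cast(Payload.Int_u64) == null);
}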
pub fn format(
self: Value,
comptime fmt: []const u8,
options: std.fmt.FormatOptions,
out_stream: var,
) !void {
comptime assert(fmt.len == 0);
var val = self;
while (true) switch (val.tag()) {
.u8_type => return out_stream.writeAll("u8"),
.i8_type => return out_stream.writeAll("i8"),
.isize_type => return out_stream.writeAll("isize"),
.usize_type => return out_stream.writeAll("usize"),
.c_short_type => return out_stream.writeAll("c_short"),
.c_ushort_type => return out_stream.writeAll("c_ushort"),
.c_int_type => return out_stream.writeAll("c_int"),
.c_uint_type => return out_stream.writeAll("c_uint"),
.c_long_type => return out_stream.writeAll("c_long"),
.c_ulong_type => return out_stream.writeAll("c_ulong"),
.c_longlong_type => return out_stream.writeAll("c_longlong"),
.c_ulonglong_type => return out_stream.writeAll("c_ulonglong"),
.c_longdouble_type => return out_stream.writeAll("c_longdouble"),
.f16_type => return out_stream.writeAll("f16"),
.f32_type => return out_stream.writeAll("f32"),
.f64_type => return out_stream.writeAll("f64"),
.f128_type => return out_stream.writeAll("f128"),
.c_void_type => return out_stream.writeAll("c_void"),
.bool_type => return out_stream.writeAll("bool"),
.void_type => return out_stream.writeAll("void"),
.type_type => return out_stream.writeAll("type"),
.anyerror_type => return out_stream.writeAll("anyerror"),
.comptime_int_type => return out_stream.writeAll("comptime_int"),
.comptime_float_type => return out_stream.writeAll("comptime_float"),
.noreturn_type => return out_stream.writeAll("noreturn"),
.null_type => return out_stream.writeAll("@TypeOf(null)"),
.undefined_type => return out_stream.writeAll("@TypeOf(undefined)"),
.fn_noreturn_no_args_type => return out_stream.writeAll("fn() noreturn"),
.fn_naked_noreturn_no_args_type => return out_stream.writeAll("fn() callconv(.Naked) noreturn"),
.fn_ccc_void_no_args_type => return out_stream.writeAll("fn() callconv(.C) void"),
.single_const_pointer_to_comptime_int_type => return out_stream.writeAll("*const comptime_int"),
.const_slice_u8_type => return out_stream.writeAll("[]const u8"),
.null_value => return out_stream.writeAll("null"),
.undef => return out_stream.writeAll("undefined"),
.zero => return out_stream.writeAll("0"),
.the_one_possible_value => return out_stream.writeAll("(one possible value)"),
.bool_true => return out_stream.writeAll("true"),
.bool_false => return out_stream.writeAll("false"),
.ty => return val.cast(Payload.Ty).?.ty.format("", options, out_stream),
.int_u64 => return std.fmt.formatIntValue(val.cast(Payload.Int_u64).?.int, "", options, out_stream),
.int_i64 => return std.fmt.formatIntValue(val.cast(Payload.Int_i64).?.int, "", options, out_stream),
.int_big_positive => return out_stream.print("{}", .{val.cast(Payload.IntBigPositive).?.asBigInt()}),
.int_big_negative => return out_stream.print("{}", .{val.cast(Payload.IntBigNegative).?.asBigInt()}),
.function => return out_stream.writeAll("(function)"),
.ref_val => {
const ref_val = val.cast(Payload.RefVal).?;
try out_stream.writeAll("&const ");
val = ref_val.val;
},
.decl_ref => return out_stream.writeAll("(decl ref)"),
.elem_ptr => {
const elem_ptr = val.cast(Payload.ElemPtr).?;
try out_stream.print("&[{}] ", .{elem_ptr.index});
val = elem_ptr.array_ptr;
},
.bytes => return std.zig.renderStringLiteral(self.cast(Payload.Bytes).?.data, out_stream),
.repeated => {
try out_stream.writeAll("(repeated) ");
val = val.cast(Payload.Repeated).?.val;
},
};
}
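// A minimal formatting sketch (not part of the original file): the custom
// format method above is picked up by std.fmt, so a no-payload value can
// be printed with "{}".
test "format a no-payload value" {
    var buf: [32]u8 = undefined;
    const str = try std.fmt.bufPrint(&buf, "{}", .{initTag(.zero)});
    std.testing.expect(std.mem.eql(u8, str, "0"));
}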
/// Asserts that the value is representable as an array of bytes.
/// Copies the value into a freshly allocated slice of memory, which is owned by the caller.
pub fn toAllocatedBytes(self: Value, allocator: *Allocator) ![]u8 {
if (self.cast(Payload.Bytes)) |bytes| {
return std.mem.dupe(allocator, u8, bytes.data);
}
if (self.cast(Payload.Repeated)) |repeated| {
@panic("TODO implement toAllocatedBytes for this Value tag");
}
if (self.cast(Payload.DeclRef)) |declref| {
const val = try declref.decl.value();
return val.toAllocatedBytes(allocator);
}
unreachable;
}
/// Asserts that the value is representable as a type.
pub fn toType(self: Value) Type {
return switch (self.tag()) {
.ty => self.cast(Payload.Ty).?.ty,
.u8_type => Type.initTag(.u8),
.i8_type => Type.initTag(.i8),
.isize_type => Type.initTag(.isize),
.usize_type => Type.initTag(.usize),
.c_short_type => Type.initTag(.c_short),
.c_ushort_type => Type.initTag(.c_ushort),
.c_int_type => Type.initTag(.c_int),
.c_uint_type => Type.initTag(.c_uint),
.c_long_type => Type.initTag(.c_long),
.c_ulong_type => Type.initTag(.c_ulong),
.c_longlong_type => Type.initTag(.c_longlong),
.c_ulonglong_type => Type.initTag(.c_ulonglong),
.c_longdouble_type => Type.initTag(.c_longdouble),
.f16_type => Type.initTag(.f16),
.f32_type => Type.initTag(.f32),
.f64_type => Type.initTag(.f64),
.f128_type => Type.initTag(.f128),
.c_void_type => Type.initTag(.c_void),
.bool_type => Type.initTag(.bool),
.void_type => Type.initTag(.void),
.type_type => Type.initTag(.type),
.anyerror_type => Type.initTag(.anyerror),
.comptime_int_type => Type.initTag(.comptime_int),
.comptime_float_type => Type.initTag(.comptime_float),
.noreturn_type => Type.initTag(.noreturn),
.null_type => Type.initTag(.@"null"),
.undefined_type => Type.initTag(.@"undefined"),
.fn_noreturn_no_args_type => Type.initTag(.fn_noreturn_no_args),
.fn_naked_noreturn_no_args_type => Type.initTag(.fn_naked_noreturn_no_args),
.fn_ccc_void_no_args_type => Type.initTag(.fn_ccc_void_no_args),
.single_const_pointer_to_comptime_int_type => Type.initTag(.single_const_pointer_to_comptime_int),
.const_slice_u8_type => Type.initTag(.const_slice_u8),
.undef,
.zero,
.the_one_possible_value,
.bool_true,
.bool_false,
.null_value,
.int_u64,
.int_i64,
.int_big_positive,
.int_big_negative,
.function,
.ref_val,
.decl_ref,
.elem_ptr,
.bytes,
.repeated,
=> unreachable,
};
}
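// Sketch of the Value-to-Type mapping above, assuming Type.zigTypeTag
// (used later in this file) reports .Bool for the bool type.
test "toType on a type value" {
    const ty = initTag(.bool_type).toType();
    std.testing.expect(ty.zigTypeTag() == .Bool);
}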
/// Asserts the value is an integer.
pub fn toBigInt(self: Value, space: *BigIntSpace) BigIntConst {
switch (self.tag()) {
.ty,
.u8_type,
.i8_type,
.isize_type,
.usize_type,
.c_short_type,
.c_ushort_type,
.c_int_type,
.c_uint_type,
.c_long_type,
.c_ulong_type,
.c_longlong_type,
.c_ulonglong_type,
.c_longdouble_type,
.f16_type,
.f32_type,
.f64_type,
.f128_type,
.c_void_type,
.bool_type,
.void_type,
.type_type,
.anyerror_type,
.comptime_int_type,
.comptime_float_type,
.noreturn_type,
.null_type,
.undefined_type,
.fn_noreturn_no_args_type,
.fn_naked_noreturn_no_args_type,
.fn_ccc_void_no_args_type,
.single_const_pointer_to_comptime_int_type,
.const_slice_u8_type,
.bool_true,
.bool_false,
.null_value,
.function,
.ref_val,
.decl_ref,
.elem_ptr,
.bytes,
.undef,
.repeated,
=> unreachable,
.the_one_possible_value, // An integer with one possible value is always zero.
.zero,
=> return BigIntMutable.init(&space.limbs, 0).toConst(),
.int_u64 => return BigIntMutable.init(&space.limbs, self.cast(Payload.Int_u64).?.int).toConst(),
.int_i64 => return BigIntMutable.init(&space.limbs, self.cast(Payload.Int_i64).?.int).toConst(),
.int_big_positive => return self.cast(Payload.IntBigPositive).?.asBigInt(),
.int_big_negative => return self.cast(Payload.IntBigNegative).?.asBigInt(),
}
}
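// Sketch: zero converts to a zero big int. BigIntSpace is the scratch
// buffer struct declared later in this file.
test "toBigInt on zero" {
    var space: BigIntSpace = undefined;
    const big = initTag(.zero).toBigInt(&space);
    std.testing.expect(big.orderAgainstScalar(0) == .eq);
}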
/// Asserts the value is an integer and that it fits in a u64.
pub fn toUnsignedInt(self: Value) u64 {
switch (self.tag()) {
.ty,
.u8_type,
.i8_type,
.isize_type,
.usize_type,
.c_short_type,
.c_ushort_type,
.c_int_type,
.c_uint_type,
.c_long_type,
.c_ulong_type,
.c_longlong_type,
.c_ulonglong_type,
.c_longdouble_type,
.f16_type,
.f32_type,
.f64_type,
.f128_type,
.c_void_type,
.bool_type,
.void_type,
.type_type,
.anyerror_type,
.comptime_int_type,
.comptime_float_type,
.noreturn_type,
.null_type,
.undefined_type,
.fn_noreturn_no_args_type,
.fn_naked_noreturn_no_args_type,
.fn_ccc_void_no_args_type,
.single_const_pointer_to_comptime_int_type,
.const_slice_u8_type,
.bool_true,
.bool_false,
.null_value,
.function,
.ref_val,
.decl_ref,
.elem_ptr,
.bytes,
.undef,
.repeated,
=> unreachable,
.zero,
.the_one_possible_value, // an integer with one possible value is always zero
=> return 0,
.int_u64 => return self.cast(Payload.Int_u64).?.int,
.int_i64 => return @intCast(u64, self.cast(Payload.Int_i64).?.int),
.int_big_positive => return self.cast(Payload.IntBigPositive).?.asBigInt().to(u64) catch unreachable,
.int_big_negative => return self.cast(Payload.IntBigNegative).?.asBigInt().to(u64) catch unreachable,
}
}
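// Sketch, assuming Payload.Int_u64 (declared later in this file) carries a
// defaulted `base` field as relied upon by cast above.
test "toUnsignedInt on an int_u64 payload" {
    var payload = Payload.Int_u64{ .int = 42 };
    const val = initPayload(&payload.base);
    std.testing.expect(val.toUnsignedInt() == 42);
}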
/// Asserts the value is an integer and not undefined.
/// Returns the number of bits required to represent the value in twos complement form.
pub fn intBitCountTwosComp(self: Value) usize {
switch (self.tag()) {
.ty,
.u8_type,
.i8_type,
.isize_type,
.usize_type,
.c_short_type,
.c_ushort_type,
.c_int_type,
.c_uint_type,
.c_long_type,
.c_ulong_type,
.c_longlong_type,
.c_ulonglong_type,
.c_longdouble_type,
.f16_type,
.f32_type,
.f64_type,
.f128_type,
.c_void_type,
.bool_type,
.void_type,
.type_type,
.anyerror_type,
.comptime_int_type,
.comptime_float_type,
.noreturn_type,
.null_type,
.undefined_type,
.fn_noreturn_no_args_type,
.fn_naked_noreturn_no_args_type,
.fn_ccc_void_no_args_type,
.single_const_pointer_to_comptime_int_type,
.const_slice_u8_type,
.bool_true,
.bool_false,
.null_value,
.function,
.ref_val,
.decl_ref,
.elem_ptr,
.bytes,
.undef,
.repeated,
=> unreachable,
.the_one_possible_value, // an integer with one possible value is always zero
.zero,
=> return 0,
.int_u64 => {
const x = self.cast(Payload.Int_u64).?.int;
if (x == 0) return 0;
return std.math.log2(x) + 1;
},
.int_i64 => {
@panic("TODO implement i64 intBitCountTwosComp");
},
.int_big_positive => return self.cast(Payload.IntBigPositive).?.asBigInt().bitCountTwosComp(),
.int_big_negative => return self.cast(Payload.IntBigNegative).?.asBigInt().bitCountTwosComp(),
}
}
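// Sketch of the bit-count rule above: zero needs 0 bits, while 255 needs
// log2(255) + 1 == 8 bits in twos complement form.
test "intBitCountTwosComp on small integers" {
    std.testing.expect(initTag(.zero).intBitCountTwosComp() == 0);
    var payload = Payload.Int_u64{ .int = 255 };
    std.testing.expect(initPayload(&payload.base).intBitCountTwosComp() == 8);
}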
/// Asserts the value is an integer, and the destination type is ComptimeInt or Int.
pub fn intFitsInType(self: Value, ty: Type, target: Target) bool {
switch (self.tag()) {
.ty,
.u8_type,
.i8_type,
.isize_type,
.usize_type,
.c_short_type,
.c_ushort_type,
.c_int_type,
.c_uint_type,
.c_long_type,
.c_ulong_type,
.c_longlong_type,
.c_ulonglong_type,
.c_longdouble_type,
.f16_type,
.f32_type,
.f64_type,
.f128_type,
.c_void_type,
.bool_type,
.void_type,
.type_type,
.anyerror_type,
.comptime_int_type,
.comptime_float_type,
.noreturn_type,
.null_type,
.undefined_type,
.fn_noreturn_no_args_type,
.fn_naked_noreturn_no_args_type,
.fn_ccc_void_no_args_type,
.single_const_pointer_to_comptime_int_type,
.const_slice_u8_type,
.bool_true,
.bool_false,
.null_value,
.function,
.ref_val,
.decl_ref,
.elem_ptr,
.bytes,
.repeated,
=> unreachable,
.zero,
.undef,
.the_one_possible_value, // an integer with one possible value is always zero
=> return true,
.int_u64 => switch (ty.zigTypeTag()) {
.Int => {
const x = self.cast(Payload.Int_u64).?.int;
if (x == 0) return true;
const info = ty.intInfo(target);
const needed_bits = std.math.log2(x) + 1 + @boolToInt(info.signed);
return info.bits >= needed_bits;
},
.ComptimeInt => return true,
else => unreachable,
},
.int_i64 => switch (ty.zigTypeTag()) {
.Int => {
const x = self.cast(Payload.Int_i64).?.int;
if (x == 0) return true;
const info = ty.intInfo(target);
if (!info.signed and x < 0)
return false;
@panic("TODO implement i64 intFitsInType");
},
.ComptimeInt => return true,
else => unreachable,
},
.int_big_positive => switch (ty.zigTypeTag()) {
.Int => {
const info = ty.intInfo(target);
return self.cast(Payload.IntBigPositive).?.asBigInt().fitsInTwosComp(info.signed, info.bits);
},
.ComptimeInt => return true,
else => unreachable,
},
.int_big_negative => switch (ty.zigTypeTag()) {
.Int => {
const info = ty.intInfo(target);
return self.cast(Payload.IntBigNegative).?.asBigInt().fitsInTwosComp(info.signed, info.bits);
},
.ComptimeInt => return true,
else => unreachable,
},
}
}
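// Sketch: any integer fits in comptime_int, regardless of magnitude.
// Target.current stands in for a real compilation target here, and
// Payload.Int_u64 is assumed to be declared later in this file.
test "intFitsInType with comptime_int destination" {
    var payload = Payload.Int_u64{ .int = 123 };
    const val = initPayload(&payload.base);
    std.testing.expect(val.intFitsInType(Type.initTag(.comptime_int), Target.current));
}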
/// Asserts the value is a float.
pub fn floatHasFraction(self: Value) bool {
return switch (self.tag()) {
.ty,
.u8_type,
.i8_type,
.isize_type,
.usize_type,
.c_short_type,
.c_ushort_type,
.c_int_type,
.c_uint_type,
.c_long_type,
.c_ulong_type,
.c_longlong_type,
.c_ulonglong_type,
.c_longdouble_type,
.f16_type,
.f32_type,
.f64_type,
.f128_type,
.c_void_type,
.bool_type,
.void_type,
.type_type,
.anyerror_type,
.comptime_int_type,
.comptime_float_type,
.noreturn_type,
.null_type,
.undefined_type,
.fn_noreturn_no_args_type,
.fn_naked_noreturn_no_args_type,
.fn_ccc_void_no_args_type,
.single_const_pointer_to_comptime_int_type,
.const_slice_u8_type,
.bool_true,
.bool_false,
.null_value,
.function,
.ref_val,
.decl_ref,
.elem_ptr,
.bytes,
.repeated,
.undef,
.int_u64,
.int_i64,
.int_big_positive,
.int_big_negative,
.the_one_possible_value,
=> unreachable,
.zero => false,
};
}
pub fn orderAgainstZero(lhs: Value) std.math.Order {
switch (lhs.tag()) {
.ty,
.u8_type,
.i8_type,
.isize_type,
.usize_type,
.c_short_type,
.c_ushort_type,
.c_int_type,
.c_uint_type,
.c_long_type,
.c_ulong_type,
.c_longlong_type,
.c_ulonglong_type,
.c_longdouble_type,
.f16_type,
.f32_type,
.f64_type,
.f128_type,
.c_void_type,
.bool_type,
.void_type,
.type_type,
.anyerror_type,
.comptime_int_type,
.comptime_float_type,
.noreturn_type,
.null_type,
.undefined_type,
.fn_noreturn_no_args_type,
.fn_naked_noreturn_no_args_type,
.fn_ccc_void_no_args_type,
.single_const_pointer_to_comptime_int_type,
.const_slice_u8_type,
.bool_true,
.bool_false,
.null_value,
.function,
.ref_val,
.decl_ref,
.elem_ptr,
.bytes,
.repeated,
.undef,
=> unreachable,
.zero,
.the_one_possible_value, // an integer with one possible value is always zero
=> return .eq,
.int_u64 => return std.math.order(lhs.cast(Payload.Int_u64).?.int, 0),
.int_i64 => return std.math.order(lhs.cast(Payload.Int_i64).?.int, 0),
.int_big_positive => return lhs.cast(Payload.IntBigPositive).?.asBigInt().orderAgainstScalar(0),
.int_big_negative => return lhs.cast(Payload.IntBigNegative).?.asBigInt().orderAgainstScalar(0),
}
}
/// Asserts the value is comparable.
pub fn order(lhs: Value, rhs: Value) std.math.Order {
const lhs_tag = lhs.tag();
const rhs_tag = rhs.tag();
const lhs_is_zero = lhs_tag == .zero or lhs_tag == .the_one_possible_value;
const rhs_is_zero = rhs_tag == .zero or rhs_tag == .the_one_possible_value;
if (lhs_is_zero) return rhs.orderAgainstZero().invert();
if (rhs_is_zero) return lhs.orderAgainstZero();
// TODO floats
var lhs_bigint_space: BigIntSpace = undefined;
var rhs_bigint_space: BigIntSpace = undefined;
const lhs_bigint = lhs.toBigInt(&lhs_bigint_space);
const rhs_bigint = rhs.toBigInt(&rhs_bigint_space);
return lhs_bigint.order(rhs_bigint);
}
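    // Usage sketch: when neither operand is zero-tagged, `order` falls through
    // to the stack-based big-int comparison, so no allocator is needed even
    // across mixed-signedness payloads.
    test "order across signs sketch" {
        var a = Payload.Int_i64{ .int = -1 };
        var b = Payload.Int_u64{ .int = 1 };
        assert(Value.initPayload(&a.base).order(Value.initPayload(&b.base)) == .lt);
    }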
/// Asserts the value is comparable.
pub fn compare(lhs: Value, op: std.math.CompareOperator, rhs: Value) bool {
return order(lhs, rhs).compare(op);
}
/// Asserts the value is comparable.
pub fn compareWithZero(lhs: Value, op: std.math.CompareOperator) bool {
return orderAgainstZero(lhs).compare(op);
}
pub fn eql(a: Value, b: Value) bool {
// TODO non numerical comparisons
return compare(a, .eq, b);
}
pub fn toBool(self: Value) bool {
return switch (self.tag()) {
.bool_true => true,
.bool_false => false,
else => unreachable,
};
}
/// Asserts the value is a pointer and dereferences it.
/// Returns error.AnalysisFail if the pointer points to a Decl that failed semantic analysis.
pub fn pointerDeref(self: Value, allocator: *Allocator) error{ AnalysisFail, OutOfMemory }!Value {
return switch (self.tag()) {
.ty,
.u8_type,
.i8_type,
.isize_type,
.usize_type,
.c_short_type,
.c_ushort_type,
.c_int_type,
.c_uint_type,
.c_long_type,
.c_ulong_type,
.c_longlong_type,
.c_ulonglong_type,
.c_longdouble_type,
.f16_type,
.f32_type,
.f64_type,
.f128_type,
.c_void_type,
.bool_type,
.void_type,
.type_type,
.anyerror_type,
.comptime_int_type,
.comptime_float_type,
.noreturn_type,
.null_type,
.undefined_type,
.fn_noreturn_no_args_type,
.fn_naked_noreturn_no_args_type,
.fn_ccc_void_no_args_type,
.single_const_pointer_to_comptime_int_type,
.const_slice_u8_type,
.zero,
.bool_true,
.bool_false,
.null_value,
.function,
.int_u64,
.int_i64,
.int_big_positive,
.int_big_negative,
.bytes,
.undef,
.repeated,
=> unreachable,
.the_one_possible_value => Value.initTag(.the_one_possible_value),
.ref_val => self.cast(Payload.RefVal).?.val,
.decl_ref => self.cast(Payload.DeclRef).?.decl.value(),
.elem_ptr => {
const elem_ptr = self.cast(Payload.ElemPtr).?;
const array_val = try elem_ptr.array_ptr.pointerDeref(allocator);
return array_val.elemValue(allocator, elem_ptr.index);
},
};
}
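    // Usage sketch (assumes std.testing.allocator): a `ref_val` is a comptime
    // pointer to an immutable value, so dereferencing it simply unwraps the
    // payload; nothing is allocated on this path.
    test "pointerDeref through ref_val sketch" {
        var ref_payload = Payload.RefVal{ .val = Value.initTag(.zero) };
        const ptr = Value.initPayload(&ref_payload.base);
        const deref = try ptr.pointerDeref(std.testing.allocator);
        assert(deref.tag() == .zero);
    }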
    /// Asserts the value is an array (`.bytes` or `.repeated`) and returns
    /// the element value at the index. Pointers must be dereferenced first.
pub fn elemValue(self: Value, allocator: *Allocator, index: usize) error{OutOfMemory}!Value {
switch (self.tag()) {
.ty,
.u8_type,
.i8_type,
.isize_type,
.usize_type,
.c_short_type,
.c_ushort_type,
.c_int_type,
.c_uint_type,
.c_long_type,
.c_ulong_type,
.c_longlong_type,
.c_ulonglong_type,
.c_longdouble_type,
.f16_type,
.f32_type,
.f64_type,
.f128_type,
.c_void_type,
.bool_type,
.void_type,
.type_type,
.anyerror_type,
.comptime_int_type,
.comptime_float_type,
.noreturn_type,
.null_type,
.undefined_type,
.fn_noreturn_no_args_type,
.fn_naked_noreturn_no_args_type,
.fn_ccc_void_no_args_type,
.single_const_pointer_to_comptime_int_type,
.const_slice_u8_type,
.zero,
.the_one_possible_value,
.bool_true,
.bool_false,
.null_value,
.function,
.int_u64,
.int_i64,
.int_big_positive,
.int_big_negative,
.undef,
.elem_ptr,
.ref_val,
.decl_ref,
=> unreachable,
.bytes => {
const int_payload = try allocator.create(Payload.Int_u64);
int_payload.* = .{ .int = self.cast(Payload.Bytes).?.data[index] };
return Value.initPayload(&int_payload.base);
},
            // No matter the index, all the elements are the same.
.repeated => return self.cast(Payload.Repeated).?.val,
}
}
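    // Usage sketch (assumes std.testing.allocator): element access on `bytes`
    // allocates a fresh Int_u64 payload, which the caller owns and must free.
    test "elemValue on bytes sketch" {
        const allocator = std.testing.allocator;
        var bytes_payload = Payload.Bytes{ .data = "hi" };
        const str = Value.initPayload(&bytes_payload.base);
        const elem = try str.elemValue(allocator, 1);
        defer allocator.destroy(elem.cast(Payload.Int_u64).?);
        assert(elem.cast(Payload.Int_u64).?.int == 'i');
    }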
/// Returns a pointer to the element value at the index.
pub fn elemPtr(self: Value, allocator: *Allocator, index: usize) !Value {
const payload = try allocator.create(Payload.ElemPtr);
if (self.cast(Payload.ElemPtr)) |elem_ptr| {
payload.* = .{ .array_ptr = elem_ptr.array_ptr, .index = elem_ptr.index + index };
} else {
payload.* = .{ .array_ptr = self, .index = index };
}
return Value.initPayload(&payload.base);
}
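    // Usage sketch (assumes std.testing.allocator): `elemPtr` flattens rather
    // than nests; indexing an existing elem_ptr folds both offsets into a
    // single Payload.ElemPtr.
    test "elemPtr composes indices sketch" {
        const allocator = std.testing.allocator;
        var bytes_payload = Payload.Bytes{ .data = "abcd" };
        var ref_payload = Payload.RefVal{ .val = Value.initPayload(&bytes_payload.base) };
        const array_ptr = Value.initPayload(&ref_payload.base);
        const p1 = try array_ptr.elemPtr(allocator, 1);
        defer allocator.destroy(p1.cast(Payload.ElemPtr).?);
        const p3 = try p1.elemPtr(allocator, 2);
        defer allocator.destroy(p3.cast(Payload.ElemPtr).?);
        assert(p3.cast(Payload.ElemPtr).?.index == 3);
    }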
pub fn isUndef(self: Value) bool {
return self.tag() == .undef;
}
/// Valid for all types. Asserts the value is not undefined.
/// `.the_one_possible_value` is reported as not null.
pub fn isNull(self: Value) bool {
return switch (self.tag()) {
.ty,
.u8_type,
.i8_type,
.isize_type,
.usize_type,
.c_short_type,
.c_ushort_type,
.c_int_type,
.c_uint_type,
.c_long_type,
.c_ulong_type,
.c_longlong_type,
.c_ulonglong_type,
.c_longdouble_type,
.f16_type,
.f32_type,
.f64_type,
.f128_type,
.c_void_type,
.bool_type,
.void_type,
.type_type,
.anyerror_type,
.comptime_int_type,
.comptime_float_type,
.noreturn_type,
.null_type,
.undefined_type,
.fn_noreturn_no_args_type,
.fn_naked_noreturn_no_args_type,
.fn_ccc_void_no_args_type,
.single_const_pointer_to_comptime_int_type,
.const_slice_u8_type,
.zero,
.the_one_possible_value,
.bool_true,
.bool_false,
.function,
.int_u64,
.int_i64,
.int_big_positive,
.int_big_negative,
.ref_val,
.decl_ref,
.elem_ptr,
.bytes,
.repeated,
=> false,
.undef => unreachable,
.null_value => true,
};
}
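    // Usage sketch (assumes `.null_value` is a no-payload tag, as its grouping
    // with `.bool_true`/`.bool_false` suggests): only the dedicated null tag
    // reports null; `.undef` would trip the unreachable above.
    test "isNull sketch" {
        assert(!Value.initTag(.zero).isNull());
        assert(Value.initTag(.null_value).isNull());
    }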
/// This type is not copyable since it may contain pointers to its inner data.
pub const Payload = struct {
tag: Tag,
pub const Int_u64 = struct {
base: Payload = Payload{ .tag = .int_u64 },
int: u64,
};
pub const Int_i64 = struct {
base: Payload = Payload{ .tag = .int_i64 },
int: i64,
};
pub const IntBigPositive = struct {
base: Payload = Payload{ .tag = .int_big_positive },
limbs: []const std.math.big.Limb,
pub fn asBigInt(self: IntBigPositive) BigIntConst {
return BigIntConst{ .limbs = self.limbs, .positive = true };
}
};
pub const IntBigNegative = struct {
base: Payload = Payload{ .tag = .int_big_negative },
limbs: []const std.math.big.Limb,
pub fn asBigInt(self: IntBigNegative) BigIntConst {
return BigIntConst{ .limbs = self.limbs, .positive = false };
}
};
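        // Usage sketch: `asBigInt` adapts a borrowed limb slice into a
        // BigIntConst view for std.math.big operations, without copying.
        test "asBigInt over stack limbs sketch" {
            var limbs = [_]std.math.big.Limb{42};
            const payload = IntBigPositive{ .limbs = &limbs };
            assert(payload.asBigInt().orderAgainstScalar(42) == .eq);
        }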
pub const Function = struct {
base: Payload = Payload{ .tag = .function },
func: *Module.Fn,
};
pub const ArraySentinel0_u8_Type = struct {
base: Payload = Payload{ .tag = .array_sentinel_0_u8_type },
len: u64,
};
pub const SingleConstPtrType = struct {
base: Payload = Payload{ .tag = .single_const_ptr_type },
elem_type: *Type,
};
/// Represents a pointer to another immutable value.
pub const RefVal = struct {
base: Payload = Payload{ .tag = .ref_val },
val: Value,
};
/// Represents a pointer to a decl, not the value of the decl.
pub const DeclRef = struct {
base: Payload = Payload{ .tag = .decl_ref },
decl: *Module.Decl,
};
pub const ElemPtr = struct {
base: Payload = Payload{ .tag = .elem_ptr },
array_ptr: Value,
index: usize,
};
pub const Bytes = struct {
base: Payload = Payload{ .tag = .bytes },
data: []const u8,
};
pub const Ty = struct {
base: Payload = Payload{ .tag = .ty },
ty: Type,
};
pub const Repeated = struct {
            base: Payload = Payload{ .tag = .repeated },
            /// This value is repeated some number of times. The number of
            /// times to repeat is stored externally.
            val: Value,
};
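        // Usage sketch (assumes std.testing.allocator): a repeated aggregate
        // answers any index with the same element, so `elemValue` allocates
        // nothing for it.
        test "elemValue on repeated sketch" {
            var rep = Repeated{ .val = Value.initTag(.bool_true) };
            const arr = Value.initPayload(&rep.base);
            assert((try arr.elemValue(std.testing.allocator, 99)).toBool());
        }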
};
/// Big enough to fit any non-BigInt value
pub const BigIntSpace = struct {
/// The +1 is headroom so that operations such as incrementing once or decrementing once
/// are possible without using an allocator.
limbs: [(@sizeOf(u64) / @sizeOf(std.math.big.Limb)) + 1]std.math.big.Limb,
};
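    // Usage sketch: callers hand `toBigInt` a stack BigIntSpace (as `order`
    // does above) so small integer tags can be viewed as big ints with no
    // allocator involved.
    test "toBigInt via BigIntSpace sketch" {
        var payload = Payload.Int_u64{ .int = 1234 };
        var space: BigIntSpace = undefined;
        const big = Value.initPayload(&payload.base).toBigInt(&space);
        assert(big.orderAgainstScalar(1234) == .eq);
    }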
};