zig/src-self-hosted/type.zig

const std = @import("std");
const Value = @import("value.zig").Value;
const assert = std.debug.assert;
const Allocator = std.mem.Allocator;
const Target = std.Target;
/// This is the raw data, with no bookkeeping, no memory awareness, no de-duplication.
/// It's important for this type to be small.
/// Types are not de-duplicated, which helps with multi-threading: it obviates the
/// need to obtain a lock on a global type table, and it keeps the garbage
/// collection bookkeeping simpler.
/// This union takes advantage of the fact that the first page of memory
/// is unmapped, giving us 4096 possible enum tags that have no payload.
pub const Type = extern union {
/// If the tag value is less than Tag.no_payload_count, then no pointer
/// dereference is needed.
tag_if_small_enough: usize,
ptr_otherwise: *Payload,
pub fn zigTypeTag(self: Type) std.builtin.TypeId {
switch (self.tag()) {
.u8,
.i8,
.isize,
.usize,
.c_short,
.c_ushort,
.c_int,
.c_uint,
.c_long,
.c_ulong,
.c_longlong,
.c_ulonglong,
.c_longdouble,
.int_signed,
.int_unsigned,
=> return .Int,
.f16,
.f32,
.f64,
.f128,
=> return .Float,
.c_void => return .Opaque,
.bool => return .Bool,
.void => return .Void,
.type => return .Type,
.anyerror => return .ErrorSet,
.comptime_int => return .ComptimeInt,
.comptime_float => return .ComptimeFloat,
.noreturn => return .NoReturn,
.@"null" => return .Null,
.fn_noreturn_no_args => return .Fn,
.fn_naked_noreturn_no_args => return .Fn,
.fn_ccc_void_no_args => return .Fn,
.array, .array_u8_sentinel_0 => return .Array,
.single_const_pointer => return .Pointer,
.single_const_pointer_to_comptime_int => return .Pointer,
.const_slice_u8 => return .Pointer,
}
}
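// A minimal usage sketch (added for illustration; not in the original file):
// payload-free tags built with `initTag` map onto `std.builtin.TypeId` buckets.
// Assumes `const_slice_u8` sits in the no-payload section of `Tag`, as its
// handling elsewhere in this file suggests.
test "zigTypeTag buckets" {
    assert(Type.initTag(.usize).zigTypeTag() == .Int);
    assert(Type.initTag(.f64).zigTypeTag() == .Float);
    assert(Type.initTag(.const_slice_u8).zigTypeTag() == .Pointer);
}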
pub fn initTag(comptime small_tag: Tag) Type {
comptime assert(@enumToInt(small_tag) < Tag.no_payload_count);
return .{ .tag_if_small_enough = @enumToInt(small_tag) };
}
pub fn initPayload(payload: *Payload) Type {
assert(@enumToInt(payload.tag) >= Tag.no_payload_count);
return .{ .ptr_otherwise = payload };
}
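// A hedged round-trip sketch (added): `tag()` recovers the tag whether it is
// stored inline or behind a payload pointer. Assumes `Payload.IntUnsigned`
// defaults its `base.tag` to `.int_unsigned`, which `initPayload`'s assertion
// relies on.
test "tag round-trips through initTag and initPayload" {
    assert(Type.initTag(.bool).tag() == .bool);
    var payload = Payload.IntUnsigned{ .bits = 24 };
    assert(Type.initPayload(&payload.base).tag() == .int_unsigned);
}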
pub fn tag(self: Type) Tag {
if (self.tag_if_small_enough < Tag.no_payload_count) {
return @intToEnum(Tag, @intCast(@TagType(Tag), self.tag_if_small_enough));
} else {
return self.ptr_otherwise.tag;
}
}
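/// If this type's tag carries a payload and `T` is the payload struct for that
/// tag (identified by the default value of its `base` field), returns a pointer
/// to the payload; otherwise returns null.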
pub fn cast(self: Type, comptime T: type) ?*T {
if (self.tag_if_small_enough < Tag.no_payload_count)
return null;
const expected_tag = std.meta.fieldInfo(T, "base").default_value.?.tag;
if (self.ptr_otherwise.tag != expected_tag)
return null;
return @fieldParentPtr(T, "base", self.ptr_otherwise);
}
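// A usage sketch (added): `cast` is the checked down-cast used throughout this
// file, e.g. `self.cast(Payload.Array).?.elem_type` below.
test "cast checks the payload tag" {
    var payload = Payload.IntUnsigned{ .bits = 24 };
    const ty = Type.initPayload(&payload.base);
    assert(ty.cast(Payload.IntUnsigned).?.bits == 24);
    assert(ty.cast(Payload.IntSigned) == null);
    assert(Type.initTag(.u8).cast(Payload.IntUnsigned) == null);
}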
pub fn eql(a: Type, b: Type) bool {
//std.debug.warn("test {} == {}\n", .{ a, b });
// As a shortcut, if the small tags / addresses match, we're done.
if (a.tag_if_small_enough == b.tag_if_small_enough)
return true;
const zig_tag_a = a.zigTypeTag();
const zig_tag_b = b.zigTypeTag();
if (zig_tag_a != zig_tag_b)
return false;
switch (zig_tag_a) {
.Type => return true,
.Void => return true,
.Bool => return true,
.NoReturn => return true,
.ComptimeFloat => return true,
.ComptimeInt => return true,
.Undefined => return true,
.Null => return true,
.Pointer => {
const is_slice_a = isSlice(a);
const is_slice_b = isSlice(b);
if (is_slice_a != is_slice_b)
return false;
@panic("TODO implement more pointer Type equality comparison");
},
.Int => {
if (a.tag() != b.tag()) {
// Detect that e.g. u64 != usize, even if the bits match on a particular target.
return false;
}
// The target will not be branched upon, because we handled target-dependent cases above.
const info_a = a.intInfo(@as(Target, undefined));
const info_b = b.intInfo(@as(Target, undefined));
return info_a.signed == info_b.signed and info_a.bits == info_b.bits;
},
.Array => {
if (a.arrayLen() != b.arrayLen())
return false;
if (!a.elemType().eql(b.elemType()))
    return false;
const sentinel_a = a.arraySentinel();
const sentinel_b = b.arraySentinel();
if (sentinel_a) |sa| {
if (sentinel_b) |sb| {
return sa.eql(sb);
} else {
return false;
}
} else {
return sentinel_b == null;
}
},
.Float,
.Struct,
.Optional,
.ErrorUnion,
.ErrorSet,
.Enum,
.Union,
.Fn,
.BoundFn,
.Opaque,
.Frame,
.AnyFrame,
.Vector,
.EnumLiteral,
=> @panic("TODO implement more Type equality comparison"),
}
}
pub fn format(
self: Type,
comptime fmt: []const u8,
options: std.fmt.FormatOptions,
out_stream: var,
) !void {
comptime assert(fmt.len == 0);
var ty = self;
while (true) {
const t = ty.tag();
switch (t) {
.u8,
.i8,
.isize,
.usize,
.c_short,
.c_ushort,
.c_int,
.c_uint,
.c_long,
.c_ulong,
.c_longlong,
.c_ulonglong,
.c_longdouble,
.c_void,
.f16,
.f32,
.f64,
.f128,
.bool,
.void,
.type,
.anyerror,
.comptime_int,
.comptime_float,
.noreturn,
=> return out_stream.writeAll(@tagName(t)),
.@"null" => return out_stream.writeAll("@TypeOf(null)"),
.const_slice_u8 => return out_stream.writeAll("[]const u8"),
.fn_noreturn_no_args => return out_stream.writeAll("fn() noreturn"),
.fn_naked_noreturn_no_args => return out_stream.writeAll("fn() callconv(.Naked) noreturn"),
.fn_ccc_void_no_args => return out_stream.writeAll("fn() callconv(.C) void"),
.single_const_pointer_to_comptime_int => return out_stream.writeAll("*const comptime_int"),
.array_u8_sentinel_0 => {
const payload = @fieldParentPtr(Payload.Array_u8_Sentinel0, "base", ty.ptr_otherwise);
return out_stream.print("[{}:0]u8", .{payload.len});
},
.array => {
const payload = @fieldParentPtr(Payload.Array, "base", ty.ptr_otherwise);
try out_stream.print("[{}]", .{payload.len});
ty = payload.elem_type;
continue;
},
.single_const_pointer => {
const payload = @fieldParentPtr(Payload.SingleConstPointer, "base", ty.ptr_otherwise);
try out_stream.writeAll("*const ");
ty = payload.pointee_type;
continue;
},
.int_signed => {
const payload = @fieldParentPtr(Payload.IntSigned, "base", ty.ptr_otherwise);
return out_stream.print("i{}", .{payload.bits});
},
.int_unsigned => {
const payload = @fieldParentPtr(Payload.IntUnsigned, "base", ty.ptr_otherwise);
return out_stream.print("u{}", .{payload.bits});
},
}
unreachable;
}
}
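// A formatting sketch (added; assumes the 2020-era `std.fmt` stream protocol
// matching the `out_stream: var` signature above).
test "format prints payload-free type names" {
    var buf: [32]u8 = undefined;
    const printed = try std.fmt.bufPrint(&buf, "{}", .{Type.initTag(.usize)});
    assert(std.mem.eql(u8, printed, "usize"));
}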
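/// Returns the `Value` that represents this `Type`. Only tags that carry a
/// payload allocate; the common payload-free tags map to static `Value` tags.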
pub fn toValue(self: Type, allocator: *Allocator) Allocator.Error!Value {
switch (self.tag()) {
.u8 => return Value.initTag(.u8_type),
.i8 => return Value.initTag(.i8_type),
.isize => return Value.initTag(.isize_type),
.usize => return Value.initTag(.usize_type),
.c_short => return Value.initTag(.c_short_type),
.c_ushort => return Value.initTag(.c_ushort_type),
.c_int => return Value.initTag(.c_int_type),
.c_uint => return Value.initTag(.c_uint_type),
.c_long => return Value.initTag(.c_long_type),
.c_ulong => return Value.initTag(.c_ulong_type),
.c_longlong => return Value.initTag(.c_longlong_type),
.c_ulonglong => return Value.initTag(.c_ulonglong_type),
.c_longdouble => return Value.initTag(.c_longdouble_type),
.c_void => return Value.initTag(.c_void_type),
.f16 => return Value.initTag(.f16_type),
.f32 => return Value.initTag(.f32_type),
.f64 => return Value.initTag(.f64_type),
.f128 => return Value.initTag(.f128_type),
.bool => return Value.initTag(.bool_type),
.void => return Value.initTag(.void_type),
.type => return Value.initTag(.type_type),
.anyerror => return Value.initTag(.anyerror_type),
.comptime_int => return Value.initTag(.comptime_int_type),
.comptime_float => return Value.initTag(.comptime_float_type),
.noreturn => return Value.initTag(.noreturn_type),
.@"null" => return Value.initTag(.null_type),
.fn_noreturn_no_args => return Value.initTag(.fn_noreturn_no_args_type),
.fn_naked_noreturn_no_args => return Value.initTag(.fn_naked_noreturn_no_args_type),
.fn_ccc_void_no_args => return Value.initTag(.fn_ccc_void_no_args_type),
.single_const_pointer_to_comptime_int => return Value.initTag(.single_const_pointer_to_comptime_int_type),
.const_slice_u8 => return Value.initTag(.const_slice_u8_type),
else => {
const ty_payload = try allocator.create(Value.Payload.Ty);
ty_payload.* = .{ .ty = self };
return Value.initPayload(&ty_payload.base);
},
}
}
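/// Returns true if instances of this type occupy bits in the generated machine
/// code; comptime-only types such as `type` and `comptime_int` do not.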
pub fn hasCodeGenBits(self: Type) bool {
return switch (self.tag()) {
.u8,
.i8,
.isize,
.usize,
.c_short,
.c_ushort,
.c_int,
.c_uint,
.c_long,
.c_ulong,
.c_longlong,
.c_ulonglong,
.c_longdouble,
.f16,
.f32,
.f64,
.f128,
.bool,
.anyerror,
.fn_noreturn_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.single_const_pointer_to_comptime_int,
.const_slice_u8,
.array_u8_sentinel_0,
.array, // TODO check for zero bits
.single_const_pointer,
.int_signed, // TODO check for zero bits
.int_unsigned, // TODO check for zero bits
=> true,
.c_void,
.void,
.type,
.comptime_int,
.comptime_float,
.noreturn,
.@"null",
=> false,
};
}
/// Asserts that hasCodeGenBits() is true.
pub fn abiAlignment(self: Type, target: Target) u32 {
return switch (self.tag()) {
.u8,
.i8,
.bool,
.fn_noreturn_no_args, // represents machine code; not a pointer
.fn_naked_noreturn_no_args, // represents machine code; not a pointer
.fn_ccc_void_no_args, // represents machine code; not a pointer
.array_u8_sentinel_0,
=> return 1,
.isize,
.usize,
.single_const_pointer_to_comptime_int,
.const_slice_u8,
.single_const_pointer,
=> return @divExact(target.cpu.arch.ptrBitWidth(), 8),
.c_short => return @divExact(CType.short.sizeInBits(target), 8),
.c_ushort => return @divExact(CType.ushort.sizeInBits(target), 8),
.c_int => return @divExact(CType.int.sizeInBits(target), 8),
.c_uint => return @divExact(CType.uint.sizeInBits(target), 8),
.c_long => return @divExact(CType.long.sizeInBits(target), 8),
.c_ulong => return @divExact(CType.ulong.sizeInBits(target), 8),
.c_longlong => return @divExact(CType.longlong.sizeInBits(target), 8),
.c_ulonglong => return @divExact(CType.ulonglong.sizeInBits(target), 8),
.f16 => return 2,
.f32 => return 4,
.f64 => return 8,
.f128 => return 16,
.c_longdouble => return 16,
.anyerror => return 2, // TODO revisit this when we have the concept of the error tag type
.array => return self.cast(Payload.Array).?.elem_type.abiAlignment(target),
.int_signed, .int_unsigned => {
const bits: u16 = if (self.cast(Payload.IntSigned)) |pl|
pl.bits
else if (self.cast(Payload.IntUnsigned)) |pl|
pl.bits
else
unreachable;
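// Byte size rounded up to the next power of two, e.g. a u24 occupies
// (24 + 7) / 8 = 3 bytes and gets 4-byte ABI alignment.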
return std.math.ceilPowerOfTwoPromote(u16, (bits + 7) / 8);
},
.c_void,
.void,
.type,
.comptime_int,
.comptime_float,
.noreturn,
.@"null",
=> unreachable,
};
}
2020-04-21 10:24:25 -07:00
pub fn isSinglePointer(self: Type) bool {
return switch (self.tag()) {
.u8,
.i8,
.isize,
.usize,
.c_short,
.c_ushort,
.c_int,
.c_uint,
.c_long,
.c_ulong,
.c_longlong,
.c_ulonglong,
.c_longdouble,
.f16,
.f32,
.f64,
.f128,
.c_void,
.bool,
.void,
.type,
.anyerror,
.comptime_int,
.comptime_float,
.noreturn,
.@"null",
.array,
.array_u8_sentinel_0,
.const_slice_u8,
.fn_noreturn_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.int_unsigned,
.int_signed,
=> false,
.single_const_pointer,
.single_const_pointer_to_comptime_int,
=> true,
};
}
pub fn isSlice(self: Type) bool {
return switch (self.tag()) {
.u8,
.i8,
.isize,
.usize,
.c_short,
.c_ushort,
.c_int,
.c_uint,
.c_long,
.c_ulong,
.c_longlong,
.c_ulonglong,
.c_longdouble,
.f16,
.f32,
.f64,
.f128,
.c_void,
.bool,
.void,
.type,
.anyerror,
.comptime_int,
.comptime_float,
.noreturn,
.@"null",
.array,
.array_u8_sentinel_0,
.single_const_pointer,
.single_const_pointer_to_comptime_int,
.fn_noreturn_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.int_unsigned,
.int_signed,
=> false,
.const_slice_u8 => true,
};
}
/// Asserts the type is a pointer type.
pub fn pointerIsConst(self: Type) bool {
return switch (self.tag()) {
.u8,
.i8,
.isize,
.usize,
.c_short,
.c_ushort,
.c_int,
.c_uint,
.c_long,
.c_ulong,
.c_longlong,
.c_ulonglong,
.c_longdouble,
.f16,
.f32,
.f64,
.f128,
.c_void,
.bool,
.void,
.type,
.anyerror,
.comptime_int,
.comptime_float,
.noreturn,
.@"null",
.array,
.array_u8_sentinel_0,
.fn_noreturn_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.int_unsigned,
.int_signed,
=> unreachable,
.single_const_pointer,
.single_const_pointer_to_comptime_int,
.const_slice_u8,
=> true,
};
}
/// Asserts the type is a pointer or array type.
pub fn elemType(self: Type) Type {
return switch (self.tag()) {
.u8,
.i8,
.isize,
.usize,
.c_short,
.c_ushort,
.c_int,
.c_uint,
.c_long,
.c_ulong,
.c_longlong,
.c_ulonglong,
.c_longdouble,
.f16,
.f32,
.f64,
.f128,
.c_void,
.bool,
.void,
.type,
.anyerror,
.comptime_int,
.comptime_float,
.noreturn,
.@"null",
.fn_noreturn_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.int_unsigned,
.int_signed,
=> unreachable,
.array => self.cast(Payload.Array).?.elem_type,
.single_const_pointer => self.cast(Payload.SingleConstPointer).?.pointee_type,
.array_u8_sentinel_0, .const_slice_u8 => Type.initTag(.u8),
.single_const_pointer_to_comptime_int => Type.initTag(.comptime_int),
};
}
/// Asserts the type is an array or vector.
pub fn arrayLen(self: Type) u64 {
return switch (self.tag()) {
.u8,
.i8,
.isize,
.usize,
.c_short,
.c_ushort,
.c_int,
.c_uint,
.c_long,
.c_ulong,
.c_longlong,
.c_ulonglong,
.c_longdouble,
.f16,
.f32,
.f64,
.f128,
.c_void,
.bool,
.void,
.type,
.anyerror,
.comptime_int,
.comptime_float,
.noreturn,
.@"null",
.fn_noreturn_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.single_const_pointer,
.single_const_pointer_to_comptime_int,
.const_slice_u8,
.int_unsigned,
.int_signed,
=> unreachable,
.array => self.cast(Payload.Array).?.len,
.array_u8_sentinel_0 => self.cast(Payload.Array_u8_Sentinel0).?.len,
};
}
/// Asserts the type is an array or vector.
pub fn arraySentinel(self: Type) ?Value {
return switch (self.tag()) {
.u8,
.i8,
.isize,
.usize,
.c_short,
.c_ushort,
.c_int,
.c_uint,
.c_long,
.c_ulong,
.c_longlong,
.c_ulonglong,
.c_longdouble,
.f16,
.f32,
.f64,
.f128,
.c_void,
.bool,
.void,
.type,
.anyerror,
.comptime_int,
.comptime_float,
.noreturn,
.@"null",
.fn_noreturn_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.single_const_pointer,
.single_const_pointer_to_comptime_int,
.const_slice_u8,
.int_unsigned,
.int_signed,
=> unreachable,
.array => return null,
.array_u8_sentinel_0 => return Value.initTag(.zero),
};
}
/// Returns true if and only if the type is a fixed-width, signed integer.
pub fn isSignedInt(self: Type) bool {
return switch (self.tag()) {
.f16,
.f32,
.f64,
.f128,
.c_longdouble,
.c_void,
.bool,
.void,
.type,
.anyerror,
.comptime_int,
.comptime_float,
.noreturn,
.@"null",
.fn_noreturn_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.array,
.single_const_pointer,
.single_const_pointer_to_comptime_int,
.array_u8_sentinel_0,
.const_slice_u8,
.int_unsigned,
.u8,
.usize,
.c_ushort,
.c_uint,
.c_ulong,
.c_ulonglong,
=> false,
.int_signed,
.i8,
.isize,
.c_short,
.c_int,
.c_long,
.c_longlong,
=> true,
};
}
/// Asserts the type is a fixed-width integer.
pub fn intInfo(self: Type, target: Target) struct { signed: bool, bits: u16 } {
return switch (self.tag()) {
.f16,
.f32,
.f64,
.f128,
.c_longdouble,
.c_void,
.bool,
.void,
.type,
.anyerror,
.comptime_int,
.comptime_float,
.noreturn,
.@"null",
.fn_noreturn_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.array,
.single_const_pointer,
.single_const_pointer_to_comptime_int,
.array_u8_sentinel_0,
.const_slice_u8,
=> unreachable,
.int_unsigned => .{ .signed = false, .bits = self.cast(Payload.IntUnsigned).?.bits },
.int_signed => .{ .signed = true, .bits = self.cast(Payload.IntSigned).?.bits },
.u8 => .{ .signed = false, .bits = 8 },
.i8 => .{ .signed = true, .bits = 8 },
.usize => .{ .signed = false, .bits = target.cpu.arch.ptrBitWidth() },
.isize => .{ .signed = true, .bits = target.cpu.arch.ptrBitWidth() },
.c_short => .{ .signed = true, .bits = CType.short.sizeInBits(target) },
.c_ushort => .{ .signed = false, .bits = CType.ushort.sizeInBits(target) },
.c_int => .{ .signed = true, .bits = CType.int.sizeInBits(target) },
.c_uint => .{ .signed = false, .bits = CType.uint.sizeInBits(target) },
.c_long => .{ .signed = true, .bits = CType.long.sizeInBits(target) },
.c_ulong => .{ .signed = false, .bits = CType.ulong.sizeInBits(target) },
.c_longlong => .{ .signed = true, .bits = CType.longlong.sizeInBits(target) },
.c_ulonglong => .{ .signed = false, .bits = CType.ulonglong.sizeInBits(target) },
};
}
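// A hedged sketch (added): arbitrary-width integers ignore the target, so the
// `undefined` Target trick from `eql` above applies here as well.
test "intInfo of an arbitrary-width integer" {
    var payload = Payload.IntSigned{ .bits = 7 };
    const info = Type.initPayload(&payload.base).intInfo(@as(Target, undefined));
    assert(info.signed and info.bits == 7);
}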
pub fn isFloat(self: Type) bool {
return switch (self.tag()) {
.f16,
.f32,
.f64,
.f128,
.c_longdouble,
=> true,
else => false,
};
}
/// Asserts the type is a fixed-size float.
pub fn floatBits(self: Type, target: Target) u16 {
return switch (self.tag()) {
.f16 => 16,
.f32 => 32,
.f64 => 64,
.f128 => 128,
.c_longdouble => CType.longdouble.sizeInBits(target),
else => unreachable,
};
}
/// Asserts the type is a function.
pub fn fnParamLen(self: Type) usize {
return switch (self.tag()) {
.fn_noreturn_no_args => 0,
.fn_naked_noreturn_no_args => 0,
.fn_ccc_void_no_args => 0,
.f16,
.f32,
.f64,
.f128,
.c_longdouble,
.c_void,
.bool,
.void,
.type,
.anyerror,
.comptime_int,
.comptime_float,
.noreturn,
.@"null",
.array,
.single_const_pointer,
.single_const_pointer_to_comptime_int,
.array_u8_sentinel_0,
.const_slice_u8,
.u8,
.i8,
.usize,
.isize,
.c_short,
.c_ushort,
.c_int,
.c_uint,
.c_long,
.c_ulong,
.c_longlong,
.c_ulonglong,
.int_unsigned,
.int_signed,
=> unreachable,
};
}
/// Asserts the type is a function. The length of the slice must be at least the length
/// given by `fnParamLen`.
pub fn fnParamTypes(self: Type, types: []Type) void {
switch (self.tag()) {
.fn_noreturn_no_args => return,
.fn_naked_noreturn_no_args => return,
.fn_ccc_void_no_args => return,
.f16,
.f32,
.f64,
.f128,
.c_longdouble,
.c_void,
.bool,
.void,
.type,
.anyerror,
.comptime_int,
.comptime_float,
.noreturn,
.@"null",
.array,
.single_const_pointer,
.single_const_pointer_to_comptime_int,
.array_u8_sentinel_0,
.const_slice_u8,
.u8,
.i8,
.usize,
.isize,
.c_short,
.c_ushort,
.c_int,
.c_uint,
.c_long,
.c_ulong,
.c_longlong,
.c_ulonglong,
.int_unsigned,
.int_signed,
=> unreachable,
}
}
/// Asserts the type is a function.
pub fn fnReturnType(self: Type) Type {
return switch (self.tag()) {
.fn_noreturn_no_args => Type.initTag(.noreturn),
.fn_naked_noreturn_no_args => Type.initTag(.noreturn),
.fn_ccc_void_no_args => Type.initTag(.void),
.f16,
.f32,
.f64,
.f128,
.c_longdouble,
.c_void,
.bool,
.void,
.type,
.anyerror,
.comptime_int,
.comptime_float,
.noreturn,
.@"null",
.array,
.single_const_pointer,
.single_const_pointer_to_comptime_int,
.array_u8_sentinel_0,
.const_slice_u8,
.u8,
.i8,
.usize,
.isize,
.c_short,
.c_ushort,
.c_int,
.c_uint,
.c_long,
.c_ulong,
.c_longlong,
.c_ulonglong,
.int_unsigned,
.int_signed,
=> unreachable,
};
}
/// Asserts the type is a function.
pub fn fnCallingConvention(self: Type) std.builtin.CallingConvention {
return switch (self.tag()) {
.fn_noreturn_no_args => .Unspecified,
.fn_naked_noreturn_no_args => .Naked,
.fn_ccc_void_no_args => .C,
.f16,
.f32,
.f64,
.f128,
.c_longdouble,
.c_void,
.bool,
.void,
.type,
.anyerror,
.comptime_int,
.comptime_float,
.noreturn,
.@"null",
.array,
.single_const_pointer,
.single_const_pointer_to_comptime_int,
.array_u8_sentinel_0,
.const_slice_u8,
.u8,
.i8,
.usize,
.isize,
.c_short,
.c_ushort,
.c_int,
.c_uint,
.c_long,
.c_ulong,
.c_longlong,
.c_ulonglong,
.int_unsigned,
.int_signed,
=> unreachable,
};
}
/// Asserts the type is a function.
pub fn fnIsVarArgs(self: Type) bool {
return switch (self.tag()) {
.fn_noreturn_no_args => false,
.fn_naked_noreturn_no_args => false,
.fn_ccc_void_no_args => false,
.f16,
.f32,
.f64,
.f128,
.c_longdouble,
.c_void,
.bool,
.void,
.type,
.anyerror,
.comptime_int,
.comptime_float,
.noreturn,
.@"null",
.array,
.single_const_pointer,
.single_const_pointer_to_comptime_int,
.array_u8_sentinel_0,
.const_slice_u8,
.u8,
.i8,
.usize,
.isize,
.c_short,
.c_ushort,
.c_int,
.c_uint,
.c_long,
.c_ulong,
.c_longlong,
.c_ulonglong,
.int_unsigned,
.int_signed,
=> unreachable,
};
}
pub fn isNumeric(self: Type) bool {
return switch (self.tag()) {
.f16,
.f32,
.f64,
.f128,
.c_longdouble,
.comptime_int,
.comptime_float,
.u8,
.i8,
.usize,
.isize,
.c_short,
.c_ushort,
.c_int,
.c_uint,
.c_long,
.c_ulong,
.c_longlong,
.c_ulonglong,
.int_unsigned,
.int_signed,
=> true,
.c_void,
.bool,
.void,
.type,
.anyerror,
.noreturn,
.@"null",
.fn_noreturn_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.array,
.single_const_pointer,
.single_const_pointer_to_comptime_int,
.array_u8_sentinel_0,
.const_slice_u8,
=> false,
};
}
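/// Returns true if the type has exactly one possible value, e.g. `void`,
/// zero-bit integers, and zero-length arrays.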
pub fn onePossibleValue(self: Type) bool {
var ty = self;
while (true) switch (ty.tag()) {
.f16,
.f32,
.f64,
.f128,
.c_longdouble,
.comptime_int,
.comptime_float,
.u8,
.i8,
.usize,
.isize,
.c_short,
.c_ushort,
.c_int,
.c_uint,
.c_long,
.c_ulong,
.c_longlong,
.c_ulonglong,
.bool,
.type,
.anyerror,
.fn_noreturn_no_args,
.fn_naked_noreturn_no_args,
.fn_ccc_void_no_args,
.single_const_pointer_to_comptime_int,
.array_u8_sentinel_0,
.const_slice_u8,
=> return false,
.c_void,
.void,
.noreturn,
.@"null",
=> return true,
.int_unsigned => return ty.cast(Payload.IntUnsigned).?.bits == 0,
.int_signed => return ty.cast(Payload.IntSigned).?.bits == 0,
.array => {
const array = ty.cast(Payload.Array).?;
if (array.len == 0)
return true;
ty = array.elem_type;
continue;
},
.single_const_pointer => {
const ptr = ty.cast(Payload.SingleConstPointer).?;
ty = ptr.pointee_type;
continue;
},
};
}
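
    // A minimal usage sketch, assuming the union may be constructed directly
    // for payload-free tags; the surrounding file may provide an
    // `initTag`-style helper that should be preferred if one exists.
    test "onePossibleValue for payload-free tags" {
        // `void` carries no information: exactly one possible value.
        const void_ty = Type{ .tag_if_small_enough = @enumToInt(Tag.void) };
        std.testing.expect(void_ty.onePossibleValue());
        // `usize` has many possible values and needs runtime bits.
        const usize_ty = Type{ .tag_if_small_enough = @enumToInt(Tag.usize) };
        std.testing.expect(!usize_ty.onePossibleValue());
    }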

    /// Returns true if this is a C pointer type (`[*c]T`). No tag for C
    /// pointers exists yet, so this currently returns false for all types.
    pub fn isCPtr(self: Type) bool {
return switch (self.tag()) {
.f16,
.f32,
.f64,
.f128,
.c_longdouble,
.comptime_int,
.comptime_float,
.u8,
.i8,
.usize,
.isize,
.c_short,
.c_ushort,
.c_int,
.c_uint,
.c_long,
.c_ulong,
.c_longlong,
.c_ulonglong,
.bool,
.type,
.anyerror,
            .fn_noreturn_no_args,
            .fn_naked_noreturn_no_args,
            .fn_ccc_void_no_args,
.single_const_pointer_to_comptime_int,
.array_u8_sentinel_0,
.const_slice_u8,
.c_void,
.void,
.noreturn,
.@"null",
.int_unsigned,
.int_signed,
.array,
.single_const_pointer,
=> return false,
};
}
    /// This enum does not directly correspond to `std.builtin.TypeId` because
    /// it has extra enum tags in it, as a way of using less memory. For example,
    /// Zig recognizes `*align(10) i32` and `*i32` as both being Pointer types,
    /// but in this data structure they are represented with different enum tags,
    /// because the former requires more payload data than the latter.
    /// See `zigTypeTag` for the function that corresponds to `std.builtin.TypeId`.
pub const Tag = enum {
// The first section of this enum are tags that require no payload.
u8,
i8,
isize,
usize,
c_short,
c_ushort,
c_int,
c_uint,
c_long,
c_ulong,
c_longlong,
c_ulonglong,
c_longdouble,
f16,
f32,
f64,
f128,
        c_void,
bool,
void,
type,
anyerror,
comptime_int,
comptime_float,
noreturn,
@"null",
        fn_noreturn_no_args,
        fn_naked_noreturn_no_args,
        fn_ccc_void_no_args,
        single_const_pointer_to_comptime_int,
        const_slice_u8, // See last_no_payload_tag below.
// After this, the tag requires a payload.
array_u8_sentinel_0,
array,
single_const_pointer,
int_signed,
int_unsigned,

        pub const last_no_payload_tag = Tag.const_slice_u8;
        pub const no_payload_count = @enumToInt(last_no_payload_tag) + 1;
};
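
    // A minimal sketch of the small-tag encoding, assuming direct
    // construction of the union is acceptable: tags up to
    // `last_no_payload_tag` fit in `tag_if_small_enough` because the first
    // page of memory (addresses below 4096) is never a valid `*Payload`.
    test "payload-free tags are stored inline" {
        std.testing.expect(Tag.no_payload_count <= 4096);
        const bool_ty = Type{ .tag_if_small_enough = @enumToInt(Tag.bool) };
        std.testing.expect(bool_ty.zigTypeTag() == .Bool);
    }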

    /// The abstract base for out-of-line type data. Each tag listed after
    /// `last_no_payload_tag` has a struct below that embeds this as its
    /// `base` field.
    pub const Payload = struct {
tag: Tag,
pub const Array_u8_Sentinel0 = struct {
base: Payload = Payload{ .tag = .array_u8_sentinel_0 },
len: u64,
};
pub const Array = struct {
base: Payload = Payload{ .tag = .array },
elem_type: Type,
len: u64,
};
pub const SingleConstPointer = struct {
base: Payload = Payload{ .tag = .single_const_pointer },
pointee_type: Type,
};
pub const IntSigned = struct {
base: Payload = Payload{ .tag = .int_signed },
bits: u16,
};
pub const IntUnsigned = struct {
base: Payload = Payload{ .tag = .int_unsigned },
bits: u16,
};
};
};
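
// A minimal sketch of building a payload-backed type, assuming direct
// construction of the union is acceptable here; callers elsewhere may go
// through dedicated constructors instead.
test "payload-backed array type" {
    var array_payload = Type.Payload.Array{
        .elem_type = Type{ .tag_if_small_enough = @enumToInt(Type.Tag.u8) },
        .len = 0,
    };
    const array_ty = Type{ .ptr_otherwise = &array_payload.base };
    // A zero-length array has at most one possible value, so it is zero-bit.
    std.testing.expect(array_ty.onePossibleValue());
}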

pub const CType = enum {
short,
ushort,
int,
uint,
long,
ulong,
longlong,
ulonglong,
    longdouble,

    pub fn sizeInBits(self: CType, target: Target) u16 {
        switch (target.os.tag) {
            .freestanding, .other => switch (target.cpu.arch) {
.msp430 => switch (self) {
.short,
.ushort,
.int,
.uint,
=> return 16,
.long,
.ulong,
=> return 32,
.longlong,
.ulonglong,
=> return 64,
                    .longdouble => @panic("TODO figure out what kind of float `long double` is on this target"),
},
else => switch (self) {
.short,
.ushort,
=> return 16,
.int,
.uint,
=> return 32,
.long,
.ulong,
=> return target.cpu.arch.ptrBitWidth(),
.longlong,
.ulonglong,
=> return 64,
                    .longdouble => @panic("TODO figure out what kind of float `long double` is on this target"),
},
},
.linux,
.macosx,
.freebsd,
.netbsd,
.dragonfly,
.openbsd,
.wasi,
.emscripten,
=> switch (self) {
.short,
.ushort,
=> return 16,
.int,
.uint,
=> return 32,
.long,
.ulong,
=> return target.cpu.arch.ptrBitWidth(),
.longlong,
.ulonglong,
=> return 64,
                .longdouble => @panic("TODO figure out what kind of float `long double` is on this target"),
},
.windows, .uefi => switch (self) {
.short,
.ushort,
=> return 16,
.int,
.uint,
.long,
.ulong,
=> return 32,
.longlong,
.ulonglong,
=> return 64,
                .longdouble => @panic("TODO figure out what kind of float `long double` is on this target"),
},
.ios => switch (self) {
.short,
.ushort,
=> return 16,
.int,
.uint,
=> return 32,
.long,
.ulong,
.longlong,
.ulonglong,
=> return 64,
                .longdouble => @panic("TODO figure out what kind of float `long double` is on this target"),
},
.ananas,
.cloudabi,
.fuchsia,
.kfreebsd,
.lv2,
.solaris,
.haiku,
.minix,
.rtems,
.nacl,
.cnk,
.aix,
.cuda,
.nvcl,
.amdhsa,
.ps4,
.elfiamcu,
.tvos,
.watchos,
.mesa3d,
.contiki,
.amdpal,
.hermit,
.hurd,
            => @panic("TODO specify the C integer and float type sizes for this OS"),
}
}
};
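
// A minimal usage sketch for `sizeInBits`, assuming `Target.current` (the
// compile-time host target) is available in this std version and that the
// host OS is one of the cases handled above.
test "CType.sizeInBits for the native target" {
    const bits = CType.long.sizeInBits(Target.current);
    std.testing.expect(bits == 32 or bits == 64);
}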