Merge remote-tracking branch 'origin/master' into null-terminated-pointers

master
Andrew Kelley 2019-11-24 20:28:46 -05:00
commit 34b1ebefaa
33 changed files with 1571 additions and 1619 deletions

View File

@ -54,6 +54,7 @@ pub fn build(b: *Builder) !void {
var test_stage2 = b.addTest("src-self-hosted/test.zig");
test_stage2.setBuildMode(builtin.Mode.Debug);
test_stage2.addPackagePath("stage2_tests", "test/stage2/test.zig");
const fmt_build_zig = b.addFmt([_][]const u8{"build.zig"});
@ -72,9 +73,9 @@ pub fn build(b: *Builder) !void {
const skip_non_native = b.option(bool, "skip-non-native", "Main test suite skips non-native builds") orelse false;
const skip_libc = b.option(bool, "skip-libc", "Main test suite skips tests that link libc") orelse false;
const skip_self_hosted = b.option(bool, "skip-self-hosted", "Main test suite skips building self hosted compiler") orelse false;
if (!skip_self_hosted) {
// TODO re-enable this after https://github.com/ziglang/zig/issues/2377
//test_step.dependOn(&exe.step);
if (!skip_self_hosted and builtin.os == .linux) {
// TODO evented I/O other OS's
test_step.dependOn(&exe.step);
}
const only_install_lib_files = b.option(bool, "lib-files-only", "Only install library files") orelse false;
@ -98,11 +99,7 @@ pub fn build(b: *Builder) !void {
const test_stage2_step = b.step("test-stage2", "Run the stage2 compiler tests");
test_stage2_step.dependOn(&test_stage2.step);
// TODO see https://github.com/ziglang/zig/issues/1364
if (false) {
test_step.dependOn(test_stage2_step);
}
test_step.dependOn(test_stage2_step);
var chosen_modes: [4]builtin.Mode = undefined;
var chosen_mode_index: usize = 0;
@ -235,6 +232,9 @@ fn findLLVM(b: *Builder, llvm_config_exe: []const u8) !LibraryDep {
if (fs.path.isAbsolute(lib_arg)) {
try result.libs.append(lib_arg);
} else {
if (mem.endsWith(u8, lib_arg, ".lib")) {
lib_arg = lib_arg[0 .. lib_arg.len - 4];
}
try result.system_libs.append(lib_arg);
}
}
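
The findLLVM hunk above strips a trailing ".lib" from library names reported by llvm-config before recording them as system libraries. A tiny illustration of the slicing, with a made-up input value (sketch, not part of this commit):

var lib_arg: []const u8 = "LLVMCore.lib"; // hypothetical llvm-config output
if (mem.endsWith(u8, lib_arg, ".lib")) {
    lib_arg = lib_arg[0 .. lib_arg.len - 4]; // "LLVMCore" is what gets passed to the linker
}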

View File

@ -63,7 +63,7 @@ pub const ChildProcess = struct {
/// Windows-only. `cwd` was provided, but the path did not exist when spawning the child process.
CurrentWorkingDirectoryUnlinked,
} || os.ExecveError || os.SetIdError || os.ChangeCurDirError || windows.CreateProcessError;
} || os.ExecveError || os.SetIdError || os.ChangeCurDirError || windows.CreateProcessError || windows.WaitForSingleObjectError;
pub const Term = union(enum) {
Exited: u32,
@ -259,7 +259,9 @@ pub const ChildProcess = struct {
}
fn handleWaitResult(self: *ChildProcess, status: u32) void {
self.term = self.cleanupAfterWait(status);
// TODO https://github.com/ziglang/zig/issues/3190
var term = self.cleanupAfterWait(status);
self.term = term;
}
fn cleanupStreams(self: *ChildProcess) void {

View File

@ -179,7 +179,7 @@ pub const Coff = struct {
if (byte != 0 and i == buffer.len)
return error.NameTooLong;
return i;
return @as(usize, i);
}
pub fn loadSections(self: *Coff) !void {

View File

@ -26,7 +26,7 @@ pub fn Future(comptime T: type) type {
pub fn init() Self {
return Self{
.lock = Lock.initLocked(),
.available = 0,
.available = .NotStarted,
.data = undefined,
};
}

View File

@ -66,11 +66,10 @@ pub fn Group(comptime ReturnType: type) type {
node.* = AllocStack.Node{
.next = undefined,
.data = Node{
.handle = frame,
.bytes = @sliceToBytes((*[1]@Frame(func))(frame)[0..]),
.handle = @asyncCall(frame, {}, func, args),
.bytes = std.mem.asBytes(frame),
},
};
frame.* = async func(args);
self.alloc_stack.push(node);
}

View File

@ -9,27 +9,77 @@ const debug = std.debug;
const assert = debug.assert;
const testing = std.testing;
pub fn FixedSizeFifo(comptime T: type) type {
pub const LinearFifoBufferType = union(enum) {
/// The buffer is internal to the fifo; it is of the specified size.
Static: usize,
/// The buffer is passed as a slice to the initialiser.
Slice,
/// The buffer is managed dynamically using a `mem.Allocator`.
Dynamic,
};
pub fn LinearFifo(
comptime T: type,
comptime buffer_type: LinearFifoBufferType,
) type {
const autoalign = false;
const powers_of_two = switch (buffer_type) {
.Static => std.math.isPowerOfTwo(buffer_type.Static),
.Slice => false, // Any size slice could be passed in
.Dynamic => true, // This could be configurable in future
};
return struct {
allocator: *Allocator,
buf: []u8,
allocator: if (buffer_type == .Dynamic) *Allocator else void,
buf: if (buffer_type == .Static) [buffer_type.Static]T else []T,
head: usize,
count: usize,
const Self = @This();
pub fn init(allocator: *Allocator) Self {
return Self{
.allocator = allocator,
.buf = [_]T{},
.head = 0,
.count = 0,
};
}
// Type of Self argument for slice operations.
// If buffer is inline (Static) then we need to ensure we haven't
// returned a slice into a copy on the stack
const SliceSelfArg = if (buffer_type == .Static) *Self else Self;
pub fn deinit(self: *Self) void {
self.allocator.free(self.buf);
self.* = undefined;
pub usingnamespace switch (buffer_type) {
.Static => struct {
pub fn init() Self {
return .{
.allocator = {},
.buf = undefined,
.head = 0,
.count = 0,
};
}
},
.Slice => struct {
pub fn init(buf: []T) Self {
return .{
.allocator = {},
.buf = buf,
.head = 0,
.count = 0,
};
}
},
.Dynamic => struct {
pub fn init(allocator: *Allocator) Self {
return .{
.allocator = allocator,
.buf = [_]T{},
.head = 0,
.count = 0,
};
}
},
};
pub fn deinit(self: Self) void {
if (buffer_type == .Dynamic) self.allocator.free(self.buf);
}
pub fn realign(self: *Self) void {
@ -59,18 +109,24 @@ pub fn FixedSizeFifo(comptime T: type) type {
/// Reduce allocated capacity to `size`.
pub fn shrink(self: *Self, size: usize) void {
assert(size >= self.count);
self.realign();
self.buf = self.allocator.realloc(self.buf, size) catch |e| switch (e) {
error.OutOfMemory => return, // no problem, capacity is still correct then.
};
if (buffer_type == .Dynamic) {
self.realign();
self.buf = self.allocator.realloc(self.buf, size) catch |e| switch (e) {
error.OutOfMemory => return, // no problem, capacity is still correct then.
};
}
}
/// Ensure that the buffer can fit at least `size` items
pub fn ensureCapacity(self: *Self, size: usize) error{OutOfMemory}!void {
pub fn ensureCapacity(self: *Self, size: usize) !void {
if (self.buf.len >= size) return;
self.realign();
const new_size = math.ceilPowerOfTwo(usize, size) catch return error.OutOfMemory;
self.buf = try self.allocator.realloc(self.buf, new_size);
if (buffer_type == .Dynamic) {
self.realign();
const new_size = if (powers_of_two) math.ceilPowerOfTwo(usize, size) catch return error.OutOfMemory else size;
self.buf = try self.allocator.realloc(self.buf, new_size);
} else {
return error.OutOfMemory;
}
}
/// Makes sure at least `size` items are unused
@ -86,29 +142,24 @@ pub fn FixedSizeFifo(comptime T: type) type {
}
/// Returns a writable slice from the 'read' end of the fifo
fn readableSliceMut(self: Self, offset: usize) []T {
fn readableSliceMut(self: SliceSelfArg, offset: usize) []T {
if (offset > self.count) return [_]T{};
const start = self.head + offset;
var start = self.head + offset;
if (start >= self.buf.len) {
return self.buf[start - self.buf.len ..][0 .. self.count - offset];
start -= self.buf.len;
return self.buf[start .. start + (self.count - offset)];
} else {
const end: usize = self.head + self.count;
if (end >= self.buf.len) {
return self.buf[start..self.buf.len];
} else {
return self.buf[start..end];
}
const end = math.min(self.head + self.count, self.buf.len);
return self.buf[start..end];
}
}
/// Returns a readable slice from `offset`
pub fn readableSlice(self: Self, offset: usize) []const T {
pub fn readableSlice(self: SliceSelfArg, offset: usize) []const T {
return self.readableSliceMut(offset);
}
const autoalign = false;
/// Discard first `count` bytes of readable data
pub fn discard(self: *Self, count: usize) void {
assert(count <= self.count);
@ -124,10 +175,19 @@ pub fn FixedSizeFifo(comptime T: type) type {
@memset(unused2.ptr, undefined, unused2.len);
}
}
self.head = (self.head + count) % self.buf.len;
self.count -= count;
if (autoalign and self.count == 0)
if (autoalign and self.count == count) {
self.head = 0;
self.count = 0;
} else {
var head = self.head + count;
if (powers_of_two) {
head &= self.buf.len - 1;
} else {
head %= self.buf.len;
}
self.head = head;
self.count -= count;
}
}
/// Read the next item from the fifo
@ -139,8 +199,8 @@ pub fn FixedSizeFifo(comptime T: type) type {
return c;
}
/// Read data from the fifo into `dst`, returns slice of bytes copied (subslice of `dst`)
pub fn read(self: *Self, dst: []T) []T {
/// Read data from the fifo into `dst`, returns number of bytes copied.
pub fn read(self: *Self, dst: []T) usize {
var dst_left = dst;
while (dst_left.len > 0) {
@ -152,7 +212,7 @@ pub fn FixedSizeFifo(comptime T: type) type {
dst_left = dst_left[n..];
}
return dst[0 .. dst.len - dst_left.len];
return dst.len - dst_left.len;
}
/// Returns number of bytes available in fifo
@ -162,7 +222,7 @@ pub fn FixedSizeFifo(comptime T: type) type {
/// Returns the first section of writable buffer
/// Note that this may be of length 0
pub fn writableSlice(self: Self, offset: usize) []T {
pub fn writableSlice(self: SliceSelfArg, offset: usize) []T {
if (offset > self.buf.len) return [_]T{};
const tail = self.head + offset + self.count;
@ -193,7 +253,8 @@ pub fn FixedSizeFifo(comptime T: type) type {
self.count += count;
}
/// Appends the data in `src` to the fifo. You must
/// Appends the data in `src` to the fifo.
/// You must have ensured there is enough space.
pub fn writeAssumeCapacity(self: *Self, src: []const T) void {
assert(self.writableLength() >= src.len);
@ -208,6 +269,20 @@ pub fn FixedSizeFifo(comptime T: type) type {
}
}
/// Write a single item to the fifo
pub fn writeItem(self: *Self, item: T) !void {
try self.ensureUnusedCapacity(1);
var tail = self.head + self.count;
if (powers_of_two) {
tail &= self.buf.len - 1;
} else {
tail %= self.buf.len;
}
self.buf[tail] = item;
self.update(1);
}
/// Appends the data in `src` to the fifo.
/// Allocates more memory as necessary
pub fn write(self: *Self, src: []const T) !void {
@ -216,16 +291,27 @@ pub fn FixedSizeFifo(comptime T: type) type {
return self.writeAssumeCapacity(src);
}
pub fn print(self: *Self, comptime format: []const u8, args: ...) !void {
return std.fmt.format(self, error{OutOfMemory}, Self.write, format, args);
}
pub usingnamespace if (T == u8)
struct {
pub fn print(self: *Self, comptime format: []const u8, args: ...) !void {
return std.fmt.format(self, error{OutOfMemory}, Self.write, format, args);
}
}
else
struct {};
/// Make `count` bytes available before the current read location
fn rewind(self: *Self, size: usize) void {
assert(self.writableLength() >= size);
/// Make `count` items available before the current read location
fn rewind(self: *Self, count: usize) void {
assert(self.writableLength() >= count);
self.head = (self.head + (self.buf.len - size)) % self.buf.len;
self.count += size;
var head = self.head + (self.buf.len - count);
if (powers_of_two) {
head &= self.buf.len - 1;
} else {
head %= self.buf.len;
}
self.head = head;
self.count += count;
}
/// Place data back into the read stream
@ -235,9 +321,13 @@ pub fn FixedSizeFifo(comptime T: type) type {
self.rewind(src.len);
const slice = self.readableSliceMut(0);
mem.copy(T, slice, src[0..slice.len]);
const slice2 = self.readableSliceMut(slice.len);
mem.copy(T, slice2, src[slice.len..]);
if (src.len < slice.len) {
mem.copy(T, slice, src);
} else {
mem.copy(T, slice, src[0..slice.len]);
const slice2 = self.readableSliceMut(slice.len);
mem.copy(T, slice2, src[slice.len..]);
}
}
/// Peek at the item at `offset`
@ -245,15 +335,19 @@ pub fn FixedSizeFifo(comptime T: type) type {
if (offset >= self.count)
return error.EndOfStream;
return self.buf[(self.head + offset) % self.buf.len];
var index = self.head + offset;
if (powers_of_two) {
index &= self.buf.len - 1;
} else {
index %= self.buf.len;
}
return self.buf[index];
}
};
}
const ByteFifo = FixedSizeFifo(u8);
test "ByteFifo" {
var fifo = ByteFifo.init(debug.global_allocator);
test "LinearFifo(u8, .Dynamic)" {
var fifo = LinearFifo(u8, .Dynamic).init(debug.global_allocator);
defer fifo.deinit();
try fifo.write("HELLO");
@ -304,7 +398,10 @@ test "ByteFifo" {
{
try fifo.unget("prependedstring");
var result: [30]u8 = undefined;
testing.expectEqualSlices(u8, "prependedstringabcdefghij", fifo.read(&result));
testing.expectEqualSlices(u8, "prependedstringabcdefghij", result[0..fifo.read(&result)]);
try fifo.unget("b");
try fifo.unget("a");
testing.expectEqualSlices(u8, "ab", result[0..fifo.read(&result)]);
}
fifo.shrink(0);
@ -312,7 +409,33 @@ test "ByteFifo" {
{
try fifo.print("{}, {}!", "Hello", "World");
var result: [30]u8 = undefined;
testing.expectEqualSlices(u8, "Hello, World!", fifo.read(&result));
testing.expectEqualSlices(u8, "Hello, World!", result[0..fifo.read(&result)]);
testing.expectEqual(@as(usize, 0), fifo.readableLength());
}
}
test "LinearFifo" {
inline for ([_]type{ u1, u8, u16, u64 }) |T| {
inline for ([_]LinearFifoBufferType{ LinearFifoBufferType{ .Static = 32 }, .Slice, .Dynamic }) |bt| {
const FifoType = LinearFifo(T, bt);
var buf: if (bt == .Slice) [32]T else void = undefined;
var fifo = switch (bt) {
.Static => FifoType.init(),
.Slice => FifoType.init(buf[0..]),
.Dynamic => FifoType.init(debug.global_allocator),
};
defer fifo.deinit();
try fifo.write([_]T{ 0, 1, 1, 0, 1 });
testing.expectEqual(@as(usize, 5), fifo.readableLength());
{
testing.expectEqual(@as(T, 0), try fifo.readItem());
testing.expectEqual(@as(T, 1), try fifo.readItem());
testing.expectEqual(@as(T, 1), try fifo.readItem());
testing.expectEqual(@as(T, 0), try fifo.readItem());
testing.expectEqual(@as(T, 1), try fifo.readItem());
}
}
}
}
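
For orientation, a minimal sketch of the new allocator-free Static variant, written in the style of the file's own tests above (not part of this commit):

test "LinearFifo Static sketch" {
    // Static: the backing buffer lives inside the struct, so no allocator is
    // needed and deinit has no real work to do.
    var fifo = LinearFifo(u8, LinearFifoBufferType{ .Static = 16 }).init();
    defer fifo.deinit();

    try fifo.writeItem('z');
    try fifo.write("ig");

    var out: [16]u8 = undefined;
    testing.expectEqualSlices(u8, "zig", out[0..fifo.read(&out)]);
}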

View File

@ -1,5 +1,6 @@
// This file is included in the compilation unit when exporting a DLL on windows.
const root = @import("root");
const std = @import("std");
const builtin = @import("builtin");
@ -12,5 +13,9 @@ stdcallcc fn _DllMainCRTStartup(
fdwReason: std.os.windows.DWORD,
lpReserved: std.os.windows.LPVOID,
) std.os.windows.BOOL {
if (@hasDecl(root, "DllMain")) {
return root.DllMain(hinstDLL, fdwReason, lpReserved);
}
return std.os.windows.TRUE;
}
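
The hunk above makes the default DLL entry point forward to a DllMain declared in the user's root source file, when one exists. A sketch of such a declaration (hypothetical; the HINSTANCE parameter type is assumed from the usual Win32 signature and is not shown in the hunk):

const std = @import("std");
const windows = std.os.windows;

// Root source file of a DLL: because of the @hasDecl check above, the default
// _DllMainCRTStartup calls this instead of returning TRUE unconditionally.
pub fn DllMain(
    hinstDLL: windows.HINSTANCE,
    fdwReason: windows.DWORD,
    lpReserved: windows.LPVOID,
) windows.BOOL {
    // Inspect fdwReason (process/thread attach or detach) as needed.
    return windows.TRUE;
}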

View File

@ -119,9 +119,9 @@ pub const Args = struct {
// MergeN creation disallows 0 length flag entry (doesn't make sense)
switch (flag_args) {
FlagArg.None => unreachable,
FlagArg.Single => |inner| try prev.append(inner),
FlagArg.Many => |inner| try prev.appendSlice(inner.toSliceConst()),
.None => unreachable,
.Single => |inner| try prev.append(inner),
.Many => |inner| try prev.appendSlice(inner.toSliceConst()),
}
_ = try parsed.flags.put(flag_name_trimmed, FlagArg{ .Many = prev });
@ -158,7 +158,7 @@ pub const Args = struct {
pub fn single(self: *Args, name: []const u8) ?[]const u8 {
if (self.flags.get(name)) |entry| {
switch (entry.value) {
FlagArg.Single => |inner| {
.Single => |inner| {
return inner;
},
else => @panic("attempted to retrieve flag with wrong type"),
@ -172,7 +172,7 @@ pub const Args = struct {
pub fn many(self: *Args, name: []const u8) []const []const u8 {
if (self.flags.get(name)) |entry| {
switch (entry.value) {
FlagArg.Many => |inner| {
.Many => |inner| {
return inner.toSliceConst();
},
else => @panic("attempted to retrieve flag with wrong type"),

View File

@ -1,3 +1,5 @@
const Target = @import("std").Target;
pub const CInt = struct {
id: Id,
zig_name: []const u8,
@ -17,52 +19,152 @@ pub const CInt = struct {
pub const list = [_]CInt{
CInt{
.id = Id.Short,
.id = .Short,
.zig_name = "c_short",
.c_name = "short",
.is_signed = true,
},
CInt{
.id = Id.UShort,
.id = .UShort,
.zig_name = "c_ushort",
.c_name = "unsigned short",
.is_signed = false,
},
CInt{
.id = Id.Int,
.id = .Int,
.zig_name = "c_int",
.c_name = "int",
.is_signed = true,
},
CInt{
.id = Id.UInt,
.id = .UInt,
.zig_name = "c_uint",
.c_name = "unsigned int",
.is_signed = false,
},
CInt{
.id = Id.Long,
.id = .Long,
.zig_name = "c_long",
.c_name = "long",
.is_signed = true,
},
CInt{
.id = Id.ULong,
.id = .ULong,
.zig_name = "c_ulong",
.c_name = "unsigned long",
.is_signed = false,
},
CInt{
.id = Id.LongLong,
.id = .LongLong,
.zig_name = "c_longlong",
.c_name = "long long",
.is_signed = true,
},
CInt{
.id = Id.ULongLong,
.id = .ULongLong,
.zig_name = "c_ulonglong",
.c_name = "unsigned long long",
.is_signed = false,
},
};
pub fn sizeInBits(cint: CInt, self: Target) u32 {
const arch = self.getArch();
switch (self.getOs()) {
.freestanding => switch (self.getArch()) {
.msp430 => switch (cint.id) {
.Short,
.UShort,
.Int,
.UInt,
=> return 16,
.Long,
.ULong,
=> return 32,
.LongLong,
.ULongLong,
=> return 64,
},
else => switch (cint.id) {
.Short,
.UShort,
=> return 16,
.Int,
.UInt,
=> return 32,
.Long,
.ULong,
=> return self.getArchPtrBitWidth(),
.LongLong,
.ULongLong,
=> return 64,
},
},
.linux,
.macosx,
.freebsd,
.openbsd,
.zen,
=> switch (cint.id) {
.Short,
.UShort,
=> return 16,
.Int,
.UInt,
=> return 32,
.Long,
.ULong,
=> return self.getArchPtrBitWidth(),
.LongLong,
.ULongLong,
=> return 64,
},
.windows, .uefi => switch (cint.id) {
.Short,
.UShort,
=> return 16,
.Int,
.UInt,
.Long,
.ULong,
=> return 32,
.LongLong,
.ULongLong,
=> return 64,
},
.ananas,
.cloudabi,
.dragonfly,
.fuchsia,
.ios,
.kfreebsd,
.lv2,
.netbsd,
.solaris,
.haiku,
.minix,
.rtems,
.nacl,
.cnk,
.aix,
.cuda,
.nvcl,
.amdhsa,
.ps4,
.elfiamcu,
.tvos,
.watchos,
.mesa3d,
.contiki,
.amdpal,
.hermit,
.hurd,
.wasi,
.emscripten,
=> @panic("TODO specify the C integer type sizes for this OS"),
}
}
};
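
To make the new table concrete, a small sketch of a helper that looks up the width of C's long for a given target (hypothetical helper, not part of this commit; some_target stands for whatever Target value the caller already has):

// Per the table above, on LP64 systems such as x86_64-linux c_long follows the
// pointer width (64 bits), while 32-bit targets report 32.
fn cLongSizeInBits(some_target: Target) u32 {
    for (CInt.list) |cint| {
        if (cint.id == .Long) return cint.sizeInBits(some_target);
    }
    unreachable;
}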

View File

@ -1,5 +1,4 @@
const std = @import("std");
const builtin = @import("builtin");
const Compilation = @import("compilation.zig").Compilation;
const llvm = @import("llvm.zig");
const c = @import("c.zig");
@ -7,17 +6,18 @@ const ir = @import("ir.zig");
const Value = @import("value.zig").Value;
const Type = @import("type.zig").Type;
const Scope = @import("scope.zig").Scope;
const util = @import("util.zig");
const event = std.event;
const assert = std.debug.assert;
const DW = std.dwarf;
const maxInt = std.math.maxInt;
pub async fn renderToLlvm(comp: *Compilation, fn_val: *Value.Fn, code: *ir.Code) !void {
pub async fn renderToLlvm(comp: *Compilation, fn_val: *Value.Fn, code: *ir.Code) Compilation.BuildError!void {
fn_val.base.ref();
defer fn_val.base.deref(comp);
defer code.destroy(comp.gpa());
var output_path = try await (async comp.createRandomOutputPath(comp.target.objFileExt()) catch unreachable);
var output_path = try comp.createRandomOutputPath(comp.target.oFileExt());
errdefer output_path.deinit();
const llvm_handle = try comp.zig_compiler.getAnyLlvmContext();
@ -31,7 +31,7 @@ pub async fn renderToLlvm(comp: *Compilation, fn_val: *Value.Fn, code: *ir.Code)
llvm.SetTarget(module, comp.llvm_triple.ptr());
llvm.SetDataLayout(module, comp.target_layout_str);
if (comp.target.getObjectFormat() == builtin.ObjectFormat.coff) {
if (util.getObjectFormat(comp.target) == .coff) {
llvm.AddModuleCodeViewFlag(module);
} else {
llvm.AddModuleDebugInfoFlag(module);
@ -59,7 +59,7 @@ pub async fn renderToLlvm(comp: *Compilation, fn_val: *Value.Fn, code: *ir.Code)
comp.name.ptr(),
comp.root_package.root_src_dir.ptr(),
) orelse return error.OutOfMemory;
const is_optimized = comp.build_mode != builtin.Mode.Debug;
const is_optimized = comp.build_mode != .Debug;
const compile_unit = llvm.CreateCompileUnit(
dibuilder,
DW.LANG_C99,
@ -79,7 +79,7 @@ pub async fn renderToLlvm(comp: *Compilation, fn_val: *Value.Fn, code: *ir.Code)
.builder = builder,
.dibuilder = dibuilder,
.context = context,
.lock = event.Lock.init(comp.loop),
.lock = event.Lock.init(),
.arena = &code.arena.allocator,
};
@ -105,8 +105,8 @@ pub async fn renderToLlvm(comp: *Compilation, fn_val: *Value.Fn, code: *ir.Code)
assert(comp.emit_file_type == Compilation.Emit.Binary); // TODO support other types
const is_small = comp.build_mode == builtin.Mode.ReleaseSmall;
const is_debug = comp.build_mode == builtin.Mode.Debug;
const is_small = comp.build_mode == .ReleaseSmall;
const is_debug = comp.build_mode == .Debug;
var err_msg: [*]u8 = undefined;
// TODO integrate this with evented I/O
@ -234,8 +234,8 @@ pub fn renderToLlvmModule(ofile: *ObjectFile, fn_val: *Value.Fn, code: *ir.Code)
// create debug variable declarations for variables and allocate all local variables
for (var_list) |var_scope, i| {
const var_type = switch (var_scope.data) {
Scope.Var.Data.Const => unreachable,
Scope.Var.Data.Param => |param| param.typ,
.Const => unreachable,
.Param => |param| param.typ,
};
// if (!type_has_bits(var->value->type)) {
// continue;
@ -266,7 +266,7 @@ pub fn renderToLlvmModule(ofile: *ObjectFile, fn_val: *Value.Fn, code: *ir.Code)
var_scope.data.Param.llvm_value = llvm.GetParam(llvm_fn, @intCast(c_uint, i));
} else {
// gen_type = var->value->type;
var_scope.data.Param.llvm_value = try renderAlloca(ofile, var_type, var_scope.name, Type.Pointer.Align.Abi);
var_scope.data.Param.llvm_value = try renderAlloca(ofile, var_type, var_scope.name, .Abi);
}
// if (var->decl_node) {
// var->di_loc_var = ZigLLVMCreateParameterVariable(g->dbuilder, get_di_scope(g, var->parent_scope),
@ -300,8 +300,8 @@ pub fn renderToLlvmModule(ofile: *ObjectFile, fn_val: *Value.Fn, code: *ir.Code)
ofile,
llvm_param,
scope_var.data.Param.llvm_value,
Type.Pointer.Align.Abi,
Type.Pointer.Vol.Non,
.Abi,
.Non,
);
}
@ -383,8 +383,8 @@ fn renderLoadUntyped(
) !*llvm.Value {
const result = llvm.BuildLoad(ofile.builder, ptr, name) orelse return error.OutOfMemory;
switch (vol) {
Type.Pointer.Vol.Non => {},
Type.Pointer.Vol.Volatile => llvm.SetVolatile(result, 1),
.Non => {},
.Volatile => llvm.SetVolatile(result, 1),
}
llvm.SetAlignment(result, resolveAlign(ofile, alignment, llvm.GetElementType(llvm.TypeOf(ptr))));
return result;
@ -414,8 +414,8 @@ pub fn renderStoreUntyped(
) !*llvm.Value {
const result = llvm.BuildStore(ofile.builder, value, ptr) orelse return error.OutOfMemory;
switch (vol) {
Type.Pointer.Vol.Non => {},
Type.Pointer.Vol.Volatile => llvm.SetVolatile(result, 1),
.Non => {},
.Volatile => llvm.SetVolatile(result, 1),
}
llvm.SetAlignment(result, resolveAlign(ofile, alignment, llvm.TypeOf(value)));
return result;
@ -445,7 +445,7 @@ pub fn renderAlloca(
pub fn resolveAlign(ofile: *ObjectFile, alignment: Type.Pointer.Align, llvm_type: *llvm.Type) u32 {
return switch (alignment) {
Type.Pointer.Align.Abi => return llvm.ABIAlignmentOfType(ofile.comp.target_data_ref, llvm_type),
Type.Pointer.Align.Override => |a| a,
.Abi => return llvm.ABIAlignmentOfType(ofile.comp.target_data_ref, llvm_type),
.Override => |a| a,
};
}

View File

@ -5,8 +5,8 @@ const Allocator = mem.Allocator;
const Buffer = std.Buffer;
const llvm = @import("llvm.zig");
const c = @import("c.zig");
const builtin = @import("builtin");
const Target = @import("target.zig").Target;
const builtin = std.builtin;
const Target = std.Target;
const warn = std.debug.warn;
const Token = std.zig.Token;
const ArrayList = std.ArrayList;
@ -30,14 +30,15 @@ const link = @import("link.zig").link;
const LibCInstallation = @import("libc_installation.zig").LibCInstallation;
const CInt = @import("c_int.zig").CInt;
const fs = event.fs;
const util = @import("util.zig");
const max_src_size = 2 * 1024 * 1024 * 1024; // 2 GiB
/// Data that is local to the event loop.
pub const ZigCompiler = struct {
loop: *event.Loop,
llvm_handle_pool: std.atomic.Stack(*llvm.Context),
lld_lock: event.Lock,
allocator: *Allocator,
/// TODO pool these so that it doesn't have to lock
prng: event.Locked(std.rand.DefaultPrng),
@ -46,9 +47,9 @@ pub const ZigCompiler = struct {
var lazy_init_targets = std.lazyInit(void);
pub fn init(loop: *event.Loop) !ZigCompiler {
pub fn init(allocator: *Allocator) !ZigCompiler {
lazy_init_targets.get() orelse {
Target.initializeAll();
util.initializeAllTargets();
lazy_init_targets.resolve();
};
@ -57,11 +58,11 @@ pub const ZigCompiler = struct {
const seed = mem.readIntNative(u64, &seed_bytes);
return ZigCompiler{
.loop = loop,
.lld_lock = event.Lock.init(loop),
.allocator = allocator,
.lld_lock = event.Lock.init(),
.llvm_handle_pool = std.atomic.Stack(*llvm.Context).init(),
.prng = event.Locked(std.rand.DefaultPrng).init(loop, std.rand.DefaultPrng.init(seed)),
.native_libc = event.Future(LibCInstallation).init(loop),
.prng = event.Locked(std.rand.DefaultPrng).init(std.rand.DefaultPrng.init(seed)),
.native_libc = event.Future(LibCInstallation).init(),
};
}
@ -70,7 +71,7 @@ pub const ZigCompiler = struct {
self.lld_lock.deinit();
while (self.llvm_handle_pool.pop()) |node| {
llvm.ContextDispose(node.data);
self.loop.allocator.destroy(node);
self.allocator.destroy(node);
}
}
@ -82,19 +83,19 @@ pub const ZigCompiler = struct {
const context_ref = llvm.ContextCreate() orelse return error.OutOfMemory;
errdefer llvm.ContextDispose(context_ref);
const node = try self.loop.allocator.create(std.atomic.Stack(*llvm.Context).Node);
const node = try self.allocator.create(std.atomic.Stack(*llvm.Context).Node);
node.* = std.atomic.Stack(*llvm.Context).Node{
.next = undefined,
.data = context_ref,
};
errdefer self.loop.allocator.destroy(node);
errdefer self.allocator.destroy(node);
return LlvmHandle{ .node = node };
}
pub async fn getNativeLibC(self: *ZigCompiler) !*LibCInstallation {
if (await (async self.native_libc.start() catch unreachable)) |ptr| return ptr;
try await (async self.native_libc.data.findNative(self.loop) catch unreachable);
if (self.native_libc.start()) |ptr| return ptr;
try self.native_libc.data.findNative(self.allocator);
self.native_libc.resolve();
return &self.native_libc.data;
}
@ -122,7 +123,6 @@ pub const LlvmHandle = struct {
pub const Compilation = struct {
zig_compiler: *ZigCompiler,
loop: *event.Loop,
name: Buffer,
llvm_triple: Buffer,
root_src_path: ?[]const u8,
@ -227,8 +227,8 @@ pub const Compilation = struct {
/// need to wait on this group before deinitializing
deinit_group: event.Group(void),
destroy_handle: promise,
main_loop_handle: promise,
// destroy_frame: @Frame(createAsync),
// main_loop_frame: @Frame(Compilation.mainLoop),
main_loop_future: event.Future(void),
have_err_ret_tracing: bool,
@ -243,7 +243,7 @@ pub const Compilation = struct {
c_int_types: [CInt.list.len]*Type.Int,
fs_watch: *fs.Watch(*Scope.Root),
// fs_watch: *fs.Watch(*Scope.Root),
const IntTypeTable = std.HashMap(*const Type.Int.Key, *Type.Int, Type.Int.Key.hash, Type.Int.Key.eql);
const ArrayTypeTable = std.HashMap(*const Type.Array.Key, *Type.Array, Type.Array.Key.hash, Type.Array.Key.eql);
@ -348,7 +348,7 @@ pub const Compilation = struct {
zig_lib_dir: []const u8,
) !*Compilation {
var optional_comp: ?*Compilation = null;
const handle = try async<zig_compiler.loop.allocator> createAsync(
var frame = async createAsync(
&optional_comp,
zig_compiler,
name,
@ -359,10 +359,7 @@ pub const Compilation = struct {
is_static,
zig_lib_dir,
);
return optional_comp orelse if (getAwaitResult(
zig_compiler.loop.allocator,
handle,
)) |_| unreachable else |err| err;
return optional_comp orelse if (await frame) |_| unreachable else |err| err;
}
async fn createAsync(
@ -376,15 +373,9 @@ pub const Compilation = struct {
is_static: bool,
zig_lib_dir: []const u8,
) !void {
// workaround for https://github.com/ziglang/zig/issues/1194
suspend {
resume @handle();
}
const loop = zig_compiler.loop;
const allocator = zig_compiler.allocator;
var comp = Compilation{
.loop = loop,
.arena_allocator = std.heap.ArenaAllocator.init(loop.allocator),
.arena_allocator = std.heap.ArenaAllocator.init(allocator),
.zig_compiler = zig_compiler,
.events = undefined,
.root_src_path = root_src_path,
@ -394,10 +385,10 @@ pub const Compilation = struct {
.build_mode = build_mode,
.zig_lib_dir = zig_lib_dir,
.zig_std_dir = undefined,
.tmp_dir = event.Future(BuildError![]u8).init(loop),
.destroy_handle = @handle(),
.main_loop_handle = undefined,
.main_loop_future = event.Future(void).init(loop),
.tmp_dir = event.Future(BuildError![]u8).init(),
// .destroy_frame = @frame(),
// .main_loop_frame = undefined,
.main_loop_future = event.Future(void).init(),
.name = undefined,
.llvm_triple = undefined,
@ -426,7 +417,7 @@ pub const Compilation = struct {
.rpath_list = [_][]const u8{},
.assembly_files = [_][]const u8{},
.link_objects = [_][]const u8{},
.fn_link_set = event.Locked(FnLinkSet).init(loop, FnLinkSet.init()),
.fn_link_set = event.Locked(FnLinkSet).init(FnLinkSet.init()),
.windows_subsystem_windows = false,
.windows_subsystem_console = false,
.link_libs_list = undefined,
@ -438,14 +429,14 @@ pub const Compilation = struct {
.test_name_prefix = null,
.emit_file_type = Emit.Binary,
.link_out_file = null,
.exported_symbol_names = event.Locked(Decl.Table).init(loop, Decl.Table.init(loop.allocator)),
.prelink_group = event.Group(BuildError!void).init(loop),
.deinit_group = event.Group(void).init(loop),
.compile_errors = event.Locked(CompileErrList).init(loop, CompileErrList.init(loop.allocator)),
.int_type_table = event.Locked(IntTypeTable).init(loop, IntTypeTable.init(loop.allocator)),
.array_type_table = event.Locked(ArrayTypeTable).init(loop, ArrayTypeTable.init(loop.allocator)),
.ptr_type_table = event.Locked(PtrTypeTable).init(loop, PtrTypeTable.init(loop.allocator)),
.fn_type_table = event.Locked(FnTypeTable).init(loop, FnTypeTable.init(loop.allocator)),
.exported_symbol_names = event.Locked(Decl.Table).init(Decl.Table.init(allocator)),
.prelink_group = event.Group(BuildError!void).init(allocator),
.deinit_group = event.Group(void).init(allocator),
.compile_errors = event.Locked(CompileErrList).init(CompileErrList.init(allocator)),
.int_type_table = event.Locked(IntTypeTable).init(IntTypeTable.init(allocator)),
.array_type_table = event.Locked(ArrayTypeTable).init(ArrayTypeTable.init(allocator)),
.ptr_type_table = event.Locked(PtrTypeTable).init(PtrTypeTable.init(allocator)),
.fn_type_table = event.Locked(FnTypeTable).init(FnTypeTable.init(allocator)),
.c_int_types = undefined,
.meta_type = undefined,
@ -471,7 +462,7 @@ pub const Compilation = struct {
.have_err_ret_tracing = false,
.primitive_type_table = undefined,
.fs_watch = undefined,
// .fs_watch = undefined,
};
comp.link_libs_list = ArrayList(*LinkLib).init(comp.arena());
comp.primitive_type_table = TypeTable.init(comp.arena());
@ -485,12 +476,12 @@ pub const Compilation = struct {
}
comp.name = try Buffer.init(comp.arena(), name);
comp.llvm_triple = try target.getTriple(comp.arena());
comp.llvm_target = try Target.llvmTargetFromTriple(comp.llvm_triple);
comp.llvm_triple = try util.getTriple(comp.arena(), target);
comp.llvm_target = try util.llvmTargetFromTriple(comp.llvm_triple);
comp.zig_std_dir = try std.fs.path.join(comp.arena(), [_][]const u8{ zig_lib_dir, "std" });
const opt_level = switch (build_mode) {
builtin.Mode.Debug => llvm.CodeGenLevelNone,
.Debug => llvm.CodeGenLevelNone,
else => llvm.CodeGenLevelAggressive,
};
@ -516,7 +507,7 @@ pub const Compilation = struct {
opt_level,
reloc_mode,
llvm.CodeModelDefault,
false // TODO: add -ffunction-sections option
false, // TODO: add -ffunction-sections option
) orelse return error.OutOfMemory;
defer llvm.DisposeTargetMachine(comp.target_machine);
@ -526,8 +517,11 @@ pub const Compilation = struct {
comp.target_layout_str = llvm.CopyStringRepOfTargetData(comp.target_data_ref) orelse return error.OutOfMemory;
defer llvm.DisposeMessage(comp.target_layout_str);
comp.events = try event.Channel(Event).create(comp.loop, 0);
defer comp.events.destroy();
comp.events = try allocator.create(event.Channel(Event));
defer allocator.destroy(comp.events);
comp.events.init([0]Event{});
defer comp.events.deinit();
if (root_src_path) |root_src| {
const dirname = std.fs.path.dirname(root_src) orelse ".";
@ -540,13 +534,13 @@ pub const Compilation = struct {
comp.root_package = try Package.create(comp.arena(), ".", "");
}
comp.fs_watch = try fs.Watch(*Scope.Root).create(loop, 16);
defer comp.fs_watch.destroy();
// comp.fs_watch = try fs.Watch(*Scope.Root).create(16);
// defer comp.fs_watch.destroy();
try comp.initTypes();
defer comp.primitive_type_table.deinit();
comp.main_loop_handle = async comp.mainLoop() catch unreachable;
// comp.main_loop_frame = async comp.mainLoop();
// Set this to indicate that initialization completed successfully.
// from here on out we must not return an error.
// This must occur before the first suspend/await.
@ -555,12 +549,13 @@ pub const Compilation = struct {
suspend;
// From here on is cleanup.
await (async comp.deinit_group.wait() catch unreachable);
comp.deinit_group.wait();
if (comp.tmp_dir.getOrNull()) |tmp_dir_result| if (tmp_dir_result.*) |tmp_dir| {
// TODO evented I/O?
std.fs.deleteTree(comp.arena(), tmp_dir) catch {};
} else |_| {};
if (comp.tmp_dir.getOrNull()) |tmp_dir_result|
if (tmp_dir_result.*) |tmp_dir| {
// TODO evented I/O?
std.fs.deleteTree(tmp_dir) catch {};
} else |_| {};
}
/// it does ref the result because it could be an arbitrary integer size
@ -578,10 +573,10 @@ pub const Compilation = struct {
error.Overflow => return error.Overflow,
error.InvalidCharacter => unreachable, // we just checked the characters above
};
const int_type = try await (async Type.Int.get(comp, Type.Int.Key{
const int_type = try Type.Int.get(comp, Type.Int.Key{
.bit_count = bit_count,
.is_signed = is_signed,
}) catch unreachable);
});
errdefer int_type.base.base.deref();
return &int_type.base;
},
@ -603,12 +598,12 @@ pub const Compilation = struct {
.base = Type{
.name = "type",
.base = Value{
.id = Value.Id.Type,
.id = .Type,
.typ = undefined,
.ref_count = std.atomic.Int(usize).init(3), // 3 because it references itself twice
},
.id = builtin.TypeId.Type,
.abi_alignment = Type.AbiAlignment.init(comp.loop),
.id = .Type,
.abi_alignment = Type.AbiAlignment.init(),
},
.value = undefined,
};
@ -621,12 +616,12 @@ pub const Compilation = struct {
.base = Type{
.name = "void",
.base = Value{
.id = Value.Id.Type,
.id = .Type,
.typ = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
.id = builtin.TypeId.Void,
.abi_alignment = Type.AbiAlignment.init(comp.loop),
.id = .Void,
.abi_alignment = Type.AbiAlignment.init(),
},
};
assert((try comp.primitive_type_table.put(comp.void_type.base.name, &comp.void_type.base)) == null);
@ -636,12 +631,12 @@ pub const Compilation = struct {
.base = Type{
.name = "noreturn",
.base = Value{
.id = Value.Id.Type,
.id = .Type,
.typ = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
.id = builtin.TypeId.NoReturn,
.abi_alignment = Type.AbiAlignment.init(comp.loop),
.id = .NoReturn,
.abi_alignment = Type.AbiAlignment.init(),
},
};
assert((try comp.primitive_type_table.put(comp.noreturn_type.base.name, &comp.noreturn_type.base)) == null);
@ -651,12 +646,12 @@ pub const Compilation = struct {
.base = Type{
.name = "comptime_int",
.base = Value{
.id = Value.Id.Type,
.id = .Type,
.typ = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
.id = builtin.TypeId.ComptimeInt,
.abi_alignment = Type.AbiAlignment.init(comp.loop),
.id = .ComptimeInt,
.abi_alignment = Type.AbiAlignment.init(),
},
};
assert((try comp.primitive_type_table.put(comp.comptime_int_type.base.name, &comp.comptime_int_type.base)) == null);
@ -666,12 +661,12 @@ pub const Compilation = struct {
.base = Type{
.name = "bool",
.base = Value{
.id = Value.Id.Type,
.id = .Type,
.typ = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
.id = builtin.TypeId.Bool,
.abi_alignment = Type.AbiAlignment.init(comp.loop),
.id = .Bool,
.abi_alignment = Type.AbiAlignment.init(),
},
};
assert((try comp.primitive_type_table.put(comp.bool_type.base.name, &comp.bool_type.base)) == null);
@ -679,7 +674,7 @@ pub const Compilation = struct {
comp.void_value = try comp.arena().create(Value.Void);
comp.void_value.* = Value.Void{
.base = Value{
.id = Value.Id.Void,
.id = .Void,
.typ = &Type.Void.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
@ -688,7 +683,7 @@ pub const Compilation = struct {
comp.true_value = try comp.arena().create(Value.Bool);
comp.true_value.* = Value.Bool{
.base = Value{
.id = Value.Id.Bool,
.id = .Bool,
.typ = &Type.Bool.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
@ -698,7 +693,7 @@ pub const Compilation = struct {
comp.false_value = try comp.arena().create(Value.Bool);
comp.false_value.* = Value.Bool{
.base = Value{
.id = Value.Id.Bool,
.id = .Bool,
.typ = &Type.Bool.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
@ -708,7 +703,7 @@ pub const Compilation = struct {
comp.noreturn_value = try comp.arena().create(Value.NoReturn);
comp.noreturn_value.* = Value.NoReturn{
.base = Value{
.id = Value.Id.NoReturn,
.id = .NoReturn,
.typ = &Type.NoReturn.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
@ -720,16 +715,16 @@ pub const Compilation = struct {
.base = Type{
.name = cint.zig_name,
.base = Value{
.id = Value.Id.Type,
.id = .Type,
.typ = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
.id = builtin.TypeId.Int,
.abi_alignment = Type.AbiAlignment.init(comp.loop),
.id = .Int,
.abi_alignment = Type.AbiAlignment.init(),
},
.key = Type.Int.Key{
.is_signed = cint.is_signed,
.bit_count = comp.target.cIntTypeSizeInBits(cint.id),
.bit_count = cint.sizeInBits(comp.target),
},
.garbage_node = undefined,
};
@ -741,12 +736,12 @@ pub const Compilation = struct {
.base = Type{
.name = "u8",
.base = Value{
.id = Value.Id.Type,
.id = .Type,
.typ = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
.id = builtin.TypeId.Int,
.abi_alignment = Type.AbiAlignment.init(comp.loop),
.id = .Int,
.abi_alignment = Type.AbiAlignment.init(),
},
.key = Type.Int.Key{
.is_signed = false,
@ -758,8 +753,8 @@ pub const Compilation = struct {
}
pub fn destroy(self: *Compilation) void {
cancel self.main_loop_handle;
resume self.destroy_handle;
// await self.main_loop_frame;
// resume self.destroy_frame;
}
fn start(self: *Compilation) void {
@ -768,13 +763,13 @@ pub const Compilation = struct {
async fn mainLoop(self: *Compilation) void {
// wait until start() is called
_ = await (async self.main_loop_future.get() catch unreachable);
_ = self.main_loop_future.get();
var build_result = await (async self.initialCompile() catch unreachable);
var build_result = self.initialCompile();
while (true) {
const link_result = if (build_result) blk: {
break :blk await (async self.maybeLink() catch unreachable);
break :blk self.maybeLink();
} else |err| err;
// this makes a handy error return trace and stack trace in debug mode
if (std.debug.runtime_safety) {
@ -782,65 +777,65 @@ pub const Compilation = struct {
}
const compile_errors = blk: {
const held = await (async self.compile_errors.acquire() catch unreachable);
const held = self.compile_errors.acquire();
defer held.release();
break :blk held.value.toOwnedSlice();
};
if (link_result) |_| {
if (compile_errors.len == 0) {
await (async self.events.put(Event.Ok) catch unreachable);
self.events.put(Event.Ok);
} else {
await (async self.events.put(Event{ .Fail = compile_errors }) catch unreachable);
self.events.put(Event{ .Fail = compile_errors });
}
} else |err| {
// if there's an error then the compile errors have dangling references
self.gpa().free(compile_errors);
await (async self.events.put(Event{ .Error = err }) catch unreachable);
self.events.put(Event{ .Error = err });
}
// First, get an item from the watch channel, waiting on the channel.
var group = event.Group(BuildError!void).init(self.loop);
{
const ev = (await (async self.fs_watch.channel.get() catch unreachable)) catch |err| {
build_result = err;
continue;
};
const root_scope = ev.data;
group.call(rebuildFile, self, root_scope) catch |err| {
build_result = err;
continue;
};
}
// Next, get all the items from the channel that are buffered up.
while (await (async self.fs_watch.channel.getOrNull() catch unreachable)) |ev_or_err| {
if (ev_or_err) |ev| {
const root_scope = ev.data;
group.call(rebuildFile, self, root_scope) catch |err| {
build_result = err;
continue;
};
} else |err| {
build_result = err;
continue;
}
}
build_result = await (async group.wait() catch unreachable);
// // First, get an item from the watch channel, waiting on the channel.
// var group = event.Group(BuildError!void).init(self.gpa());
// {
// const ev = (self.fs_watch.channel.get()) catch |err| {
// build_result = err;
// continue;
// };
// const root_scope = ev.data;
// group.call(rebuildFile, self, root_scope) catch |err| {
// build_result = err;
// continue;
// };
// }
// // Next, get all the items from the channel that are buffered up.
// while (self.fs_watch.channel.getOrNull()) |ev_or_err| {
// if (ev_or_err) |ev| {
// const root_scope = ev.data;
// group.call(rebuildFile, self, root_scope) catch |err| {
// build_result = err;
// continue;
// };
// } else |err| {
// build_result = err;
// continue;
// }
// }
// build_result = group.wait();
}
}
async fn rebuildFile(self: *Compilation, root_scope: *Scope.Root) !void {
const tree_scope = blk: {
const source_code = (await (async fs.readFile(
self.loop,
root_scope.realpath,
max_src_size,
) catch unreachable)) catch |err| {
try self.addCompileErrorCli(root_scope.realpath, "unable to open: {}", @errorName(err));
return;
};
errdefer self.gpa().free(source_code);
const source_code = "";
// const source_code = fs.readFile(
// root_scope.realpath,
// max_src_size,
// ) catch |err| {
// try self.addCompileErrorCli(root_scope.realpath, "unable to open: {}", @errorName(err));
// return;
// };
// errdefer self.gpa().free(source_code);
const tree = try std.zig.parse(self.gpa(), source_code);
errdefer {
@ -856,19 +851,18 @@ pub const Compilation = struct {
const msg = try Msg.createFromParseErrorAndScope(self, tree_scope, parse_error);
errdefer msg.destroy();
try await (async self.addCompileErrorAsync(msg) catch unreachable);
try self.addCompileErrorAsync(msg);
}
if (tree_scope.tree.errors.len != 0) {
return;
}
const locked_table = await (async root_scope.decls.table.acquireWrite() catch unreachable);
const locked_table = root_scope.decls.table.acquireWrite();
defer locked_table.release();
var decl_group = event.Group(BuildError!void).init(self.loop);
defer decl_group.deinit();
var decl_group = event.Group(BuildError!void).init(self.gpa());
try await try async self.rebuildChangedDecls(
try self.rebuildChangedDecls(
&decl_group,
locked_table.value,
root_scope.decls,
@ -876,7 +870,7 @@ pub const Compilation = struct {
tree_scope,
);
try await (async decl_group.wait() catch unreachable);
try decl_group.wait();
}
async fn rebuildChangedDecls(
@ -894,15 +888,15 @@ pub const Compilation = struct {
while (ast_it.next()) |decl_ptr| {
const decl = decl_ptr.*;
switch (decl.id) {
ast.Node.Id.Comptime => {
.Comptime => {
const comptime_node = @fieldParentPtr(ast.Node.Comptime, "base", decl);
// TODO connect existing comptime decls to updated source files
try self.prelink_group.call(addCompTimeBlock, self, tree_scope, &decl_scope.base, comptime_node);
},
ast.Node.Id.VarDecl => @panic("TODO"),
ast.Node.Id.FnProto => {
.VarDecl => @panic("TODO"),
.FnProto => {
const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl);
const name = if (fn_proto.name_token) |name_token| tree_scope.tree.tokenSlice(name_token) else {
@ -942,11 +936,11 @@ pub const Compilation = struct {
.id = Decl.Id.Fn,
.name = name,
.visib = parseVisibToken(tree_scope.tree, fn_proto.visib_token),
.resolution = event.Future(BuildError!void).init(self.loop),
.resolution = event.Future(BuildError!void).init(),
.parent_scope = &decl_scope.base,
.tree_scope = tree_scope,
},
.value = Decl.Fn.Val{ .Unresolved = {} },
.value = .Unresolved,
.fn_proto = fn_proto,
};
tree_scope.base.ref();
@ -955,7 +949,7 @@ pub const Compilation = struct {
try group.call(addTopLevelDecl, self, &fn_decl.base, locked_table);
}
},
ast.Node.Id.TestDecl => @panic("TODO"),
.TestDecl => @panic("TODO"),
else => unreachable,
}
}
@ -982,26 +976,26 @@ pub const Compilation = struct {
};
defer root_scope.base.deref(self);
assert((try await try async self.fs_watch.addFile(root_scope.realpath, root_scope)) == null);
try await try async self.rebuildFile(root_scope);
// assert((try self.fs_watch.addFile(root_scope.realpath, root_scope)) == null);
try self.rebuildFile(root_scope);
}
}
async fn maybeLink(self: *Compilation) !void {
(await (async self.prelink_group.wait() catch unreachable)) catch |err| switch (err) {
(self.prelink_group.wait()) catch |err| switch (err) {
error.SemanticAnalysisFailed => {},
else => return err,
};
const any_prelink_errors = blk: {
const compile_errors = await (async self.compile_errors.acquire() catch unreachable);
const compile_errors = self.compile_errors.acquire();
defer compile_errors.release();
break :blk compile_errors.value.len != 0;
};
if (!any_prelink_errors) {
try await (async link(self) catch unreachable);
try link(self);
}
}
@ -1013,12 +1007,12 @@ pub const Compilation = struct {
node: *ast.Node,
expected_type: ?*Type,
) !*ir.Code {
const unanalyzed_code = try await (async ir.gen(
const unanalyzed_code = try ir.gen(
comp,
node,
tree_scope,
scope,
) catch unreachable);
);
defer unanalyzed_code.destroy(comp.gpa());
if (comp.verbose_ir) {
@ -1026,11 +1020,11 @@ pub const Compilation = struct {
unanalyzed_code.dump();
}
const analyzed_code = try await (async ir.analyze(
const analyzed_code = try ir.analyze(
comp,
unanalyzed_code,
expected_type,
) catch unreachable);
);
errdefer analyzed_code.destroy(comp.gpa());
if (comp.verbose_ir) {
@ -1046,17 +1040,17 @@ pub const Compilation = struct {
tree_scope: *Scope.AstTree,
scope: *Scope,
comptime_node: *ast.Node.Comptime,
) !void {
) BuildError!void {
const void_type = Type.Void.get(comp);
defer void_type.base.base.deref(comp);
const analyzed_code = (await (async genAndAnalyzeCode(
const analyzed_code = genAndAnalyzeCode(
comp,
tree_scope,
scope,
comptime_node.expr,
&void_type.base,
) catch unreachable)) catch |err| switch (err) {
) catch |err| switch (err) {
// This poison value should not cause the errdefers to run. It simply means
// that comp.compile_errors is populated.
error.SemanticAnalysisFailed => return {},
@ -1069,7 +1063,7 @@ pub const Compilation = struct {
self: *Compilation,
decl: *Decl,
locked_table: *Decl.Table,
) !void {
) BuildError!void {
const is_export = decl.isExported(decl.tree_scope.tree);
if (is_export) {
@ -1109,17 +1103,17 @@ pub const Compilation = struct {
async fn addCompileErrorAsync(
self: *Compilation,
msg: *Msg,
) !void {
) BuildError!void {
errdefer msg.destroy();
const compile_errors = await (async self.compile_errors.acquire() catch unreachable);
const compile_errors = self.compile_errors.acquire();
defer compile_errors.release();
try compile_errors.value.append(msg);
}
async fn verifyUniqueSymbol(self: *Compilation, decl: *Decl) !void {
const exported_symbol_names = await (async self.exported_symbol_names.acquire() catch unreachable);
async fn verifyUniqueSymbol(self: *Compilation, decl: *Decl) BuildError!void {
const exported_symbol_names = self.exported_symbol_names.acquire();
defer exported_symbol_names.release();
if (try exported_symbol_names.value.put(decl.name, decl)) |other_decl| {
@ -1173,14 +1167,14 @@ pub const Compilation = struct {
/// cancels itself so no need to await or cancel the promise.
async fn startFindingNativeLibC(self: *Compilation) void {
await (async self.loop.yield() catch unreachable);
std.event.Loop.instance.?.yield();
// we don't care if it fails, we're just trying to kick off the future resolution
_ = (await (async self.zig_compiler.getNativeLibC() catch unreachable)) catch return;
_ = (self.zig_compiler.getNativeLibC()) catch return;
}
/// General Purpose Allocator. Must free when done.
fn gpa(self: Compilation) *mem.Allocator {
return self.loop.allocator;
return self.zig_compiler.allocator;
}
/// Arena Allocator. Automatically freed when the Compilation is destroyed.
@ -1191,8 +1185,8 @@ pub const Compilation = struct {
/// If the temporary directory for this compilation has not been created, it creates it.
/// Then it creates a random file name in that dir and returns it.
pub async fn createRandomOutputPath(self: *Compilation, suffix: []const u8) !Buffer {
const tmp_dir = try await (async self.getTmpDir() catch unreachable);
const file_prefix = await (async self.getRandomFileName() catch unreachable);
const tmp_dir = try self.getTmpDir();
const file_prefix = self.getRandomFileName();
const file_name = try std.fmt.allocPrint(self.gpa(), "{}{}", file_prefix[0..], suffix);
defer self.gpa().free(file_name);
@ -1207,14 +1201,14 @@ pub const Compilation = struct {
/// Then returns it. The directory is unique to this Compilation and cleaned up when
/// the Compilation deinitializes.
async fn getTmpDir(self: *Compilation) ![]const u8 {
if (await (async self.tmp_dir.start() catch unreachable)) |ptr| return ptr.*;
self.tmp_dir.data = await (async self.getTmpDirImpl() catch unreachable);
if (self.tmp_dir.start()) |ptr| return ptr.*;
self.tmp_dir.data = self.getTmpDirImpl();
self.tmp_dir.resolve();
return self.tmp_dir.data;
}
async fn getTmpDirImpl(self: *Compilation) ![]u8 {
const comp_dir_name = await (async self.getRandomFileName() catch unreachable);
const comp_dir_name = self.getRandomFileName();
const zig_dir_path = try getZigDir(self.gpa());
defer self.gpa().free(zig_dir_path);
@ -1233,7 +1227,7 @@ pub const Compilation = struct {
var rand_bytes: [9]u8 = undefined;
{
const held = await (async self.zig_compiler.prng.acquire() catch unreachable);
const held = self.zig_compiler.prng.acquire();
defer held.release();
held.value.random.bytes(rand_bytes[0..]);
@ -1256,7 +1250,7 @@ pub const Compilation = struct {
node: *ast.Node,
expected_type: *Type,
) !*Value {
const analyzed_code = try await (async comp.genAndAnalyzeCode(tree_scope, scope, node, expected_type) catch unreachable);
const analyzed_code = try comp.genAndAnalyzeCode(tree_scope, scope, node, expected_type);
defer analyzed_code.destroy(comp.gpa());
return analyzed_code.getCompTimeResult(comp);
@ -1266,17 +1260,17 @@ pub const Compilation = struct {
const meta_type = &Type.MetaType.get(comp).base;
defer meta_type.base.deref(comp);
const result_val = try await (async comp.analyzeConstValue(tree_scope, scope, node, meta_type) catch unreachable);
const result_val = try comp.analyzeConstValue(tree_scope, scope, node, meta_type);
errdefer result_val.base.deref(comp);
return result_val.cast(Type).?;
}
/// This declaration has been blessed as going into the final code generation.
pub async fn resolveDecl(comp: *Compilation, decl: *Decl) !void {
if (await (async decl.resolution.start() catch unreachable)) |ptr| return ptr.*;
pub async fn resolveDecl(comp: *Compilation, decl: *Decl) BuildError!void {
if (decl.resolution.start()) |ptr| return ptr.*;
decl.resolution.data = try await (async generateDecl(comp, decl) catch unreachable);
decl.resolution.data = try generateDecl(comp, decl);
decl.resolution.resolve();
return decl.resolution.data;
}
@ -1295,24 +1289,24 @@ fn parseVisibToken(tree: *ast.Tree, optional_token_index: ?ast.TokenIndex) Visib
/// The function that actually does the generation.
async fn generateDecl(comp: *Compilation, decl: *Decl) !void {
switch (decl.id) {
Decl.Id.Var => @panic("TODO"),
Decl.Id.Fn => {
.Var => @panic("TODO"),
.Fn => {
const fn_decl = @fieldParentPtr(Decl.Fn, "base", decl);
return await (async generateDeclFn(comp, fn_decl) catch unreachable);
return generateDeclFn(comp, fn_decl);
},
Decl.Id.CompTime => @panic("TODO"),
.CompTime => @panic("TODO"),
}
}
async fn generateDeclFn(comp: *Compilation, fn_decl: *Decl.Fn) !void {
const tree_scope = fn_decl.base.tree_scope;
const body_node = fn_decl.fn_proto.body_node orelse return await (async generateDeclFnProto(comp, fn_decl) catch unreachable);
const body_node = fn_decl.fn_proto.body_node orelse return generateDeclFnProto(comp, fn_decl);
const fndef_scope = try Scope.FnDef.create(comp, fn_decl.base.parent_scope);
defer fndef_scope.base.deref(comp);
const fn_type = try await (async analyzeFnType(comp, tree_scope, fn_decl.base.parent_scope, fn_decl.fn_proto) catch unreachable);
const fn_type = try analyzeFnType(comp, tree_scope, fn_decl.base.parent_scope, fn_decl.fn_proto);
defer fn_type.base.base.deref(comp);
var symbol_name = try std.Buffer.init(comp.gpa(), fn_decl.base.name);
@ -1356,12 +1350,12 @@ async fn generateDeclFn(comp: *Compilation, fn_decl: *Decl.Fn) !void {
try fn_type.non_key.Normal.variable_list.append(var_scope);
}
const analyzed_code = try await (async comp.genAndAnalyzeCode(
const analyzed_code = try comp.genAndAnalyzeCode(
tree_scope,
fn_val.child_scope,
body_node,
fn_type.key.data.Normal.return_type,
) catch unreachable);
);
errdefer analyzed_code.destroy(comp.gpa());
assert(fn_val.block_scope != null);
@ -1372,13 +1366,13 @@ async fn generateDeclFn(comp: *Compilation, fn_decl: *Decl.Fn) !void {
try comp.prelink_group.call(addFnToLinkSet, comp, fn_val);
}
async fn addFnToLinkSet(comp: *Compilation, fn_val: *Value.Fn) void {
async fn addFnToLinkSet(comp: *Compilation, fn_val: *Value.Fn) Compilation.BuildError!void {
fn_val.base.ref();
defer fn_val.base.deref(comp);
fn_val.link_set_node.data = fn_val;
const held = await (async comp.fn_link_set.acquire() catch unreachable);
const held = comp.fn_link_set.acquire();
defer held.release();
held.value.append(fn_val.link_set_node);
@ -1395,10 +1389,10 @@ async fn analyzeFnType(
fn_proto: *ast.Node.FnProto,
) !*Type.Fn {
const return_type_node = switch (fn_proto.return_type) {
ast.Node.FnProto.ReturnType.Explicit => |n| n,
ast.Node.FnProto.ReturnType.InferErrorSet => |n| n,
.Explicit => |n| n,
.InferErrorSet => |n| n,
};
const return_type = try await (async comp.analyzeTypeExpr(tree_scope, scope, return_type_node) catch unreachable);
const return_type = try comp.analyzeTypeExpr(tree_scope, scope, return_type_node);
return_type.base.deref(comp);
var params = ArrayList(Type.Fn.Param).init(comp.gpa());
@ -1414,7 +1408,7 @@ async fn analyzeFnType(
var it = fn_proto.params.iterator(0);
while (it.next()) |param_node_ptr| {
const param_node = param_node_ptr.*.cast(ast.Node.ParamDecl).?;
const param_type = try await (async comp.analyzeTypeExpr(tree_scope, scope, param_node.type_node) catch unreachable);
const param_type = try comp.analyzeTypeExpr(tree_scope, scope, param_node.type_node);
errdefer param_type.base.deref(comp);
try params.append(Type.Fn.Param{
.typ = param_type,
@ -1430,7 +1424,7 @@ async fn analyzeFnType(
.return_type = return_type,
.params = params.toOwnedSlice(),
.is_var_args = false, // TODO
.cc = Type.Fn.CallingConvention.Auto, // TODO
.cc = .Unspecified, // TODO
},
},
};
@ -1443,7 +1437,7 @@ async fn analyzeFnType(
comp.gpa().free(key.data.Normal.params);
};
const fn_type = try await (async Type.Fn.get(comp, key) catch unreachable);
const fn_type = try Type.Fn.get(comp, key);
key_consumed = true;
errdefer fn_type.base.base.deref(comp);
@ -1451,12 +1445,12 @@ async fn analyzeFnType(
}
async fn generateDeclFnProto(comp: *Compilation, fn_decl: *Decl.Fn) !void {
const fn_type = try await (async analyzeFnType(
const fn_type = try analyzeFnType(
comp,
fn_decl.base.tree_scope,
fn_decl.base.parent_scope,
fn_decl.fn_proto,
) catch unreachable);
);
defer fn_type.base.base.deref(comp);
var symbol_name = try std.Buffer.init(comp.gpa(), fn_decl.base.name);
@ -1468,14 +1462,3 @@ async fn generateDeclFnProto(comp: *Compilation, fn_decl: *Decl.Fn) !void {
fn_decl.value = Decl.Fn.Val{ .FnProto = fn_proto_val };
symbol_name_consumed = true;
}
// TODO these are hacks which should probably be solved by the language
fn getAwaitResult(allocator: *Allocator, handle: var) @typeInfo(@typeOf(handle)).Promise.child.? {
var result: ?@typeInfo(@typeOf(handle)).Promise.child.? = null;
cancel (async<allocator> getAwaitResultAsync(handle, &result) catch unreachable);
return result.?;
}
async fn getAwaitResultAsync(handle: var, out: *?@typeInfo(@typeOf(handle)).Promise.child.?) void {
out.* = await handle;
}

View File

@ -29,7 +29,7 @@ pub const Decl = struct {
pub fn isExported(base: *const Decl, tree: *ast.Tree) bool {
switch (base.id) {
Id.Fn => {
.Fn => {
const fn_decl = @fieldParentPtr(Fn, "base", base);
return fn_decl.isExported(tree);
},
@ -39,7 +39,7 @@ pub const Decl = struct {
pub fn getSpan(base: *const Decl) errmsg.Span {
switch (base.id) {
Id.Fn => {
.Fn => {
const fn_decl = @fieldParentPtr(Fn, "base", base);
const fn_proto = fn_decl.fn_proto;
const start = fn_proto.fn_token;
@ -74,7 +74,7 @@ pub const Decl = struct {
// TODO https://github.com/ziglang/zig/issues/683 and then make this anonymous
pub const Val = union(enum) {
Unresolved: void,
Unresolved,
Fn: *Value.Fn,
FnProto: *Value.FnProto,
};
@ -83,7 +83,7 @@ pub const Decl = struct {
return if (self.fn_proto.extern_export_inline_token) |tok_index| x: {
const token = tree.tokens.at(tok_index);
break :x switch (token.id) {
Token.Id.Extern => tree.tokenSlicePtr(token),
.Extern => tree.tokenSlicePtr(token),
else => null,
};
} else null;
@ -92,7 +92,7 @@ pub const Decl = struct {
pub fn isExported(self: Fn, tree: *ast.Tree) bool {
if (self.fn_proto.extern_export_inline_token) |tok_index| {
const token = tree.tokens.at(tok_index);
return token.id == Token.Id.Keyword_export;
return token.id == .Keyword_export;
} else {
return false;
}
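
The `.Extern` and `.Keyword_export` prongs above use enum literal inference, one of the recurring mechanical changes in this diff. A minimal standalone sketch of the shorthand (the `Color`/`isEnabled` names are illustrative, not from this repository):

const std = @import("std");

const Color = enum { Auto, On, Off };

fn isEnabled(color: Color) bool {
    // When the target type is known, `.Off` infers `Color.Off`,
    // both in comparisons and in switch prongs.
    if (color == .Off) return false;
    return switch (color) {
        .Auto, .On => true,
        .Off => false,
    };
}

test "enum literal inference" {
    std.debug.assert(isEnabled(.On));
    std.debug.assert(!isEnabled(.Off));
}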

View File

@ -62,17 +62,17 @@ pub const Msg = struct {
pub fn destroy(self: *Msg) void {
switch (self.data) {
Data.Cli => |cli| {
.Cli => |cli| {
cli.allocator.free(self.text);
cli.allocator.free(self.realpath);
cli.allocator.destroy(self);
},
Data.PathAndTree => |path_and_tree| {
.PathAndTree => |path_and_tree| {
path_and_tree.allocator.free(self.text);
path_and_tree.allocator.free(self.realpath);
path_and_tree.allocator.destroy(self);
},
Data.ScopeAndComp => |scope_and_comp| {
.ScopeAndComp => |scope_and_comp| {
scope_and_comp.tree_scope.base.deref(scope_and_comp.compilation);
scope_and_comp.compilation.gpa().free(self.text);
scope_and_comp.compilation.gpa().free(self.realpath);
@ -83,11 +83,11 @@ pub const Msg = struct {
fn getAllocator(self: *const Msg) *mem.Allocator {
switch (self.data) {
Data.Cli => |cli| return cli.allocator,
Data.PathAndTree => |path_and_tree| {
.Cli => |cli| return cli.allocator,
.PathAndTree => |path_and_tree| {
return path_and_tree.allocator;
},
Data.ScopeAndComp => |scope_and_comp| {
.ScopeAndComp => |scope_and_comp| {
return scope_and_comp.compilation.gpa();
},
}
@ -95,11 +95,11 @@ pub const Msg = struct {
pub fn getTree(self: *const Msg) *ast.Tree {
switch (self.data) {
Data.Cli => unreachable,
Data.PathAndTree => |path_and_tree| {
.Cli => unreachable,
.PathAndTree => |path_and_tree| {
return path_and_tree.tree;
},
Data.ScopeAndComp => |scope_and_comp| {
.ScopeAndComp => |scope_and_comp| {
return scope_and_comp.tree_scope.tree;
},
}
@ -107,9 +107,9 @@ pub const Msg = struct {
pub fn getSpan(self: *const Msg) Span {
return switch (self.data) {
Data.Cli => unreachable,
Data.PathAndTree => |path_and_tree| path_and_tree.span,
Data.ScopeAndComp => |scope_and_comp| scope_and_comp.span,
.Cli => unreachable,
.PathAndTree => |path_and_tree| path_and_tree.span,
.ScopeAndComp => |scope_and_comp| scope_and_comp.span,
};
}
@ -230,7 +230,7 @@ pub const Msg = struct {
pub fn printToStream(msg: *const Msg, stream: var, color_on: bool) !void {
switch (msg.data) {
Data.Cli => {
.Cli => {
try stream.print("{}:-:-: error: {}\n", msg.realpath, msg.text);
return;
},
@ -279,9 +279,9 @@ pub const Msg = struct {
pub fn printToFile(msg: *const Msg, file: fs.File, color: Color) !void {
const color_on = switch (color) {
Color.Auto => file.isTty(),
Color.On => true,
Color.Off => false,
.Auto => file.isTty(),
.On => true,
.Off => false,
};
var stream = &file.outStream().stream;
return msg.printToStream(stream, color_on);

View File

@ -1,5 +1,4 @@
const std = @import("std");
const builtin = @import("builtin");
const Compilation = @import("compilation.zig").Compilation;
const Scope = @import("scope.zig").Scope;
const ast = std.zig.ast;
@ -33,13 +32,13 @@ pub const IrVal = union(enum) {
pub fn dump(self: IrVal) void {
switch (self) {
IrVal.Unknown => std.debug.warn("Unknown"),
IrVal.KnownType => |typ| {
.Unknown => std.debug.warn("Unknown"),
.KnownType => |typ| {
std.debug.warn("KnownType(");
typ.dump();
std.debug.warn(")");
},
IrVal.KnownValue => |value| {
.KnownValue => |value| {
std.debug.warn("KnownValue(");
value.dump();
std.debug.warn(")");
@ -113,37 +112,37 @@ pub const Inst = struct {
pub async fn analyze(base: *Inst, ira: *Analyze) Analyze.Error!*Inst {
switch (base.id) {
Id.Return => return @fieldParentPtr(Return, "base", base).analyze(ira),
Id.Const => return @fieldParentPtr(Const, "base", base).analyze(ira),
Id.Call => return @fieldParentPtr(Call, "base", base).analyze(ira),
Id.DeclRef => return await (async @fieldParentPtr(DeclRef, "base", base).analyze(ira) catch unreachable),
Id.Ref => return await (async @fieldParentPtr(Ref, "base", base).analyze(ira) catch unreachable),
Id.DeclVar => return @fieldParentPtr(DeclVar, "base", base).analyze(ira),
Id.CheckVoidStmt => return @fieldParentPtr(CheckVoidStmt, "base", base).analyze(ira),
Id.Phi => return @fieldParentPtr(Phi, "base", base).analyze(ira),
Id.Br => return @fieldParentPtr(Br, "base", base).analyze(ira),
Id.AddImplicitReturnType => return @fieldParentPtr(AddImplicitReturnType, "base", base).analyze(ira),
Id.PtrType => return await (async @fieldParentPtr(PtrType, "base", base).analyze(ira) catch unreachable),
Id.VarPtr => return await (async @fieldParentPtr(VarPtr, "base", base).analyze(ira) catch unreachable),
Id.LoadPtr => return await (async @fieldParentPtr(LoadPtr, "base", base).analyze(ira) catch unreachable),
.Return => return @fieldParentPtr(Return, "base", base).analyze(ira),
.Const => return @fieldParentPtr(Const, "base", base).analyze(ira),
.Call => return @fieldParentPtr(Call, "base", base).analyze(ira),
.DeclRef => return @fieldParentPtr(DeclRef, "base", base).analyze(ira),
.Ref => return @fieldParentPtr(Ref, "base", base).analyze(ira),
.DeclVar => return @fieldParentPtr(DeclVar, "base", base).analyze(ira),
.CheckVoidStmt => return @fieldParentPtr(CheckVoidStmt, "base", base).analyze(ira),
.Phi => return @fieldParentPtr(Phi, "base", base).analyze(ira),
.Br => return @fieldParentPtr(Br, "base", base).analyze(ira),
.AddImplicitReturnType => return @fieldParentPtr(AddImplicitReturnType, "base", base).analyze(ira),
.PtrType => return @fieldParentPtr(PtrType, "base", base).analyze(ira),
.VarPtr => return @fieldParentPtr(VarPtr, "base", base).analyze(ira),
.LoadPtr => return @fieldParentPtr(LoadPtr, "base", base).analyze(ira),
}
}
pub fn render(base: *Inst, ofile: *ObjectFile, fn_val: *Value.Fn) (error{OutOfMemory}!?*llvm.Value) {
switch (base.id) {
Id.Return => return @fieldParentPtr(Return, "base", base).render(ofile, fn_val),
Id.Const => return @fieldParentPtr(Const, "base", base).render(ofile, fn_val),
Id.Call => return @fieldParentPtr(Call, "base", base).render(ofile, fn_val),
Id.VarPtr => return @fieldParentPtr(VarPtr, "base", base).render(ofile, fn_val),
Id.LoadPtr => return @fieldParentPtr(LoadPtr, "base", base).render(ofile, fn_val),
Id.DeclRef => unreachable,
Id.PtrType => unreachable,
Id.Ref => @panic("TODO"),
Id.DeclVar => @panic("TODO"),
Id.CheckVoidStmt => @panic("TODO"),
Id.Phi => @panic("TODO"),
Id.Br => @panic("TODO"),
Id.AddImplicitReturnType => @panic("TODO"),
.Return => return @fieldParentPtr(Return, "base", base).render(ofile, fn_val),
.Const => return @fieldParentPtr(Const, "base", base).render(ofile, fn_val),
.Call => return @fieldParentPtr(Call, "base", base).render(ofile, fn_val),
.VarPtr => return @fieldParentPtr(VarPtr, "base", base).render(ofile, fn_val),
.LoadPtr => return @fieldParentPtr(LoadPtr, "base", base).render(ofile, fn_val),
.DeclRef => unreachable,
.PtrType => unreachable,
.Ref => @panic("TODO"),
.DeclVar => @panic("TODO"),
.CheckVoidStmt => @panic("TODO"),
.Phi => @panic("TODO"),
.Br => @panic("TODO"),
.AddImplicitReturnType => @panic("TODO"),
}
}
@ -165,7 +164,7 @@ pub const Inst = struct {
param.ref_count -= 1;
const child = param.child orelse return error.SemanticAnalysisFailed;
switch (child.val) {
IrVal.Unknown => return error.SemanticAnalysisFailed,
.Unknown => return error.SemanticAnalysisFailed,
else => return child,
}
}
@ -213,9 +212,9 @@ pub const Inst = struct {
/// asserts that the type is known
fn getKnownType(self: *Inst) *Type {
switch (self.val) {
IrVal.KnownType => |typ| return typ,
IrVal.KnownValue => |value| return value.typ,
IrVal.Unknown => unreachable,
.KnownType => |typ| return typ,
.KnownValue => |value| return value.typ,
.Unknown => unreachable,
}
}
@ -225,14 +224,14 @@ pub const Inst = struct {
pub fn isNoReturn(base: *const Inst) bool {
switch (base.val) {
IrVal.Unknown => return false,
IrVal.KnownValue => |x| return x.typ.id == Type.Id.NoReturn,
IrVal.KnownType => |typ| return typ.id == Type.Id.NoReturn,
.Unknown => return false,
.KnownValue => |x| return x.typ.id == .NoReturn,
.KnownType => |typ| return typ.id == .NoReturn,
}
}
pub fn isCompTime(base: *const Inst) bool {
return base.val == IrVal.KnownValue;
return base.val == .KnownValue;
}
pub fn linkToParent(self: *Inst, parent: *Inst) void {
@ -441,13 +440,13 @@ pub const Inst = struct {
.volatility = self.params.volatility,
});
const elem_type = target.getKnownType();
const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
const ptr_type = try Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
.child_type = elem_type,
.mut = self.params.mut,
.vol = self.params.volatility,
.size = Type.Pointer.Size.One,
.alignment = Type.Pointer.Align.Abi,
}) catch unreachable);
.size = .One,
.alignment = .Abi,
});
// TODO: potentially set the hint that this is a stack pointer. But it might not be - this
// could be a ref of a global, for example
new_inst.val = IrVal{ .KnownType = &ptr_type.base };
@ -474,25 +473,25 @@ pub const Inst = struct {
}
pub async fn analyze(self: *const DeclRef, ira: *Analyze) !*Inst {
(await (async ira.irb.comp.resolveDecl(self.params.decl) catch unreachable)) catch |err| switch (err) {
(ira.irb.comp.resolveDecl(self.params.decl)) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
else => return error.SemanticAnalysisFailed,
};
switch (self.params.decl.id) {
Decl.Id.CompTime => unreachable,
Decl.Id.Var => return error.Unimplemented,
Decl.Id.Fn => {
.CompTime => unreachable,
.Var => return error.Unimplemented,
.Fn => {
const fn_decl = @fieldParentPtr(Decl.Fn, "base", self.params.decl);
const decl_val = switch (fn_decl.value) {
Decl.Fn.Val.Unresolved => unreachable,
Decl.Fn.Val.Fn => |fn_val| &fn_val.base,
Decl.Fn.Val.FnProto => |fn_proto| &fn_proto.base,
.Unresolved => unreachable,
.Fn => |fn_val| &fn_val.base,
.FnProto => |fn_proto| &fn_proto.base,
};
switch (self.params.lval) {
LVal.None => {
.None => {
return ira.irb.buildConstValue(self.base.scope, self.base.span, decl_val);
},
LVal.Ptr => return error.Unimplemented,
.Ptr => return error.Unimplemented,
}
},
}
@ -519,21 +518,21 @@ pub const Inst = struct {
pub async fn analyze(self: *const VarPtr, ira: *Analyze) !*Inst {
switch (self.params.var_scope.data) {
Scope.Var.Data.Const => @panic("TODO"),
Scope.Var.Data.Param => |param| {
.Const => @panic("TODO"),
.Param => |param| {
const new_inst = try ira.irb.build(
Inst.VarPtr,
self.base.scope,
self.base.span,
Inst.VarPtr.Params{ .var_scope = self.params.var_scope },
);
const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
const ptr_type = try Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
.child_type = param.typ,
.mut = Type.Pointer.Mut.Const,
.vol = Type.Pointer.Vol.Non,
.size = Type.Pointer.Size.One,
.alignment = Type.Pointer.Align.Abi,
}) catch unreachable);
.mut = .Const,
.vol = .Non,
.size = .One,
.alignment = .Abi,
});
new_inst.val = IrVal{ .KnownType = &ptr_type.base };
return new_inst;
},
@ -542,8 +541,8 @@ pub const Inst = struct {
pub fn render(self: *VarPtr, ofile: *ObjectFile, fn_val: *Value.Fn) *llvm.Value {
switch (self.params.var_scope.data) {
Scope.Var.Data.Const => unreachable, // turned into Inst.Const in analyze pass
Scope.Var.Data.Param => |param| return param.llvm_value,
.Const => unreachable, // turned into Inst.Const in analyze pass
.Param => |param| return param.llvm_value,
}
}
};
@ -567,7 +566,7 @@ pub const Inst = struct {
pub async fn analyze(self: *const LoadPtr, ira: *Analyze) !*Inst {
const target = try self.params.target.getAsParam();
const target_type = target.getKnownType();
if (target_type.id != Type.Id.Pointer) {
if (target_type.id != .Pointer) {
try ira.addCompileError(self.base.span, "dereference of non pointer type '{}'", target_type.name);
return error.SemanticAnalysisFailed;
}
@ -661,13 +660,13 @@ pub const Inst = struct {
} else blk: {
break :blk Type.Pointer.Align{ .Abi = {} };
};
const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
const ptr_type = try Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
.child_type = child_type,
.mut = self.params.mut,
.vol = self.params.vol,
.size = self.params.size,
.alignment = alignment,
}) catch unreachable);
});
ptr_type.base.base.deref(ira.irb.comp);
return ira.irb.buildConstValue(self.base.scope, self.base.span, &ptr_type.base.base);
@ -715,7 +714,7 @@ pub const Inst = struct {
pub fn analyze(self: *const CheckVoidStmt, ira: *Analyze) !*Inst {
const target = try self.params.target.getAsParam();
if (target.getKnownType().id != Type.Id.Void) {
if (target.getKnownType().id != .Void) {
try ira.addCompileError(self.base.span, "expression value is ignored");
return error.SemanticAnalysisFailed;
}
@ -838,7 +837,7 @@ pub const Inst = struct {
const target = try self.params.target.getAsParam();
const target_type = target.getKnownType();
switch (target_type.id) {
Type.Id.ErrorUnion => {
.ErrorUnion => {
return error.Unimplemented;
// if (instr_is_comptime(value)) {
// ConstExprValue *err_union_val = ir_resolve_const(ira, value, UndefBad);
@ -868,7 +867,7 @@ pub const Inst = struct {
// ir_build_test_err_from(&ira->new_irb, &instruction->base, value);
// return ira->codegen->builtin_types.entry_bool;
},
Type.Id.ErrorSet => {
.ErrorSet => {
return ira.irb.buildConstBool(self.base.scope, self.base.span, true);
},
else => {
@ -1081,120 +1080,120 @@ pub const Builder = struct {
pub async fn genNode(irb: *Builder, node: *ast.Node, scope: *Scope, lval: LVal) Error!*Inst {
switch (node.id) {
ast.Node.Id.Root => unreachable,
ast.Node.Id.Use => unreachable,
ast.Node.Id.TestDecl => unreachable,
ast.Node.Id.VarDecl => return error.Unimplemented,
ast.Node.Id.Defer => return error.Unimplemented,
ast.Node.Id.InfixOp => return error.Unimplemented,
ast.Node.Id.PrefixOp => {
.Root => unreachable,
.Use => unreachable,
.TestDecl => unreachable,
.VarDecl => return error.Unimplemented,
.Defer => return error.Unimplemented,
.InfixOp => return error.Unimplemented,
.PrefixOp => {
const prefix_op = @fieldParentPtr(ast.Node.PrefixOp, "base", node);
switch (prefix_op.op) {
ast.Node.PrefixOp.Op.AddressOf => return error.Unimplemented,
ast.Node.PrefixOp.Op.ArrayType => |n| return error.Unimplemented,
ast.Node.PrefixOp.Op.Await => return error.Unimplemented,
ast.Node.PrefixOp.Op.BitNot => return error.Unimplemented,
ast.Node.PrefixOp.Op.BoolNot => return error.Unimplemented,
ast.Node.PrefixOp.Op.Cancel => return error.Unimplemented,
ast.Node.PrefixOp.Op.OptionalType => return error.Unimplemented,
ast.Node.PrefixOp.Op.Negation => return error.Unimplemented,
ast.Node.PrefixOp.Op.NegationWrap => return error.Unimplemented,
ast.Node.PrefixOp.Op.Resume => return error.Unimplemented,
ast.Node.PrefixOp.Op.PtrType => |ptr_info| {
const inst = try await (async irb.genPtrType(prefix_op, ptr_info, scope) catch unreachable);
.AddressOf => return error.Unimplemented,
.ArrayType => |n| return error.Unimplemented,
.Await => return error.Unimplemented,
.BitNot => return error.Unimplemented,
.BoolNot => return error.Unimplemented,
.Cancel => return error.Unimplemented,
.OptionalType => return error.Unimplemented,
.Negation => return error.Unimplemented,
.NegationWrap => return error.Unimplemented,
.Resume => return error.Unimplemented,
.PtrType => |ptr_info| {
const inst = try irb.genPtrType(prefix_op, ptr_info, scope);
return irb.lvalWrap(scope, inst, lval);
},
ast.Node.PrefixOp.Op.SliceType => |ptr_info| return error.Unimplemented,
ast.Node.PrefixOp.Op.Try => return error.Unimplemented,
.SliceType => |ptr_info| return error.Unimplemented,
.Try => return error.Unimplemented,
}
},
ast.Node.Id.SuffixOp => {
.SuffixOp => {
const suffix_op = @fieldParentPtr(ast.Node.SuffixOp, "base", node);
switch (suffix_op.op) {
@TagType(ast.Node.SuffixOp.Op).Call => |*call| {
const inst = try await (async irb.genCall(suffix_op, call, scope) catch unreachable);
.Call => |*call| {
const inst = try irb.genCall(suffix_op, call, scope);
return irb.lvalWrap(scope, inst, lval);
},
@TagType(ast.Node.SuffixOp.Op).ArrayAccess => |n| return error.Unimplemented,
@TagType(ast.Node.SuffixOp.Op).Slice => |slice| return error.Unimplemented,
@TagType(ast.Node.SuffixOp.Op).ArrayInitializer => |init_list| return error.Unimplemented,
@TagType(ast.Node.SuffixOp.Op).StructInitializer => |init_list| return error.Unimplemented,
@TagType(ast.Node.SuffixOp.Op).Deref => return error.Unimplemented,
@TagType(ast.Node.SuffixOp.Op).UnwrapOptional => return error.Unimplemented,
.ArrayAccess => |n| return error.Unimplemented,
.Slice => |slice| return error.Unimplemented,
.ArrayInitializer => |init_list| return error.Unimplemented,
.StructInitializer => |init_list| return error.Unimplemented,
.Deref => return error.Unimplemented,
.UnwrapOptional => return error.Unimplemented,
}
},
ast.Node.Id.Switch => return error.Unimplemented,
ast.Node.Id.While => return error.Unimplemented,
ast.Node.Id.For => return error.Unimplemented,
ast.Node.Id.If => return error.Unimplemented,
ast.Node.Id.ControlFlowExpression => {
.Switch => return error.Unimplemented,
.While => return error.Unimplemented,
.For => return error.Unimplemented,
.If => return error.Unimplemented,
.ControlFlowExpression => {
const control_flow_expr = @fieldParentPtr(ast.Node.ControlFlowExpression, "base", node);
return await (async irb.genControlFlowExpr(control_flow_expr, scope, lval) catch unreachable);
return irb.genControlFlowExpr(control_flow_expr, scope, lval);
},
ast.Node.Id.Suspend => return error.Unimplemented,
ast.Node.Id.VarType => return error.Unimplemented,
ast.Node.Id.ErrorType => return error.Unimplemented,
ast.Node.Id.FnProto => return error.Unimplemented,
ast.Node.Id.PromiseType => return error.Unimplemented,
ast.Node.Id.IntegerLiteral => {
.Suspend => return error.Unimplemented,
.VarType => return error.Unimplemented,
.ErrorType => return error.Unimplemented,
.FnProto => return error.Unimplemented,
.AnyFrameType => return error.Unimplemented,
.IntegerLiteral => {
const int_lit = @fieldParentPtr(ast.Node.IntegerLiteral, "base", node);
return irb.lvalWrap(scope, try irb.genIntLit(int_lit, scope), lval);
},
ast.Node.Id.FloatLiteral => return error.Unimplemented,
ast.Node.Id.StringLiteral => {
.FloatLiteral => return error.Unimplemented,
.StringLiteral => {
const str_lit = @fieldParentPtr(ast.Node.StringLiteral, "base", node);
const inst = try await (async irb.genStrLit(str_lit, scope) catch unreachable);
const inst = try irb.genStrLit(str_lit, scope);
return irb.lvalWrap(scope, inst, lval);
},
ast.Node.Id.MultilineStringLiteral => return error.Unimplemented,
ast.Node.Id.CharLiteral => return error.Unimplemented,
ast.Node.Id.BoolLiteral => return error.Unimplemented,
ast.Node.Id.NullLiteral => return error.Unimplemented,
ast.Node.Id.UndefinedLiteral => return error.Unimplemented,
ast.Node.Id.Unreachable => return error.Unimplemented,
ast.Node.Id.Identifier => {
.MultilineStringLiteral => return error.Unimplemented,
.CharLiteral => return error.Unimplemented,
.BoolLiteral => return error.Unimplemented,
.NullLiteral => return error.Unimplemented,
.UndefinedLiteral => return error.Unimplemented,
.Unreachable => return error.Unimplemented,
.Identifier => {
const identifier = @fieldParentPtr(ast.Node.Identifier, "base", node);
return await (async irb.genIdentifier(identifier, scope, lval) catch unreachable);
return irb.genIdentifier(identifier, scope, lval);
},
ast.Node.Id.GroupedExpression => {
.GroupedExpression => {
const grouped_expr = @fieldParentPtr(ast.Node.GroupedExpression, "base", node);
return await (async irb.genNode(grouped_expr.expr, scope, lval) catch unreachable);
return irb.genNode(grouped_expr.expr, scope, lval);
},
ast.Node.Id.BuiltinCall => return error.Unimplemented,
ast.Node.Id.ErrorSetDecl => return error.Unimplemented,
ast.Node.Id.ContainerDecl => return error.Unimplemented,
ast.Node.Id.Asm => return error.Unimplemented,
ast.Node.Id.Comptime => return error.Unimplemented,
ast.Node.Id.Block => {
.BuiltinCall => return error.Unimplemented,
.ErrorSetDecl => return error.Unimplemented,
.ContainerDecl => return error.Unimplemented,
.Asm => return error.Unimplemented,
.Comptime => return error.Unimplemented,
.Block => {
const block = @fieldParentPtr(ast.Node.Block, "base", node);
const inst = try await (async irb.genBlock(block, scope) catch unreachable);
const inst = try irb.genBlock(block, scope);
return irb.lvalWrap(scope, inst, lval);
},
ast.Node.Id.DocComment => return error.Unimplemented,
ast.Node.Id.SwitchCase => return error.Unimplemented,
ast.Node.Id.SwitchElse => return error.Unimplemented,
ast.Node.Id.Else => return error.Unimplemented,
ast.Node.Id.Payload => return error.Unimplemented,
ast.Node.Id.PointerPayload => return error.Unimplemented,
ast.Node.Id.PointerIndexPayload => return error.Unimplemented,
ast.Node.Id.ContainerField => return error.Unimplemented,
ast.Node.Id.ErrorTag => return error.Unimplemented,
ast.Node.Id.AsmInput => return error.Unimplemented,
ast.Node.Id.AsmOutput => return error.Unimplemented,
ast.Node.Id.ParamDecl => return error.Unimplemented,
ast.Node.Id.FieldInitializer => return error.Unimplemented,
ast.Node.Id.EnumLiteral => return error.Unimplemented,
.DocComment => return error.Unimplemented,
.SwitchCase => return error.Unimplemented,
.SwitchElse => return error.Unimplemented,
.Else => return error.Unimplemented,
.Payload => return error.Unimplemented,
.PointerPayload => return error.Unimplemented,
.PointerIndexPayload => return error.Unimplemented,
.ContainerField => return error.Unimplemented,
.ErrorTag => return error.Unimplemented,
.AsmInput => return error.Unimplemented,
.AsmOutput => return error.Unimplemented,
.ParamDecl => return error.Unimplemented,
.FieldInitializer => return error.Unimplemented,
.EnumLiteral => return error.Unimplemented,
}
}
async fn genCall(irb: *Builder, suffix_op: *ast.Node.SuffixOp, call: *ast.Node.SuffixOp.Op.Call, scope: *Scope) !*Inst {
const fn_ref = try await (async irb.genNode(suffix_op.lhs, scope, LVal.None) catch unreachable);
const fn_ref = try irb.genNode(suffix_op.lhs, scope, .None);
const args = try irb.arena().alloc(*Inst, call.params.len);
var it = call.params.iterator(0);
var i: usize = 0;
while (it.next()) |arg_node_ptr| : (i += 1) {
args[i] = try await (async irb.genNode(arg_node_ptr.*, scope, LVal.None) catch unreachable);
args[i] = try irb.genNode(arg_node_ptr.*, scope, .None);
}
//bool is_async = node->data.fn_call_expr.is_async;
@ -1239,7 +1238,7 @@ pub const Builder = struct {
//} else {
// align_value = nullptr;
//}
const child_type = try await (async irb.genNode(prefix_op.rhs, scope, LVal.None) catch unreachable);
const child_type = try irb.genNode(prefix_op.rhs, scope, .None);
//uint32_t bit_offset_start = 0;
//if (node->data.pointer_type.bit_offset_start != nullptr) {
@ -1273,9 +1272,9 @@ pub const Builder = struct {
return irb.build(Inst.PtrType, scope, Span.node(&prefix_op.base), Inst.PtrType.Params{
.child_type = child_type,
.mut = Type.Pointer.Mut.Mut,
.vol = Type.Pointer.Vol.Non,
.size = Type.Pointer.Size.Many,
.mut = .Mut,
.vol = .Non,
.size = .Many,
.alignment = null,
});
}
@ -1287,15 +1286,15 @@ pub const Builder = struct {
var scope = target_scope;
while (true) {
switch (scope.id) {
Scope.Id.CompTime => return true,
Scope.Id.FnDef => return false,
Scope.Id.Decls => unreachable,
Scope.Id.Root => unreachable,
Scope.Id.AstTree => unreachable,
Scope.Id.Block,
Scope.Id.Defer,
Scope.Id.DeferExpr,
Scope.Id.Var,
.CompTime => return true,
.FnDef => return false,
.Decls => unreachable,
.Root => unreachable,
.AstTree => unreachable,
.Block,
.Defer,
.DeferExpr,
.Var,
=> scope = scope.parent.?,
}
}
@ -1366,23 +1365,23 @@ pub const Builder = struct {
buf[buf.len - 1] = 0;
// next make an array value
const array_val = try await (async Value.Array.createOwnedBuffer(irb.comp, buf) catch unreachable);
const array_val = try Value.Array.createOwnedBuffer(irb.comp, buf);
buf_cleaned = true;
defer array_val.base.deref(irb.comp);
// then make a pointer value pointing at the first element
const ptr_val = try await (async Value.Ptr.createArrayElemPtr(
const ptr_val = try Value.Ptr.createArrayElemPtr(
irb.comp,
array_val,
Type.Pointer.Mut.Const,
Type.Pointer.Size.Many,
.Const,
.Many,
0,
) catch unreachable);
);
defer ptr_val.base.deref(irb.comp);
return irb.buildConstValue(scope, src_span, &ptr_val.base);
} else {
const array_val = try await (async Value.Array.createOwnedBuffer(irb.comp, buf) catch unreachable);
const array_val = try Value.Array.createOwnedBuffer(irb.comp, buf);
buf_cleaned = true;
defer array_val.base.deref(irb.comp);
@ -1438,7 +1437,7 @@ pub const Builder = struct {
child_scope = &defer_child_scope.base;
continue;
}
const statement_value = try await (async irb.genNode(statement_node, child_scope, LVal.None) catch unreachable);
const statement_value = try irb.genNode(statement_node, child_scope, .None);
is_continuation_unreachable = statement_value.isNoReturn();
if (is_continuation_unreachable) {
@ -1481,7 +1480,7 @@ pub const Builder = struct {
try block_scope.incoming_values.append(
try irb.buildConstVoid(parent_scope, Span.token(block.rbrace), true),
);
_ = try await (async irb.genDefersForBlock(child_scope, outer_block_scope, Scope.Defer.Kind.ScopeExit) catch unreachable);
_ = try irb.genDefersForBlock(child_scope, outer_block_scope, .ScopeExit);
_ = try irb.buildGen(Inst.Br, parent_scope, Span.token(block.rbrace), Inst.Br.Params{
.dest_block = block_scope.end_block,
@ -1496,7 +1495,7 @@ pub const Builder = struct {
});
}
_ = try await (async irb.genDefersForBlock(child_scope, outer_block_scope, Scope.Defer.Kind.ScopeExit) catch unreachable);
_ = try irb.genDefersForBlock(child_scope, outer_block_scope, .ScopeExit);
return irb.buildConstVoid(child_scope, Span.token(block.rbrace), true);
}
@ -1507,9 +1506,9 @@ pub const Builder = struct {
lval: LVal,
) !*Inst {
switch (control_flow_expr.kind) {
ast.Node.ControlFlowExpression.Kind.Break => |arg| return error.Unimplemented,
ast.Node.ControlFlowExpression.Kind.Continue => |arg| return error.Unimplemented,
ast.Node.ControlFlowExpression.Kind.Return => {
.Break => |arg| return error.Unimplemented,
.Continue => |arg| return error.Unimplemented,
.Return => {
const src_span = Span.token(control_flow_expr.ltoken);
if (scope.findFnDef() == null) {
try irb.comp.addCompileError(
@ -1534,7 +1533,7 @@ pub const Builder = struct {
const outer_scope = irb.begin_scope.?;
const return_value = if (control_flow_expr.rhs) |rhs| blk: {
break :blk try await (async irb.genNode(rhs, scope, LVal.None) catch unreachable);
break :blk try irb.genNode(rhs, scope, .None);
} else blk: {
break :blk try irb.buildConstVoid(scope, src_span, true);
};
@ -1545,7 +1544,7 @@ pub const Builder = struct {
const err_block = try irb.createBasicBlock(scope, "ErrRetErr");
const ok_block = try irb.createBasicBlock(scope, "ErrRetOk");
if (!have_err_defers) {
_ = try await (async irb.genDefersForBlock(scope, outer_scope, Scope.Defer.Kind.ScopeExit) catch unreachable);
_ = try irb.genDefersForBlock(scope, outer_scope, .ScopeExit);
}
const is_err = try irb.build(
@ -1568,7 +1567,7 @@ pub const Builder = struct {
try irb.setCursorAtEndAndAppendBlock(err_block);
if (have_err_defers) {
_ = try await (async irb.genDefersForBlock(scope, outer_scope, Scope.Defer.Kind.ErrorExit) catch unreachable);
_ = try irb.genDefersForBlock(scope, outer_scope, .ErrorExit);
}
if (irb.comp.have_err_ret_tracing and !irb.isCompTime(scope)) {
_ = try irb.build(Inst.SaveErrRetAddr, scope, src_span, Inst.SaveErrRetAddr.Params{});
@ -1580,7 +1579,7 @@ pub const Builder = struct {
try irb.setCursorAtEndAndAppendBlock(ok_block);
if (have_err_defers) {
_ = try await (async irb.genDefersForBlock(scope, outer_scope, Scope.Defer.Kind.ScopeExit) catch unreachable);
_ = try irb.genDefersForBlock(scope, outer_scope, .ScopeExit);
}
_ = try irb.build(Inst.Br, scope, src_span, Inst.Br.Params{
.dest_block = ret_stmt_block,
@ -1590,7 +1589,7 @@ pub const Builder = struct {
try irb.setCursorAtEndAndAppendBlock(ret_stmt_block);
return irb.genAsyncReturn(scope, src_span, return_value, false);
} else {
_ = try await (async irb.genDefersForBlock(scope, outer_scope, Scope.Defer.Kind.ScopeExit) catch unreachable);
_ = try irb.genDefersForBlock(scope, outer_scope, .ScopeExit);
return irb.genAsyncReturn(scope, src_span, return_value, false);
}
},
@ -1610,14 +1609,14 @@ pub const Builder = struct {
// return &const_instruction->base;
//}
if (await (async irb.comp.getPrimitiveType(name) catch unreachable)) |result| {
if (irb.comp.getPrimitiveType(name)) |result| {
if (result) |primitive_type| {
defer primitive_type.base.deref(irb.comp);
switch (lval) {
// if (lval == LValPtr) {
// return ir_build_ref(irb, scope, node, value, false, false);
LVal.Ptr => return error.Unimplemented,
LVal.None => return irb.buildConstValue(scope, src_span, &primitive_type.base),
.Ptr => return error.Unimplemented,
.None => return irb.buildConstValue(scope, src_span, &primitive_type.base),
}
}
} else |err| switch (err) {
@ -1628,23 +1627,23 @@ pub const Builder = struct {
error.OutOfMemory => return error.OutOfMemory,
}
switch (await (async irb.findIdent(scope, name) catch unreachable)) {
Ident.Decl => |decl| {
switch (irb.findIdent(scope, name)) {
.Decl => |decl| {
return irb.build(Inst.DeclRef, scope, src_span, Inst.DeclRef.Params{
.decl = decl,
.lval = lval,
});
},
Ident.VarScope => |var_scope| {
.VarScope => |var_scope| {
const var_ptr = try irb.build(Inst.VarPtr, scope, src_span, Inst.VarPtr.Params{ .var_scope = var_scope });
switch (lval) {
LVal.Ptr => return var_ptr,
LVal.None => {
.Ptr => return var_ptr,
.None => {
return irb.build(Inst.LoadPtr, scope, src_span, Inst.LoadPtr.Params{ .target = var_ptr });
},
}
},
Ident.NotFound => {},
.NotFound => {},
}
//if (node->owner->any_imports_failed) {
@ -1671,25 +1670,25 @@ pub const Builder = struct {
var scope = inner_scope;
while (scope != outer_scope) {
switch (scope.id) {
Scope.Id.Defer => {
.Defer => {
const defer_scope = @fieldParentPtr(Scope.Defer, "base", scope);
switch (defer_scope.kind) {
Scope.Defer.Kind.ScopeExit => result.scope_exit += 1,
Scope.Defer.Kind.ErrorExit => result.error_exit += 1,
.ScopeExit => result.scope_exit += 1,
.ErrorExit => result.error_exit += 1,
}
scope = scope.parent orelse break;
},
Scope.Id.FnDef => break,
.FnDef => break,
Scope.Id.CompTime,
Scope.Id.Block,
Scope.Id.Decls,
Scope.Id.Root,
Scope.Id.Var,
.CompTime,
.Block,
.Decls,
.Root,
.Var,
=> scope = scope.parent orelse break,
Scope.Id.DeferExpr => unreachable,
Scope.Id.AstTree => unreachable,
.DeferExpr => unreachable,
.AstTree => unreachable,
}
}
return result;
@ -1705,19 +1704,19 @@ pub const Builder = struct {
var is_noreturn = false;
while (true) {
switch (scope.id) {
Scope.Id.Defer => {
.Defer => {
const defer_scope = @fieldParentPtr(Scope.Defer, "base", scope);
const generate = switch (defer_scope.kind) {
Scope.Defer.Kind.ScopeExit => true,
Scope.Defer.Kind.ErrorExit => gen_kind == Scope.Defer.Kind.ErrorExit,
.ScopeExit => true,
.ErrorExit => gen_kind == .ErrorExit,
};
if (generate) {
const defer_expr_scope = defer_scope.defer_expr_scope;
const instruction = try await (async irb.genNode(
const instruction = try irb.genNode(
defer_expr_scope.expr_node,
&defer_expr_scope.base,
LVal.None,
) catch unreachable);
.None,
);
if (instruction.isNoReturn()) {
is_noreturn = true;
} else {
@ -1730,32 +1729,32 @@ pub const Builder = struct {
}
}
},
Scope.Id.FnDef,
Scope.Id.Decls,
Scope.Id.Root,
.FnDef,
.Decls,
.Root,
=> return is_noreturn,
Scope.Id.CompTime,
Scope.Id.Block,
Scope.Id.Var,
.CompTime,
.Block,
.Var,
=> scope = scope.parent orelse return is_noreturn,
Scope.Id.DeferExpr => unreachable,
Scope.Id.AstTree => unreachable,
.DeferExpr => unreachable,
.AstTree => unreachable,
}
}
}
pub fn lvalWrap(irb: *Builder, scope: *Scope, instruction: *Inst, lval: LVal) !*Inst {
switch (lval) {
LVal.None => return instruction,
LVal.Ptr => {
.None => return instruction,
.Ptr => {
// We needed a pointer to a value, but we got a value. So we create
// an instruction which just makes a const pointer of it.
return irb.build(Inst.Ref, scope, instruction.span, Inst.Ref.Params{
.target = instruction,
.mut = Type.Pointer.Mut.Const,
.volatility = Type.Pointer.Vol.Non,
.mut = .Const,
.volatility = .Non,
});
},
}
@ -1781,9 +1780,9 @@ pub const Builder = struct {
.scope = scope,
.debug_id = self.next_debug_id,
.val = switch (I.ir_val_init) {
IrVal.Init.Unknown => IrVal.Unknown,
IrVal.Init.NoReturn => IrVal{ .KnownValue = &Value.NoReturn.get(self.comp).base },
IrVal.Init.Void => IrVal{ .KnownValue = &Value.Void.get(self.comp).base },
.Unknown => IrVal.Unknown,
.NoReturn => IrVal{ .KnownValue = &Value.NoReturn.get(self.comp).base },
.Void => IrVal{ .KnownValue = &Value.Void.get(self.comp).base },
},
.ref_count = 0,
.span = span,
@ -1902,7 +1901,6 @@ pub const Builder = struct {
);
}
return error.Unimplemented;
}
const Ident = union(enum) {
@ -1915,16 +1913,16 @@ pub const Builder = struct {
var s = scope;
while (true) {
switch (s.id) {
Scope.Id.Root => return Ident.NotFound,
Scope.Id.Decls => {
.Root => return .NotFound,
.Decls => {
const decls = @fieldParentPtr(Scope.Decls, "base", s);
const locked_table = await (async decls.table.acquireRead() catch unreachable);
const locked_table = decls.table.acquireRead();
defer locked_table.release();
if (locked_table.value.get(name)) |entry| {
return Ident{ .Decl = entry.value };
}
},
Scope.Id.Var => {
.Var => {
const var_scope = @fieldParentPtr(Scope.Var, "base", s);
if (mem.eql(u8, var_scope.name, name)) {
return Ident{ .VarScope = var_scope };
@ -2047,7 +2045,7 @@ const Analyze = struct {
fn implicitCast(self: *Analyze, target: *Inst, optional_dest_type: ?*Type) Analyze.Error!*Inst {
const dest_type = optional_dest_type orelse return target;
const from_type = target.getKnownType();
if (from_type == dest_type or from_type.id == Type.Id.NoReturn) return target;
if (from_type == dest_type or from_type.id == .NoReturn) return target;
return self.analyzeCast(target, target, dest_type);
}
@ -2311,7 +2309,7 @@ const Analyze = struct {
//}
// cast from comptime-known integer to another integer where the value fits
if (target.isCompTime() and (from_type.id == Type.Id.Int or from_type.id == Type.Id.ComptimeInt)) cast: {
if (target.isCompTime() and (from_type.id == .Int or from_type.id == .ComptimeInt)) cast: {
const target_val = target.val.KnownValue;
const from_int = &target_val.cast(Value.Int).?.big_int;
const fits = fits: {
@ -2534,7 +2532,7 @@ pub async fn gen(
entry_block.ref(&irb); // Entry block gets a reference because we enter it to begin.
try irb.setCursorAtEndAndAppendBlock(entry_block);
const result = try await (async irb.genNode(body_node, scope, LVal.None) catch unreachable);
const result = try irb.genNode(body_node, scope, .None);
if (!result.isNoReturn()) {
// no need for save_err_ret_addr because this cannot return error
_ = try irb.genAsyncReturn(scope, Span.token(body_node.lastToken()), result, true);
@ -2564,7 +2562,7 @@ pub async fn analyze(comp: *Compilation, old_code: *Code, expected_type: ?*Type)
continue;
}
const return_inst = try await (async old_instruction.analyze(&ira) catch unreachable);
const return_inst = try old_instruction.analyze(&ira);
assert(return_inst.val != IrVal.Unknown); // at least the type should be known at this point
return_inst.linkToParent(old_instruction);
// Note: if we ever modify the above to handle error.CompileError by continuing analysis,
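
The `@fieldParentPtr(Return, "base", base)` calls in the analyze/render dispatch above are the downcast idiom for the `Inst` hierarchy: each concrete instruction embeds a `base` field and is recovered from a pointer to it. A small self-contained sketch of the pattern (the `Node`/`Leaf` names are illustrative):

const std = @import("std");

const Id = enum { Leaf };

const Node = struct {
    id: Id,
};

const Leaf = struct {
    base: Node,
    value: i32,
};

fn leafValue(base: *Node) i32 {
    // Recover the containing struct from a pointer to its embedded field;
    // the tag stored in `base.id` says which downcast is valid.
    std.debug.assert(base.id == .Leaf);
    const leaf = @fieldParentPtr(Leaf, "base", base);
    return leaf.value;
}

test "fieldParentPtr downcast" {
    var leaf = Leaf{ .base = Node{ .id = .Leaf }, .value = 7 };
    std.debug.assert(leafValue(&leaf.base) == 7);
}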

View File

@ -1,9 +1,11 @@
const std = @import("std");
const builtin = @import("builtin");
const event = std.event;
const Target = @import("target.zig").Target;
const util = @import("util.zig");
const Target = std.Target;
const c = @import("c.zig");
const fs = std.fs;
const Allocator = std.mem.Allocator;
/// See the render function implementation for documentation of the fields.
pub const LibCInstallation = struct {
@ -29,7 +31,7 @@ pub const LibCInstallation = struct {
pub fn parse(
self: *LibCInstallation,
allocator: *std.mem.Allocator,
allocator: *Allocator,
libc_file: []const u8,
stderr: *std.io.OutStream(fs.File.WriteError),
) !void {
@ -71,7 +73,7 @@ pub const LibCInstallation = struct {
if (std.mem.eql(u8, name, key)) {
found_keys[i].found = true;
switch (@typeInfo(@typeOf(@field(self, key)))) {
builtin.TypeId.Optional => {
.Optional => {
if (value.len == 0) {
@field(self, key) = null;
} else {
@ -136,15 +138,15 @@ pub const LibCInstallation = struct {
self.static_lib_dir orelse "",
self.msvc_lib_dir orelse "",
self.kernel32_lib_dir orelse "",
self.dynamic_linker_path orelse Target(Target.Native).getDynamicLinkerPath(),
self.dynamic_linker_path orelse util.getDynamicLinkerPath(Target{ .Native = {} }),
);
}
/// Finds the default, native libc.
pub async fn findNative(self: *LibCInstallation, loop: *event.Loop) !void {
pub async fn findNative(self: *LibCInstallation, allocator: *Allocator) !void {
self.initEmpty();
var group = event.Group(FindError!void).init(loop);
errdefer group.deinit();
var group = event.Group(FindError!void).init(allocator);
errdefer group.wait() catch {};
var windows_sdk: ?*c.ZigWindowsSDK = null;
errdefer if (windows_sdk) |sdk| c.zig_free_windows_sdk(@ptrCast(?[*]c.ZigWindowsSDK, sdk));
@ -156,11 +158,11 @@ pub const LibCInstallation = struct {
windows_sdk = sdk;
if (sdk.msvc_lib_dir_ptr != 0) {
self.msvc_lib_dir = try std.mem.dupe(loop.allocator, u8, sdk.msvc_lib_dir_ptr[0..sdk.msvc_lib_dir_len]);
self.msvc_lib_dir = try std.mem.dupe(allocator, u8, sdk.msvc_lib_dir_ptr[0..sdk.msvc_lib_dir_len]);
}
try group.call(findNativeKernel32LibDir, self, loop, sdk);
try group.call(findNativeIncludeDirWindows, self, loop, sdk);
try group.call(findNativeLibDirWindows, self, loop, sdk);
try group.call(findNativeKernel32LibDir, allocator, self, sdk);
try group.call(findNativeIncludeDirWindows, self, allocator, sdk);
try group.call(findNativeLibDirWindows, self, allocator, sdk);
},
c.ZigFindWindowsSdkError.OutOfMemory => return error.OutOfMemory,
c.ZigFindWindowsSdkError.NotFound => return error.NotFound,
@ -168,20 +170,20 @@ pub const LibCInstallation = struct {
}
},
.linux => {
try group.call(findNativeIncludeDirLinux, self, loop);
try group.call(findNativeLibDirLinux, self, loop);
try group.call(findNativeStaticLibDir, self, loop);
try group.call(findNativeDynamicLinker, self, loop);
try group.call(findNativeIncludeDirLinux, self, allocator);
try group.call(findNativeLibDirLinux, self, allocator);
try group.call(findNativeStaticLibDir, self, allocator);
try group.call(findNativeDynamicLinker, self, allocator);
},
.macosx, .freebsd, .netbsd => {
self.include_dir = try std.mem.dupe(loop.allocator, u8, "/usr/include");
self.include_dir = try std.mem.dupe(allocator, u8, "/usr/include");
},
else => @compileError("unimplemented: find libc for this OS"),
}
return await (async group.wait() catch unreachable);
return group.wait();
}
async fn findNativeIncludeDirLinux(self: *LibCInstallation, loop: *event.Loop) !void {
async fn findNativeIncludeDirLinux(self: *LibCInstallation, allocator: *Allocator) FindError!void {
const cc_exe = std.os.getenv("CC") orelse "cc";
const argv = [_][]const u8{
cc_exe,
@ -191,7 +193,7 @@ pub const LibCInstallation = struct {
"/dev/null",
};
// TODO make this use event loop
const errorable_result = std.ChildProcess.exec(loop.allocator, argv, null, null, 1024 * 1024);
const errorable_result = std.ChildProcess.exec(allocator, argv, null, null, 1024 * 1024);
const exec_result = if (std.debug.runtime_safety) blk: {
break :blk errorable_result catch unreachable;
} else blk: {
@ -201,12 +203,12 @@ pub const LibCInstallation = struct {
};
};
defer {
loop.allocator.free(exec_result.stdout);
loop.allocator.free(exec_result.stderr);
allocator.free(exec_result.stdout);
allocator.free(exec_result.stderr);
}
switch (exec_result.term) {
std.ChildProcess.Term.Exited => |code| {
.Exited => |code| {
if (code != 0) return error.CCompilerExitCode;
},
else => {
@ -215,7 +217,7 @@ pub const LibCInstallation = struct {
}
var it = std.mem.tokenize(exec_result.stderr, "\n\r");
var search_paths = std.ArrayList([]const u8).init(loop.allocator);
var search_paths = std.ArrayList([]const u8).init(allocator);
defer search_paths.deinit();
while (it.next()) |line| {
if (line.len != 0 and line[0] == ' ') {
@ -231,11 +233,11 @@ pub const LibCInstallation = struct {
while (path_i < search_paths.len) : (path_i += 1) {
const search_path_untrimmed = search_paths.at(search_paths.len - path_i - 1);
const search_path = std.mem.trimLeft(u8, search_path_untrimmed, " ");
const stdlib_path = try fs.path.join(loop.allocator, [_][]const u8{ search_path, "stdlib.h" });
defer loop.allocator.free(stdlib_path);
const stdlib_path = try fs.path.join(allocator, [_][]const u8{ search_path, "stdlib.h" });
defer allocator.free(stdlib_path);
if (try fileExists(stdlib_path)) {
self.include_dir = try std.mem.dupe(loop.allocator, u8, search_path);
self.include_dir = try std.mem.dupe(allocator, u8, search_path);
return;
}
}
@ -243,11 +245,11 @@ pub const LibCInstallation = struct {
return error.LibCStdLibHeaderNotFound;
}
async fn findNativeIncludeDirWindows(self: *LibCInstallation, loop: *event.Loop, sdk: *c.ZigWindowsSDK) !void {
async fn findNativeIncludeDirWindows(self: *LibCInstallation, allocator: *Allocator, sdk: *c.ZigWindowsSDK) !void {
var search_buf: [2]Search = undefined;
const searches = fillSearch(&search_buf, sdk);
var result_buf = try std.Buffer.initSize(loop.allocator, 0);
var result_buf = try std.Buffer.initSize(allocator, 0);
defer result_buf.deinit();
for (searches) |search| {
@ -256,10 +258,10 @@ pub const LibCInstallation = struct {
try stream.print("{}\\Include\\{}\\ucrt", search.path, search.version);
const stdlib_path = try fs.path.join(
loop.allocator,
allocator,
[_][]const u8{ result_buf.toSliceConst(), "stdlib.h" },
);
defer loop.allocator.free(stdlib_path);
defer allocator.free(stdlib_path);
if (try fileExists(stdlib_path)) {
self.include_dir = result_buf.toOwnedSlice();
@ -270,11 +272,11 @@ pub const LibCInstallation = struct {
return error.LibCStdLibHeaderNotFound;
}
async fn findNativeLibDirWindows(self: *LibCInstallation, loop: *event.Loop, sdk: *c.ZigWindowsSDK) FindError!void {
async fn findNativeLibDirWindows(self: *LibCInstallation, allocator: *Allocator, sdk: *c.ZigWindowsSDK) FindError!void {
var search_buf: [2]Search = undefined;
const searches = fillSearch(&search_buf, sdk);
var result_buf = try std.Buffer.initSize(loop.allocator, 0);
var result_buf = try std.Buffer.initSize(allocator, 0);
defer result_buf.deinit();
for (searches) |search| {
@ -282,16 +284,16 @@ pub const LibCInstallation = struct {
const stream = &std.io.BufferOutStream.init(&result_buf).stream;
try stream.print("{}\\Lib\\{}\\ucrt\\", search.path, search.version);
switch (builtin.arch) {
builtin.Arch.i386 => try stream.write("x86"),
builtin.Arch.x86_64 => try stream.write("x64"),
builtin.Arch.aarch64 => try stream.write("arm"),
.i386 => try stream.write("x86"),
.x86_64 => try stream.write("x64"),
.aarch64 => try stream.write("arm"),
else => return error.UnsupportedArchitecture,
}
const ucrt_lib_path = try fs.path.join(
loop.allocator,
allocator,
[_][]const u8{ result_buf.toSliceConst(), "ucrt.lib" },
);
defer loop.allocator.free(ucrt_lib_path);
defer allocator.free(ucrt_lib_path);
if (try fileExists(ucrt_lib_path)) {
self.lib_dir = result_buf.toOwnedSlice();
return;
@ -300,15 +302,15 @@ pub const LibCInstallation = struct {
return error.LibCRuntimeNotFound;
}
async fn findNativeLibDirLinux(self: *LibCInstallation, loop: *event.Loop) FindError!void {
self.lib_dir = try await (async ccPrintFileName(loop, "crt1.o", true) catch unreachable);
async fn findNativeLibDirLinux(self: *LibCInstallation, allocator: *Allocator) FindError!void {
self.lib_dir = try ccPrintFileName(allocator, "crt1.o", true);
}
async fn findNativeStaticLibDir(self: *LibCInstallation, loop: *event.Loop) FindError!void {
self.static_lib_dir = try await (async ccPrintFileName(loop, "crtbegin.o", true) catch unreachable);
async fn findNativeStaticLibDir(self: *LibCInstallation, allocator: *Allocator) FindError!void {
self.static_lib_dir = try ccPrintFileName(allocator, "crtbegin.o", true);
}
async fn findNativeDynamicLinker(self: *LibCInstallation, loop: *event.Loop) FindError!void {
async fn findNativeDynamicLinker(self: *LibCInstallation, allocator: *Allocator) FindError!void {
var dyn_tests = [_]DynTest{
DynTest{
.name = "ld-linux-x86-64.so.2",
@ -319,12 +321,12 @@ pub const LibCInstallation = struct {
.result = null,
},
};
var group = event.Group(FindError!void).init(loop);
errdefer group.deinit();
var group = event.Group(FindError!void).init(allocator);
errdefer group.wait() catch {};
for (dyn_tests) |*dyn_test| {
try group.call(testNativeDynamicLinker, self, loop, dyn_test);
try group.call(testNativeDynamicLinker, self, allocator, dyn_test);
}
try await (async group.wait() catch unreachable);
try group.wait();
for (dyn_tests) |*dyn_test| {
if (dyn_test.result) |result| {
self.dynamic_linker_path = result;
@ -338,8 +340,8 @@ pub const LibCInstallation = struct {
result: ?[]const u8,
};
async fn testNativeDynamicLinker(self: *LibCInstallation, loop: *event.Loop, dyn_test: *DynTest) FindError!void {
if (await (async ccPrintFileName(loop, dyn_test.name, false) catch unreachable)) |result| {
async fn testNativeDynamicLinker(self: *LibCInstallation, allocator: *Allocator, dyn_test: *DynTest) FindError!void {
if (ccPrintFileName(allocator, dyn_test.name, false)) |result| {
dyn_test.result = result;
return;
} else |err| switch (err) {
@ -348,11 +350,11 @@ pub const LibCInstallation = struct {
}
}
async fn findNativeKernel32LibDir(self: *LibCInstallation, loop: *event.Loop, sdk: *c.ZigWindowsSDK) FindError!void {
async fn findNativeKernel32LibDir(self: *LibCInstallation, allocator: *Allocator, sdk: *c.ZigWindowsSDK) FindError!void {
var search_buf: [2]Search = undefined;
const searches = fillSearch(&search_buf, sdk);
var result_buf = try std.Buffer.initSize(loop.allocator, 0);
var result_buf = try std.Buffer.initSize(allocator, 0);
defer result_buf.deinit();
for (searches) |search| {
@ -360,16 +362,16 @@ pub const LibCInstallation = struct {
const stream = &std.io.BufferOutStream.init(&result_buf).stream;
try stream.print("{}\\Lib\\{}\\um\\", search.path, search.version);
switch (builtin.arch) {
builtin.Arch.i386 => try stream.write("x86\\"),
builtin.Arch.x86_64 => try stream.write("x64\\"),
builtin.Arch.aarch64 => try stream.write("arm\\"),
.i386 => try stream.write("x86\\"),
.x86_64 => try stream.write("x64\\"),
.aarch64 => try stream.write("arm\\"),
else => return error.UnsupportedArchitecture,
}
const kernel32_path = try fs.path.join(
loop.allocator,
allocator,
[_][]const u8{ result_buf.toSliceConst(), "kernel32.lib" },
);
defer loop.allocator.free(kernel32_path);
defer allocator.free(kernel32_path);
if (try fileExists(kernel32_path)) {
self.kernel32_lib_dir = result_buf.toOwnedSlice();
return;
@ -380,7 +382,7 @@ pub const LibCInstallation = struct {
fn initEmpty(self: *LibCInstallation) void {
self.* = LibCInstallation{
.include_dir = ([*]const u8)(undefined)[0..0],
.include_dir = @as([*]const u8, undefined)[0..0],
.lib_dir = null,
.static_lib_dir = null,
.msvc_lib_dir = null,
@ -391,15 +393,15 @@ pub const LibCInstallation = struct {
};
/// caller owns returned memory
async fn ccPrintFileName(loop: *event.Loop, o_file: []const u8, want_dirname: bool) ![]u8 {
async fn ccPrintFileName(allocator: *Allocator, o_file: []const u8, want_dirname: bool) ![]u8 {
const cc_exe = std.os.getenv("CC") orelse "cc";
const arg1 = try std.fmt.allocPrint(loop.allocator, "-print-file-name={}", o_file);
defer loop.allocator.free(arg1);
const arg1 = try std.fmt.allocPrint(allocator, "-print-file-name={}", o_file);
defer allocator.free(arg1);
const argv = [_][]const u8{ cc_exe, arg1 };
// TODO This simulates evented I/O for the child process exec
await (async loop.yield() catch unreachable);
const errorable_result = std.ChildProcess.exec(loop.allocator, argv, null, null, 1024 * 1024);
std.event.Loop.instance.?.yield();
const errorable_result = std.ChildProcess.exec(allocator, argv, null, null, 1024 * 1024);
const exec_result = if (std.debug.runtime_safety) blk: {
break :blk errorable_result catch unreachable;
} else blk: {
@ -409,8 +411,8 @@ async fn ccPrintFileName(loop: *event.Loop, o_file: []const u8, want_dirname: bo
};
};
defer {
loop.allocator.free(exec_result.stdout);
loop.allocator.free(exec_result.stderr);
allocator.free(exec_result.stdout);
allocator.free(exec_result.stderr);
}
switch (exec_result.term) {
.Exited => |code| {
@ -425,9 +427,9 @@ async fn ccPrintFileName(loop: *event.Loop, o_file: []const u8, want_dirname: bo
const dirname = fs.path.dirname(line) orelse return error.LibCRuntimeNotFound;
if (want_dirname) {
return std.mem.dupe(loop.allocator, u8, dirname);
return std.mem.dupe(allocator, u8, dirname);
} else {
return std.mem.dupe(loop.allocator, u8, line);
return std.mem.dupe(allocator, u8, line);
}
}
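
The `.Exited => |code|` prongs above combine enum-literal prong names with payload captures on a tagged union. A minimal sketch of the same pattern (the `Term`/`checkTerm` names are illustrative, not the std.ChildProcess API):

const std = @import("std");

const Term = union(enum) {
    Exited: u32,
    Signal: u32,
};

fn checkTerm(term: Term) !void {
    switch (term) {
        // `|code|` captures the payload of the active variant.
        .Exited => |code| if (code != 0) return error.CCompilerExitCode,
        .Signal => return error.CCompilerCrashed,
    }
}

test "tagged union payload capture" {
    try checkTerm(Term{ .Exited = 0 });
    std.testing.expectError(error.CCompilerExitCode, checkTerm(Term{ .Exited = 1 }));
}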

View File

@ -1,12 +1,12 @@
const std = @import("std");
const mem = std.mem;
const c = @import("c.zig");
const builtin = @import("builtin");
const ObjectFormat = builtin.ObjectFormat;
const Compilation = @import("compilation.zig").Compilation;
const Target = @import("target.zig").Target;
const Target = std.Target;
const ObjectFormat = Target.ObjectFormat;
const LibCInstallation = @import("libc_installation.zig").LibCInstallation;
const assert = std.debug.assert;
const util = @import("util.zig");
const Context = struct {
comp: *Compilation,
@ -26,7 +26,7 @@ pub async fn link(comp: *Compilation) !void {
.comp = comp,
.arena = std.heap.ArenaAllocator.init(comp.gpa()),
.args = undefined,
.link_in_crt = comp.haveLibC() and comp.kind == Compilation.Kind.Exe,
.link_in_crt = comp.haveLibC() and comp.kind == .Exe,
.link_err = {},
.link_msg = undefined,
.libc = undefined,
@ -41,14 +41,14 @@ pub async fn link(comp: *Compilation) !void {
} else {
ctx.out_file_path = try std.Buffer.init(&ctx.arena.allocator, comp.name.toSliceConst());
switch (comp.kind) {
Compilation.Kind.Exe => {
.Exe => {
try ctx.out_file_path.append(comp.target.exeFileExt());
},
Compilation.Kind.Lib => {
try ctx.out_file_path.append(comp.target.libFileExt(comp.is_static));
.Lib => {
try ctx.out_file_path.append(if (comp.is_static) comp.target.staticLibSuffix() else comp.target.dynamicLibSuffix());
},
Compilation.Kind.Obj => {
try ctx.out_file_path.append(comp.target.objFileExt());
.Obj => {
try ctx.out_file_path.append(comp.target.oFileExt());
},
}
}
@ -61,7 +61,7 @@ pub async fn link(comp: *Compilation) !void {
ctx.libc = ctx.comp.override_libc orelse blk: {
switch (comp.target) {
Target.Native => {
break :blk (await (async comp.zig_compiler.getNativeLibC() catch unreachable)) catch return error.LibCRequiredButNotProvidedOrFound;
break :blk comp.zig_compiler.getNativeLibC() catch return error.LibCRequiredButNotProvidedOrFound;
},
else => return error.LibCRequiredButNotProvidedOrFound,
}
@ -78,12 +78,12 @@ pub async fn link(comp: *Compilation) !void {
std.debug.warn("\n");
}
const extern_ofmt = toExternObjectFormatType(comp.target.getObjectFormat());
const extern_ofmt = toExternObjectFormatType(util.getObjectFormat(comp.target));
const args_slice = ctx.args.toSlice();
{
// LLD is not thread-safe, so we grab a global lock.
const held = await (async comp.zig_compiler.lld_lock.acquire() catch unreachable);
const held = comp.zig_compiler.lld_lock.acquire();
defer held.release();
// Not evented I/O. LLD does its own multithreading internally.
@ -121,21 +121,21 @@ fn linkDiagCallbackErrorable(ctx: *Context, msg: []const u8) !void {
fn toExternObjectFormatType(ofmt: ObjectFormat) c.ZigLLVM_ObjectFormatType {
return switch (ofmt) {
ObjectFormat.unknown => c.ZigLLVM_UnknownObjectFormat,
ObjectFormat.coff => c.ZigLLVM_COFF,
ObjectFormat.elf => c.ZigLLVM_ELF,
ObjectFormat.macho => c.ZigLLVM_MachO,
ObjectFormat.wasm => c.ZigLLVM_Wasm,
.unknown => c.ZigLLVM_UnknownObjectFormat,
.coff => c.ZigLLVM_COFF,
.elf => c.ZigLLVM_ELF,
.macho => c.ZigLLVM_MachO,
.wasm => c.ZigLLVM_Wasm,
};
}
fn constructLinkerArgs(ctx: *Context) !void {
switch (ctx.comp.target.getObjectFormat()) {
ObjectFormat.unknown => unreachable,
ObjectFormat.coff => return constructLinkerArgsCoff(ctx),
ObjectFormat.elf => return constructLinkerArgsElf(ctx),
ObjectFormat.macho => return constructLinkerArgsMachO(ctx),
ObjectFormat.wasm => return constructLinkerArgsWasm(ctx),
switch (util.getObjectFormat(ctx.comp.target)) {
.unknown => unreachable,
.coff => return constructLinkerArgsCoff(ctx),
.elf => return constructLinkerArgsElf(ctx),
.macho => return constructLinkerArgsMachO(ctx),
.wasm => return constructLinkerArgsWasm(ctx),
}
}
@ -154,7 +154,7 @@ fn constructLinkerArgsElf(ctx: *Context) !void {
//bool shared = !g->is_static && is_lib;
//Buf *soname = nullptr;
if (ctx.comp.is_static) {
if (ctx.comp.target.isArmOrThumb()) {
if (util.isArmOrThumb(ctx.comp.target)) {
try ctx.args.append("-Bstatic");
} else {
try ctx.args.append("-static");
@ -222,7 +222,7 @@ fn constructLinkerArgsElf(ctx: *Context) !void {
if (!ctx.comp.is_static) {
const dl = blk: {
if (ctx.libc.dynamic_linker_path) |dl| break :blk dl;
if (ctx.comp.target.getDynamicLinkerPath()) |dl| break :blk dl;
if (util.getDynamicLinkerPath(ctx.comp.target)) |dl| break :blk dl;
return error.LibCMissingDynamicLinker;
};
try ctx.args.append("-dynamic-linker");
@ -324,9 +324,9 @@ fn constructLinkerArgsCoff(ctx: *Context) !void {
}
switch (ctx.comp.target.getArch()) {
builtin.Arch.i386 => try ctx.args.append("-MACHINE:X86"),
builtin.Arch.x86_64 => try ctx.args.append("-MACHINE:X64"),
builtin.Arch.aarch64 => try ctx.args.append("-MACHINE:ARM"),
.i386 => try ctx.args.append("-MACHINE:X86"),
.x86_64 => try ctx.args.append("-MACHINE:X64"),
.aarch64 => try ctx.args.append("-MACHINE:ARM"),
else => return error.UnsupportedLinkArchitecture,
}
@ -336,7 +336,7 @@ fn constructLinkerArgsCoff(ctx: *Context) !void {
try ctx.args.append("/SUBSYSTEM:console");
}
const is_library = ctx.comp.kind == Compilation.Kind.Lib;
const is_library = ctx.comp.kind == .Lib;
const out_arg = try std.fmt.allocPrint(&ctx.arena.allocator, "-OUT:{}\x00", ctx.out_file_path.toSliceConst());
try ctx.args.append(out_arg.ptr);
@ -349,7 +349,7 @@ fn constructLinkerArgsCoff(ctx: *Context) !void {
if (ctx.link_in_crt) {
const lib_str = if (ctx.comp.is_static) "lib" else "";
const d_str = if (ctx.comp.build_mode == builtin.Mode.Debug) "d" else "";
const d_str = if (ctx.comp.build_mode == .Debug) "d" else "";
if (ctx.comp.is_static) {
const cmt_lib_name = try std.fmt.allocPrint(&ctx.arena.allocator, "libcmt{}.lib\x00", d_str);
@ -400,7 +400,7 @@ fn constructLinkerArgsCoff(ctx: *Context) !void {
try addFnObjects(ctx);
switch (ctx.comp.kind) {
Compilation.Kind.Exe, Compilation.Kind.Lib => {
.Exe, .Lib => {
if (!ctx.comp.haveLibC()) {
@panic("TODO");
//Buf *builtin_o_path = build_o(g, "builtin");
@ -412,7 +412,7 @@ fn constructLinkerArgsCoff(ctx: *Context) !void {
//Buf *compiler_rt_o_path = build_compiler_rt(g);
//lj->args.append(buf_ptr(compiler_rt_o_path));
},
Compilation.Kind.Obj => {},
.Obj => {},
}
//Buf *def_contents = buf_alloc();
@ -469,7 +469,7 @@ fn constructLinkerArgsMachO(ctx: *Context) !void {
try ctx.args.append("-export_dynamic");
}
const is_lib = ctx.comp.kind == Compilation.Kind.Lib;
const is_lib = ctx.comp.kind == .Lib;
const shared = !ctx.comp.is_static and is_lib;
if (ctx.comp.is_static) {
try ctx.args.append("-static");
@ -512,14 +512,14 @@ fn constructLinkerArgsMachO(ctx: *Context) !void {
const platform = try DarwinPlatform.get(ctx.comp);
switch (platform.kind) {
DarwinPlatform.Kind.MacOS => try ctx.args.append("-macosx_version_min"),
DarwinPlatform.Kind.IPhoneOS => try ctx.args.append("-iphoneos_version_min"),
DarwinPlatform.Kind.IPhoneOSSimulator => try ctx.args.append("-ios_simulator_version_min"),
.MacOS => try ctx.args.append("-macosx_version_min"),
.IPhoneOS => try ctx.args.append("-iphoneos_version_min"),
.IPhoneOSSimulator => try ctx.args.append("-ios_simulator_version_min"),
}
const ver_str = try std.fmt.allocPrint(&ctx.arena.allocator, "{}.{}.{}\x00", platform.major, platform.minor, platform.micro);
try ctx.args.append(ver_str.ptr);
if (ctx.comp.kind == Compilation.Kind.Exe) {
if (ctx.comp.kind == .Exe) {
if (ctx.comp.is_static) {
try ctx.args.append("-no_pie");
} else {
@ -542,7 +542,7 @@ fn constructLinkerArgsMachO(ctx: *Context) !void {
try ctx.args.append("-lcrt0.o");
} else {
switch (platform.kind) {
DarwinPlatform.Kind.MacOS => {
.MacOS => {
if (platform.versionLessThan(10, 5)) {
try ctx.args.append("-lcrt1.o");
} else if (platform.versionLessThan(10, 6)) {
@ -551,8 +551,8 @@ fn constructLinkerArgsMachO(ctx: *Context) !void {
try ctx.args.append("-lcrt1.10.6.o");
}
},
DarwinPlatform.Kind.IPhoneOS => {
if (ctx.comp.target.getArch() == builtin.Arch.aarch64) {
.IPhoneOS => {
if (ctx.comp.target.getArch() == .aarch64) {
// iOS does not need any crt1 files for arm64
} else if (platform.versionLessThan(3, 1)) {
try ctx.args.append("-lcrt1.o");
@ -560,7 +560,7 @@ fn constructLinkerArgsMachO(ctx: *Context) !void {
try ctx.args.append("-lcrt1.3.1.o");
}
},
DarwinPlatform.Kind.IPhoneOSSimulator => {}, // no crt1.o needed
.IPhoneOSSimulator => {}, // no crt1.o needed
}
}
@ -605,7 +605,7 @@ fn constructLinkerArgsMachO(ctx: *Context) !void {
try ctx.args.append("dynamic_lookup");
}
if (platform.kind == DarwinPlatform.Kind.MacOS) {
if (platform.kind == .MacOS) {
if (platform.versionLessThan(10, 5)) {
try ctx.args.append("-lgcc_s.10.4");
} else if (platform.versionLessThan(10, 6)) {
@ -659,17 +659,17 @@ const DarwinPlatform = struct {
fn get(comp: *Compilation) !DarwinPlatform {
var result: DarwinPlatform = undefined;
const ver_str = switch (comp.darwin_version_min) {
Compilation.DarwinVersionMin.MacOS => |ver| blk: {
result.kind = Kind.MacOS;
.MacOS => |ver| blk: {
result.kind = .MacOS;
break :blk ver;
},
Compilation.DarwinVersionMin.Ios => |ver| blk: {
result.kind = Kind.IPhoneOS;
.Ios => |ver| blk: {
result.kind = .IPhoneOS;
break :blk ver;
},
Compilation.DarwinVersionMin.None => blk: {
.None => blk: {
assert(comp.target.getOs() == .macosx);
result.kind = Kind.MacOS;
result.kind = .MacOS;
break :blk "10.14";
},
};
@ -686,11 +686,11 @@ const DarwinPlatform = struct {
return error.InvalidDarwinVersionString;
}
if (result.kind == Kind.IPhoneOS) {
if (result.kind == .IPhoneOS) {
switch (comp.target.getArch()) {
builtin.Arch.i386,
builtin.Arch.x86_64,
=> result.kind = Kind.IPhoneOSSimulator,
.i386,
.x86_64,
=> result.kind = .IPhoneOSSimulator,
else => {},
}
}

View File

@ -1,4 +1,3 @@
const builtin = @import("builtin");
const c = @import("c.zig");
const assert = @import("std").debug.assert;
@ -268,7 +267,7 @@ pub const FnInline = extern enum {
};
fn removeNullability(comptime T: type) type {
comptime assert(@typeInfo(T).Pointer.size == @import("builtin").TypeInfo.Pointer.Size.C);
comptime assert(@typeInfo(T).Pointer.size == .C);
return *T.Child;
}

View File

@ -18,7 +18,7 @@ const Args = arg.Args;
const Flag = arg.Flag;
const ZigCompiler = @import("compilation.zig").ZigCompiler;
const Compilation = @import("compilation.zig").Compilation;
const Target = @import("target.zig").Target;
const Target = std.Target;
const errmsg = @import("errmsg.zig");
const LibCInstallation = @import("libc_installation.zig").LibCInstallation;
@ -26,6 +26,8 @@ var stderr_file: fs.File = undefined;
var stderr: *io.OutStream(fs.File.WriteError) = undefined;
var stdout: *io.OutStream(fs.File.WriteError) = undefined;
pub const io_mode = .evented;
pub const max_src_size = 2 * 1024 * 1024 * 1024; // 2 GiB
const usage =
@ -258,47 +260,47 @@ fn buildOutputType(allocator: *Allocator, args: []const []const u8, out_type: Co
process.exit(0);
}
const build_mode = blk: {
const build_mode: std.builtin.Mode = blk: {
if (flags.single("mode")) |mode_flag| {
if (mem.eql(u8, mode_flag, "debug")) {
break :blk builtin.Mode.Debug;
break :blk .Debug;
} else if (mem.eql(u8, mode_flag, "release-fast")) {
break :blk builtin.Mode.ReleaseFast;
break :blk .ReleaseFast;
} else if (mem.eql(u8, mode_flag, "release-safe")) {
break :blk builtin.Mode.ReleaseSafe;
break :blk .ReleaseSafe;
} else if (mem.eql(u8, mode_flag, "release-small")) {
break :blk builtin.Mode.ReleaseSmall;
break :blk .ReleaseSmall;
} else unreachable;
} else {
break :blk builtin.Mode.Debug;
break :blk .Debug;
}
};
const color = blk: {
const color: errmsg.Color = blk: {
if (flags.single("color")) |color_flag| {
if (mem.eql(u8, color_flag, "auto")) {
break :blk errmsg.Color.Auto;
break :blk .Auto;
} else if (mem.eql(u8, color_flag, "on")) {
break :blk errmsg.Color.On;
break :blk .On;
} else if (mem.eql(u8, color_flag, "off")) {
break :blk errmsg.Color.Off;
break :blk .Off;
} else unreachable;
} else {
break :blk errmsg.Color.Auto;
break :blk .Auto;
}
};
const emit_type = blk: {
const emit_type: Compilation.Emit = blk: {
if (flags.single("emit")) |emit_flag| {
if (mem.eql(u8, emit_flag, "asm")) {
break :blk Compilation.Emit.Assembly;
break :blk .Assembly;
} else if (mem.eql(u8, emit_flag, "bin")) {
break :blk Compilation.Emit.Binary;
break :blk .Binary;
} else if (mem.eql(u8, emit_flag, "llvm-ir")) {
break :blk Compilation.Emit.LlvmIr;
break :blk .LlvmIr;
} else unreachable;
} else {
break :blk Compilation.Emit.Binary;
break :blk .Binary;
}
};
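These hunks all apply the same mechanical change: the labeled block now carries an explicit result type, so the bare enum literals (.Debug, .Auto, .Binary) infer their enum and the long qualified names disappear. A minimal sketch of that pattern, not taken from the patch (the Mode enum and parseMode helper are invented for illustration):

const std = @import("std");

const Mode = enum { Debug, ReleaseFast };

fn parseMode(flag: ?[]const u8) Mode {
    // The result is typed as Mode, so `.Debug` and `.ReleaseFast`
    // resolve without spelling out the enum name at each break.
    const mode: Mode = blk: {
        if (flag) |f| {
            if (std.mem.eql(u8, f, "debug")) break :blk .Debug;
            if (std.mem.eql(u8, f, "release-fast")) break :blk .ReleaseFast;
            unreachable;
        }
        break :blk .Debug;
    };
    return mode;
}

test "parseMode defaults to Debug" {
    std.debug.assert(parseMode(null) == .Debug);
}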
@ -383,11 +385,7 @@ fn buildOutputType(allocator: *Allocator, args: []const []const u8, out_type: Co
var override_libc: LibCInstallation = undefined;
var loop: event.Loop = undefined;
try loop.initMultiThreaded(allocator);
defer loop.deinit();
var zig_compiler = try ZigCompiler.init(&loop);
var zig_compiler = try ZigCompiler.init(allocator);
defer zig_compiler.deinit();
var comp = try Compilation.create(
@ -403,7 +401,7 @@ fn buildOutputType(allocator: *Allocator, args: []const []const u8, out_type: Co
defer comp.destroy();
if (flags.single("libc")) |libc_path| {
parseLibcPaths(loop.allocator, &override_libc, libc_path);
parseLibcPaths(allocator, &override_libc, libc_path);
comp.override_libc = &override_libc;
}
@ -463,25 +461,24 @@ fn buildOutputType(allocator: *Allocator, args: []const []const u8, out_type: Co
comp.link_objects = link_objects;
comp.start();
// TODO const process_build_events_handle = try async<loop.allocator> processBuildEvents(comp, color);
loop.run();
const frame = async processBuildEvents(comp, color);
}
async fn processBuildEvents(comp: *Compilation, color: errmsg.Color) void {
var count: usize = 0;
while (true) {
// TODO directly awaiting async should guarantee memory allocation elision
const build_event = await (async comp.events.get() catch unreachable);
const build_event = comp.events.get();
count += 1;
switch (build_event) {
Compilation.Event.Ok => {
.Ok => {
stderr.print("Build {} succeeded\n", count) catch process.exit(1);
},
Compilation.Event.Error => |err| {
.Error => |err| {
stderr.print("Build {} failed: {}\n", count, @errorName(err)) catch process.exit(1);
},
Compilation.Event.Fail => |msgs| {
.Fail => |msgs| {
stderr.print("Build {} compile errors:\n", count) catch process.exit(1);
for (msgs) |msg| {
defer msg.destroy();
@ -536,7 +533,7 @@ const Fmt = struct {
seen: event.Locked(SeenMap),
any_error: bool,
color: errmsg.Color,
loop: *event.Loop,
allocator: *Allocator,
const SeenMap = std.StringHashMap(void);
};
@ -567,20 +564,14 @@ fn cmdLibC(allocator: *Allocator, args: []const []const u8) !void {
},
}
var loop: event.Loop = undefined;
try loop.initMultiThreaded(allocator);
defer loop.deinit();
var zig_compiler = try ZigCompiler.init(&loop);
var zig_compiler = try ZigCompiler.init(allocator);
defer zig_compiler.deinit();
// TODO const handle = try async<loop.allocator> findLibCAsync(&zig_compiler);
loop.run();
const frame = async findLibCAsync(&zig_compiler);
}
async fn findLibCAsync(zig_compiler: *ZigCompiler) void {
const libc = (await (async zig_compiler.getNativeLibC() catch unreachable)) catch |err| {
const libc = zig_compiler.getNativeLibC() catch |err| {
stderr.print("unable to find libc: {}\n", @errorName(err)) catch process.exit(1);
process.exit(1);
};
@ -596,17 +587,17 @@ fn cmdFmt(allocator: *Allocator, args: []const []const u8) !void {
process.exit(0);
}
const color = blk: {
const color: errmsg.Color = blk: {
if (flags.single("color")) |color_flag| {
if (mem.eql(u8, color_flag, "auto")) {
break :blk errmsg.Color.Auto;
break :blk .Auto;
} else if (mem.eql(u8, color_flag, "on")) {
break :blk errmsg.Color.On;
break :blk .On;
} else if (mem.eql(u8, color_flag, "off")) {
break :blk errmsg.Color.Off;
break :blk .Off;
} else unreachable;
} else {
break :blk errmsg.Color.Auto;
break :blk .Auto;
}
};
@ -640,7 +631,7 @@ fn cmdFmt(allocator: *Allocator, args: []const []const u8) !void {
}
if (flags.present("check")) {
const anything_changed = try std.zig.render(allocator, io.null_out_stream, tree);
const code = if (anything_changed) u8(1) else u8(0);
const code: u8 = if (anything_changed) 1 else 0;
process.exit(code);
}
@ -653,28 +644,11 @@ fn cmdFmt(allocator: *Allocator, args: []const []const u8) !void {
process.exit(1);
}
var loop: event.Loop = undefined;
try loop.initMultiThreaded(allocator);
defer loop.deinit();
var result: FmtError!void = undefined;
// TODO const main_handle = try async<allocator> asyncFmtMainChecked(
// TODO &result,
// TODO &loop,
// TODO &flags,
// TODO color,
// TODO );
loop.run();
return result;
}
async fn asyncFmtMainChecked(
result: *(FmtError!void),
loop: *event.Loop,
flags: *const Args,
color: errmsg.Color,
) void {
result.* = await (async asyncFmtMain(loop, flags, color) catch unreachable);
return asyncFmtMain(
allocator,
&flags,
color,
);
}
const FmtError = error{
@ -700,72 +674,69 @@ const FmtError = error{
} || fs.File.OpenError;
async fn asyncFmtMain(
loop: *event.Loop,
allocator: *Allocator,
flags: *const Args,
color: errmsg.Color,
) FmtError!void {
suspend {
resume @handle();
}
var fmt = Fmt{
.seen = event.Locked(Fmt.SeenMap).init(loop, Fmt.SeenMap.init(loop.allocator)),
.allocator = allocator,
.seen = event.Locked(Fmt.SeenMap).init(Fmt.SeenMap.init(allocator)),
.any_error = false,
.color = color,
.loop = loop,
};
const check_mode = flags.present("check");
var group = event.Group(FmtError!void).init(loop);
var group = event.Group(FmtError!void).init(allocator);
for (flags.positionals.toSliceConst()) |file_path| {
try group.call(fmtPath, &fmt, file_path, check_mode);
}
try await (async group.wait() catch unreachable);
try group.wait();
if (fmt.any_error) {
process.exit(1);
}
}
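For reference, the allocator-based Group usage that replaces the loop-based calls above reduces to the shape below. This is a hedged sketch, not part of the patch: it assumes only the Group API visible in this hunk (init with an allocator, call with a function plus its arguments, wait to join), and the worker function and its arguments are invented.

const std = @import("std");

fn addOne(total: *usize) anyerror!void {
    total.* += 1; // stand-in for real per-item work such as fmtPath
}

fn runAll(allocator: *std.mem.Allocator) anyerror!void {
    var total: usize = 0;
    var group = std.event.Group(anyerror!void).init(allocator);
    // Each call is registered with the group; no event loop handle is
    // threaded through anymore, only the allocator.
    try group.call(addOne, &total);
    try group.call(addOne, &total);
    // wait() joins every outstanding call and returns the first error, if any.
    try group.wait();
}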
async fn fmtPath(fmt: *Fmt, file_path_ref: []const u8, check_mode: bool) FmtError!void {
const file_path = try std.mem.dupe(fmt.loop.allocator, u8, file_path_ref);
defer fmt.loop.allocator.free(file_path);
const file_path = try std.mem.dupe(fmt.allocator, u8, file_path_ref);
defer fmt.allocator.free(file_path);
{
const held = await (async fmt.seen.acquire() catch unreachable);
const held = fmt.seen.acquire();
defer held.release();
if (try held.value.put(file_path, {})) |_| return;
}
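The acquire/release block above is the general pattern for event.Locked data after the rewrite: acquire() returns the held value directly instead of being awaited. A minimal sketch of that pattern under the same assumptions (the map type and helper are invented for illustration, mirroring the seen-file check above):

const std = @import("std");

const SeenMap = std.StringHashMap(void);

/// Returns true if `path` was already recorded.
fn markSeen(seen: *std.event.Locked(SeenMap), path: []const u8) !bool {
    const held = seen.acquire();
    defer held.release();
    // put() yields the previous entry when the key already existed.
    if (try held.value.put(path, {})) |_| return true;
    return false;
}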
const source_code = (await try async event.fs.readFile(
fmt.loop,
file_path,
max_src_size,
)) catch |err| switch (err) {
error.IsDir, error.AccessDenied => {
// TODO make event based (and dir.next())
var dir = try fs.Dir.open(file_path);
defer dir.close();
const source_code = "";
// const source_code = event.fs.readFile(
// file_path,
// max_src_size,
// ) catch |err| switch (err) {
// error.IsDir, error.AccessDenied => {
// // TODO make event based (and dir.next())
// var dir = try fs.Dir.open(file_path);
// defer dir.close();
var group = event.Group(FmtError!void).init(fmt.loop);
while (try dir.next()) |entry| {
if (entry.kind == fs.Dir.Entry.Kind.Directory or mem.endsWith(u8, entry.name, ".zig")) {
const full_path = try fs.path.join(fmt.loop.allocator, [_][]const u8{ file_path, entry.name });
try group.call(fmtPath, fmt, full_path, check_mode);
}
}
return await (async group.wait() catch unreachable);
},
else => {
// TODO lock stderr printing
try stderr.print("unable to open '{}': {}\n", file_path, err);
fmt.any_error = true;
return;
},
};
defer fmt.loop.allocator.free(source_code);
// var group = event.Group(FmtError!void).init(fmt.allocator);
// while (try dir.next()) |entry| {
// if (entry.kind == fs.Dir.Entry.Kind.Directory or mem.endsWith(u8, entry.name, ".zig")) {
// const full_path = try fs.path.join(fmt.allocator, [_][]const u8{ file_path, entry.name });
// try group.call(fmtPath, fmt, full_path, check_mode);
// }
// }
// return group.wait();
// },
// else => {
// // TODO lock stderr printing
// try stderr.print("unable to open '{}': {}\n", file_path, err);
// fmt.any_error = true;
// return;
// },
// };
// defer fmt.allocator.free(source_code);
const tree = std.zig.parse(fmt.loop.allocator, source_code) catch |err| {
const tree = std.zig.parse(fmt.allocator, source_code) catch |err| {
try stderr.print("error parsing file '{}': {}\n", file_path, err);
fmt.any_error = true;
return;
@ -774,8 +745,8 @@ async fn fmtPath(fmt: *Fmt, file_path_ref: []const u8, check_mode: bool) FmtErro
var error_it = tree.errors.iterator(0);
while (error_it.next()) |parse_error| {
const msg = try errmsg.Msg.createFromParseError(fmt.loop.allocator, parse_error, tree, file_path);
defer fmt.loop.allocator.destroy(msg);
const msg = try errmsg.Msg.createFromParseError(fmt.allocator, parse_error, tree, file_path);
defer fmt.allocator.destroy(msg);
try msg.printToFile(stderr_file, fmt.color);
}
@ -785,17 +756,17 @@ async fn fmtPath(fmt: *Fmt, file_path_ref: []const u8, check_mode: bool) FmtErro
}
if (check_mode) {
const anything_changed = try std.zig.render(fmt.loop.allocator, io.null_out_stream, tree);
const anything_changed = try std.zig.render(fmt.allocator, io.null_out_stream, tree);
if (anything_changed) {
try stderr.print("{}\n", file_path);
fmt.any_error = true;
}
} else {
// TODO make this evented
const baf = try io.BufferedAtomicFile.create(fmt.loop.allocator, file_path);
const baf = try io.BufferedAtomicFile.create(fmt.allocator, file_path);
defer baf.destroy();
const anything_changed = try std.zig.render(fmt.loop.allocator, baf.stream(), tree);
const anything_changed = try std.zig.render(fmt.allocator, baf.stream(), tree);
if (anything_changed) {
try stderr.print("{}\n", file_path);
try baf.finish();
@ -822,8 +793,8 @@ fn cmdTargets(allocator: *Allocator, args: []const []const u8) !void {
try stdout.write("Operating Systems:\n");
{
comptime var i: usize = 0;
inline while (i < @memberCount(builtin.Os)) : (i += 1) {
comptime const os_tag = @memberName(builtin.Os, i);
inline while (i < @memberCount(Target.Os)) : (i += 1) {
comptime const os_tag = @memberName(Target.Os, i);
// NOTE: Cannot use empty string, see #918.
comptime const native_str = if (comptime mem.eql(u8, os_tag, @tagName(builtin.os))) " (native)\n" else "\n";
@ -835,8 +806,8 @@ fn cmdTargets(allocator: *Allocator, args: []const []const u8) !void {
try stdout.write("C ABIs:\n");
{
comptime var i: usize = 0;
inline while (i < @memberCount(builtin.Abi)) : (i += 1) {
comptime const abi_tag = @memberName(builtin.Abi, i);
inline while (i < @memberCount(Target.Abi)) : (i += 1) {
comptime const abi_tag = @memberName(Target.Abi, i);
// NOTE: Cannot use empty string, see #918.
comptime const native_str = if (comptime mem.eql(u8, abi_tag, @tagName(builtin.abi))) " (native)\n" else "\n";
@ -911,21 +882,17 @@ fn cmdInternalBuildInfo(allocator: *Allocator, args: []const []const u8) !void {
try stdout.print(
\\ZIG_CMAKE_BINARY_DIR {}
\\ZIG_CXX_COMPILER {}
\\ZIG_LLVM_CONFIG_EXE {}
\\ZIG_LLD_INCLUDE_PATH {}
\\ZIG_LLD_LIBRARIES {}
\\ZIG_STD_FILES {}
\\ZIG_C_HEADER_FILES {}
\\ZIG_LLVM_CONFIG_EXE {}
\\ZIG_DIA_GUIDS_LIB {}
\\
,
std.mem.toSliceConst(u8, c.ZIG_CMAKE_BINARY_DIR),
std.mem.toSliceConst(u8, c.ZIG_CXX_COMPILER),
std.mem.toSliceConst(u8, c.ZIG_LLVM_CONFIG_EXE),
std.mem.toSliceConst(u8, c.ZIG_LLD_INCLUDE_PATH),
std.mem.toSliceConst(u8, c.ZIG_LLD_LIBRARIES),
std.mem.toSliceConst(u8, c.ZIG_STD_FILES),
std.mem.toSliceConst(u8, c.ZIG_C_HEADER_FILES),
std.mem.toSliceConst(u8, c.ZIG_LLVM_CONFIG_EXE),
std.mem.toSliceConst(u8, c.ZIG_DIA_GUIDS_LIB),
);
}

View File

@ -1,5 +1,4 @@
const std = @import("std");
const builtin = @import("builtin");
const Allocator = mem.Allocator;
const Decl = @import("decl.zig").Decl;
const Compilation = @import("compilation.zig").Compilation;
@ -28,15 +27,15 @@ pub const Scope = struct {
if (base.ref_count.decr() == 1) {
if (base.parent) |parent| parent.deref(comp);
switch (base.id) {
Id.Root => @fieldParentPtr(Root, "base", base).destroy(comp),
Id.Decls => @fieldParentPtr(Decls, "base", base).destroy(comp),
Id.Block => @fieldParentPtr(Block, "base", base).destroy(comp),
Id.FnDef => @fieldParentPtr(FnDef, "base", base).destroy(comp),
Id.CompTime => @fieldParentPtr(CompTime, "base", base).destroy(comp),
Id.Defer => @fieldParentPtr(Defer, "base", base).destroy(comp),
Id.DeferExpr => @fieldParentPtr(DeferExpr, "base", base).destroy(comp),
Id.Var => @fieldParentPtr(Var, "base", base).destroy(comp),
Id.AstTree => @fieldParentPtr(AstTree, "base", base).destroy(comp),
.Root => @fieldParentPtr(Root, "base", base).destroy(comp),
.Decls => @fieldParentPtr(Decls, "base", base).destroy(comp),
.Block => @fieldParentPtr(Block, "base", base).destroy(comp),
.FnDef => @fieldParentPtr(FnDef, "base", base).destroy(comp),
.CompTime => @fieldParentPtr(CompTime, "base", base).destroy(comp),
.Defer => @fieldParentPtr(Defer, "base", base).destroy(comp),
.DeferExpr => @fieldParentPtr(DeferExpr, "base", base).destroy(comp),
.Var => @fieldParentPtr(Var, "base", base).destroy(comp),
.AstTree => @fieldParentPtr(AstTree, "base", base).destroy(comp),
}
}
}
@ -46,7 +45,7 @@ pub const Scope = struct {
while (scope.parent) |parent| {
scope = parent;
}
assert(scope.id == Id.Root);
assert(scope.id == .Root);
return @fieldParentPtr(Root, "base", scope);
}
@ -54,17 +53,17 @@ pub const Scope = struct {
var scope = base;
while (true) {
switch (scope.id) {
Id.FnDef => return @fieldParentPtr(FnDef, "base", scope),
Id.Root, Id.Decls => return null,
.FnDef => return @fieldParentPtr(FnDef, "base", scope),
.Root, .Decls => return null,
Id.Block,
Id.Defer,
Id.DeferExpr,
Id.CompTime,
Id.Var,
.Block,
.Defer,
.DeferExpr,
.CompTime,
.Var,
=> scope = scope.parent.?,
Id.AstTree => unreachable,
.AstTree => unreachable,
}
}
}
@ -73,20 +72,20 @@ pub const Scope = struct {
var scope = base;
while (true) {
switch (scope.id) {
Id.DeferExpr => return @fieldParentPtr(DeferExpr, "base", scope),
.DeferExpr => return @fieldParentPtr(DeferExpr, "base", scope),
Id.FnDef,
Id.Decls,
.FnDef,
.Decls,
=> return null,
Id.Block,
Id.Defer,
Id.CompTime,
Id.Root,
Id.Var,
.Block,
.Defer,
.CompTime,
.Root,
.Var,
=> scope = scope.parent orelse return null,
Id.AstTree => unreachable,
.AstTree => unreachable,
}
}
}
@ -123,7 +122,7 @@ pub const Scope = struct {
const self = try comp.gpa().create(Root);
self.* = Root{
.base = Scope{
.id = Id.Root,
.id = .Root,
.parent = null,
.ref_count = std.atomic.Int(usize).init(1),
},
@ -155,7 +154,7 @@ pub const Scope = struct {
.base = undefined,
.tree = tree,
};
self.base.init(Id.AstTree, &root_scope.base);
self.base.init(.AstTree, &root_scope.base);
return self;
}
@ -184,9 +183,9 @@ pub const Scope = struct {
const self = try comp.gpa().create(Decls);
self.* = Decls{
.base = undefined,
.table = event.RwLocked(Decl.Table).init(comp.loop, Decl.Table.init(comp.gpa())),
.table = event.RwLocked(Decl.Table).init(Decl.Table.init(comp.gpa())),
};
self.base.init(Id.Decls, parent);
self.base.init(.Decls, parent);
return self;
}
@ -219,15 +218,15 @@ pub const Scope = struct {
fn get(self: Safety, comp: *Compilation) bool {
return switch (self) {
Safety.Auto => switch (comp.build_mode) {
builtin.Mode.Debug,
builtin.Mode.ReleaseSafe,
.Auto => switch (comp.build_mode) {
.Debug,
.ReleaseSafe,
=> true,
builtin.Mode.ReleaseFast,
builtin.Mode.ReleaseSmall,
.ReleaseFast,
.ReleaseSmall,
=> false,
},
@TagType(Safety).Manual => |man| man.enabled,
.Manual => |man| man.enabled,
};
}
};
@ -243,7 +242,7 @@ pub const Scope = struct {
.is_comptime = undefined,
.safety = Safety.Auto,
};
self.base.init(Id.Block, parent);
self.base.init(.Block, parent);
return self;
}
@ -266,7 +265,7 @@ pub const Scope = struct {
.base = undefined,
.fn_val = null,
};
self.base.init(Id.FnDef, parent);
self.base.init(.FnDef, parent);
return self;
}
@ -282,7 +281,7 @@ pub const Scope = struct {
pub fn create(comp: *Compilation, parent: *Scope) !*CompTime {
const self = try comp.gpa().create(CompTime);
self.* = CompTime{ .base = undefined };
self.base.init(Id.CompTime, parent);
self.base.init(.CompTime, parent);
return self;
}
@ -314,7 +313,7 @@ pub const Scope = struct {
.defer_expr_scope = defer_expr_scope,
.kind = kind,
};
self.base.init(Id.Defer, parent);
self.base.init(.Defer, parent);
defer_expr_scope.base.ref();
return self;
}
@ -338,7 +337,7 @@ pub const Scope = struct {
.expr_node = expr_node,
.reported_err = false,
};
self.base.init(Id.DeferExpr, parent);
self.base.init(.DeferExpr, parent);
return self;
}
@ -404,14 +403,14 @@ pub const Scope = struct {
.src_node = src_node,
.data = undefined,
};
self.base.init(Id.Var, parent);
self.base.init(.Var, parent);
return self;
}
pub fn destroy(self: *Var, comp: *Compilation) void {
switch (self.data) {
Data.Param => {},
Data.Const => |value| value.deref(comp),
.Param => {},
.Const => |value| value.deref(comp),
}
comp.gpa().destroy(self);
}

View File

@ -1,7 +1,6 @@
// This is Zig code that is used by both stage1 and stage2.
// The prototypes in src/userland.h must match these definitions.
const builtin = @import("builtin");
const std = @import("std");
const io = std.io;
const mem = std.mem;
@ -354,9 +353,9 @@ fn printErrMsgToFile(
color: errmsg.Color,
) !void {
const color_on = switch (color) {
errmsg.Color.Auto => file.isTty(),
errmsg.Color.On => true,
errmsg.Color.Off => false,
.Auto => file.isTty(),
.On => true,
.Off => false,
};
const lok_token = parse_error.loc();
const span = errmsg.Span{
@ -421,8 +420,8 @@ export fn stage2_DepTokenizer_next(self: *stage2_DepTokenizer) stage2_DepNextRes
const textz = std.Buffer.init(&self.handle.arena.allocator, token.bytes) catch @panic("failed to create .d tokenizer token text");
return stage2_DepNextResult{
.type_id = switch (token.id) {
.target => stage2_DepNextResult.TypeId.target,
.prereq => stage2_DepNextResult.TypeId.prereq,
.target => .target,
.prereq => .prereq,
},
.textz = textz.toSlice().ptr,
};

View File

@ -1,440 +0,0 @@
const std = @import("std");
const builtin = @import("builtin");
const llvm = @import("llvm.zig");
const CInt = @import("c_int.zig").CInt;
// TODO delete this file and use std.Target
pub const FloatAbi = enum {
Hard,
Soft,
SoftFp,
};
pub const Target = union(enum) {
Native,
Cross: Cross,
pub const Cross = struct {
arch: builtin.Arch,
os: builtin.Os,
abi: builtin.Abi,
object_format: builtin.ObjectFormat,
};
pub fn objFileExt(self: Target) []const u8 {
return switch (self.getObjectFormat()) {
builtin.ObjectFormat.coff => ".obj",
else => ".o",
};
}
pub fn exeFileExt(self: Target) []const u8 {
return switch (self.getOs()) {
builtin.Os.windows => ".exe",
else => "",
};
}
pub fn libFileExt(self: Target, is_static: bool) []const u8 {
return switch (self.getOs()) {
builtin.Os.windows => if (is_static) ".lib" else ".dll",
else => if (is_static) ".a" else ".so",
};
}
pub fn getOs(self: Target) builtin.Os {
return switch (self) {
Target.Native => builtin.os,
@TagType(Target).Cross => |t| t.os,
};
}
pub fn getArch(self: Target) builtin.Arch {
switch (self) {
Target.Native => return builtin.arch,
@TagType(Target).Cross => |t| return t.arch,
}
}
pub fn getAbi(self: Target) builtin.Abi {
return switch (self) {
Target.Native => builtin.abi,
@TagType(Target).Cross => |t| t.abi,
};
}
pub fn getObjectFormat(self: Target) builtin.ObjectFormat {
return switch (self) {
Target.Native => builtin.object_format,
@TagType(Target).Cross => |t| t.object_format,
};
}
pub fn isWasm(self: Target) bool {
return switch (self.getArch()) {
builtin.Arch.wasm32, builtin.Arch.wasm64 => true,
else => false,
};
}
pub fn isDarwin(self: Target) bool {
return switch (self.getOs()) {
builtin.Os.ios, builtin.Os.macosx => true,
else => false,
};
}
pub fn isWindows(self: Target) bool {
return switch (self.getOs()) {
builtin.Os.windows => true,
else => false,
};
}
/// TODO expose the arch and subarch separately
pub fn isArmOrThumb(self: Target) bool {
return switch (self.getArch()) {
builtin.Arch.arm,
builtin.Arch.armeb,
builtin.Arch.aarch64,
builtin.Arch.aarch64_be,
builtin.Arch.thumb,
builtin.Arch.thumbeb,
=> true,
else => false,
};
}
pub fn initializeAll() void {
llvm.InitializeAllTargets();
llvm.InitializeAllTargetInfos();
llvm.InitializeAllTargetMCs();
llvm.InitializeAllAsmPrinters();
llvm.InitializeAllAsmParsers();
}
pub fn getTriple(self: Target, allocator: *std.mem.Allocator) !std.Buffer {
var result = try std.Buffer.initSize(allocator, 0);
errdefer result.deinit();
// LLVM WebAssembly output support requires the target to be activated at
// build time with -DLLVM_EXPERIMENTAL_TARGETS_TO_BUILD=WebAssembly.
//
// LLVM determines the output format based on the abi suffix,
// defaulting to an object based on the architecture. The default format in
// LLVM 6 sets the wasm arch output incorrectly to ELF. We need to
// explicitly set this ourselves in order for it to work.
//
// This is fixed in LLVM 7 and you will be able to get wasm output by
// using the target triple `wasm32-unknown-unknown-unknown`.
const env_name = if (self.isWasm()) "wasm" else @tagName(self.getAbi());
var out = &std.io.BufferOutStream.init(&result).stream;
try out.print("{}-unknown-{}-{}", @tagName(self.getArch()), @tagName(self.getOs()), env_name);
return result;
}
pub fn is64bit(self: Target) bool {
return self.getArchPtrBitWidth() == 64;
}
pub fn getArchPtrBitWidth(self: Target) u32 {
switch (self.getArch()) {
builtin.Arch.avr,
builtin.Arch.msp430,
=> return 16,
builtin.Arch.arc,
builtin.Arch.arm,
builtin.Arch.armeb,
builtin.Arch.hexagon,
builtin.Arch.le32,
builtin.Arch.mips,
builtin.Arch.mipsel,
builtin.Arch.powerpc,
builtin.Arch.r600,
builtin.Arch.riscv32,
builtin.Arch.sparc,
builtin.Arch.sparcel,
builtin.Arch.tce,
builtin.Arch.tcele,
builtin.Arch.thumb,
builtin.Arch.thumbeb,
builtin.Arch.i386,
builtin.Arch.xcore,
builtin.Arch.nvptx,
builtin.Arch.amdil,
builtin.Arch.hsail,
builtin.Arch.spir,
builtin.Arch.kalimba,
builtin.Arch.shave,
builtin.Arch.lanai,
builtin.Arch.wasm32,
builtin.Arch.renderscript32,
=> return 32,
builtin.Arch.aarch64,
builtin.Arch.aarch64_be,
builtin.Arch.mips64,
builtin.Arch.mips64el,
builtin.Arch.powerpc64,
builtin.Arch.powerpc64le,
builtin.Arch.riscv64,
builtin.Arch.x86_64,
builtin.Arch.nvptx64,
builtin.Arch.le64,
builtin.Arch.amdil64,
builtin.Arch.hsail64,
builtin.Arch.spir64,
builtin.Arch.wasm64,
builtin.Arch.renderscript64,
builtin.Arch.amdgcn,
builtin.Arch.bpfel,
builtin.Arch.bpfeb,
builtin.Arch.sparcv9,
builtin.Arch.s390x,
=> return 64,
}
}
pub fn getFloatAbi(self: Target) FloatAbi {
return switch (self.getAbi()) {
builtin.Abi.gnueabihf,
builtin.Abi.eabihf,
builtin.Abi.musleabihf,
=> FloatAbi.Hard,
else => FloatAbi.Soft,
};
}
pub fn getDynamicLinkerPath(self: Target) ?[]const u8 {
const env = self.getAbi();
const arch = self.getArch();
const os = self.getOs();
switch (os) {
builtin.Os.freebsd => {
return "/libexec/ld-elf.so.1";
},
builtin.Os.linux => {
switch (env) {
builtin.Abi.android => {
if (self.is64bit()) {
return "/system/bin/linker64";
} else {
return "/system/bin/linker";
}
},
builtin.Abi.gnux32 => {
if (arch == builtin.Arch.x86_64) {
return "/libx32/ld-linux-x32.so.2";
}
},
builtin.Abi.musl,
builtin.Abi.musleabi,
builtin.Abi.musleabihf,
=> {
if (arch == builtin.Arch.x86_64) {
return "/lib/ld-musl-x86_64.so.1";
}
},
else => {},
}
switch (arch) {
builtin.Arch.i386,
builtin.Arch.sparc,
builtin.Arch.sparcel,
=> return "/lib/ld-linux.so.2",
builtin.Arch.aarch64 => return "/lib/ld-linux-aarch64.so.1",
builtin.Arch.aarch64_be => return "/lib/ld-linux-aarch64_be.so.1",
builtin.Arch.arm,
builtin.Arch.thumb,
=> return switch (self.getFloatAbi()) {
FloatAbi.Hard => return "/lib/ld-linux-armhf.so.3",
else => return "/lib/ld-linux.so.3",
},
builtin.Arch.armeb,
builtin.Arch.thumbeb,
=> return switch (self.getFloatAbi()) {
FloatAbi.Hard => return "/lib/ld-linux-armhf.so.3",
else => return "/lib/ld-linux.so.3",
},
builtin.Arch.mips,
builtin.Arch.mipsel,
builtin.Arch.mips64,
builtin.Arch.mips64el,
=> return null,
builtin.Arch.powerpc => return "/lib/ld.so.1",
builtin.Arch.powerpc64 => return "/lib64/ld64.so.2",
builtin.Arch.powerpc64le => return "/lib64/ld64.so.2",
builtin.Arch.s390x => return "/lib64/ld64.so.1",
builtin.Arch.sparcv9 => return "/lib64/ld-linux.so.2",
builtin.Arch.x86_64 => return "/lib64/ld-linux-x86-64.so.2",
builtin.Arch.arc,
builtin.Arch.avr,
builtin.Arch.bpfel,
builtin.Arch.bpfeb,
builtin.Arch.hexagon,
builtin.Arch.msp430,
builtin.Arch.r600,
builtin.Arch.amdgcn,
builtin.Arch.riscv32,
builtin.Arch.riscv64,
builtin.Arch.tce,
builtin.Arch.tcele,
builtin.Arch.xcore,
builtin.Arch.nvptx,
builtin.Arch.nvptx64,
builtin.Arch.le32,
builtin.Arch.le64,
builtin.Arch.amdil,
builtin.Arch.amdil64,
builtin.Arch.hsail,
builtin.Arch.hsail64,
builtin.Arch.spir,
builtin.Arch.spir64,
builtin.Arch.kalimba,
builtin.Arch.shave,
builtin.Arch.lanai,
builtin.Arch.wasm32,
builtin.Arch.wasm64,
builtin.Arch.renderscript32,
builtin.Arch.renderscript64,
=> return null,
}
},
else => return null,
}
}
pub fn llvmTargetFromTriple(triple: std.Buffer) !*llvm.Target {
var result: *llvm.Target = undefined;
var err_msg: [*]u8 = undefined;
if (llvm.GetTargetFromTriple(triple.ptr(), &result, &err_msg) != 0) {
std.debug.warn("triple: {s} error: {s}\n", triple.ptr(), err_msg);
return error.UnsupportedTarget;
}
return result;
}
pub fn cIntTypeSizeInBits(self: Target, id: CInt.Id) u32 {
const arch = self.getArch();
switch (self.getOs()) {
builtin.Os.freestanding => switch (self.getArch()) {
builtin.Arch.msp430 => switch (id) {
CInt.Id.Short,
CInt.Id.UShort,
CInt.Id.Int,
CInt.Id.UInt,
=> return 16,
CInt.Id.Long,
CInt.Id.ULong,
=> return 32,
CInt.Id.LongLong,
CInt.Id.ULongLong,
=> return 64,
},
else => switch (id) {
CInt.Id.Short,
CInt.Id.UShort,
=> return 16,
CInt.Id.Int,
CInt.Id.UInt,
=> return 32,
CInt.Id.Long,
CInt.Id.ULong,
=> return self.getArchPtrBitWidth(),
CInt.Id.LongLong,
CInt.Id.ULongLong,
=> return 64,
},
},
builtin.Os.linux,
builtin.Os.macosx,
builtin.Os.freebsd,
builtin.Os.openbsd,
builtin.Os.zen,
=> switch (id) {
CInt.Id.Short,
CInt.Id.UShort,
=> return 16,
CInt.Id.Int,
CInt.Id.UInt,
=> return 32,
CInt.Id.Long,
CInt.Id.ULong,
=> return self.getArchPtrBitWidth(),
CInt.Id.LongLong,
CInt.Id.ULongLong,
=> return 64,
},
builtin.Os.windows, builtin.Os.uefi => switch (id) {
CInt.Id.Short,
CInt.Id.UShort,
=> return 16,
CInt.Id.Int,
CInt.Id.UInt,
=> return 32,
CInt.Id.Long,
CInt.Id.ULong,
CInt.Id.LongLong,
CInt.Id.ULongLong,
=> return 64,
},
builtin.Os.ananas,
builtin.Os.cloudabi,
builtin.Os.dragonfly,
builtin.Os.fuchsia,
builtin.Os.ios,
builtin.Os.kfreebsd,
builtin.Os.lv2,
builtin.Os.netbsd,
builtin.Os.solaris,
builtin.Os.haiku,
builtin.Os.minix,
builtin.Os.rtems,
builtin.Os.nacl,
builtin.Os.cnk,
builtin.Os.aix,
builtin.Os.cuda,
builtin.Os.nvcl,
builtin.Os.amdhsa,
builtin.Os.ps4,
builtin.Os.elfiamcu,
builtin.Os.tvos,
builtin.Os.watchos,
builtin.Os.mesa3d,
builtin.Os.contiki,
builtin.Os.amdpal,
builtin.Os.hermit,
builtin.Os.hurd,
builtin.Os.wasi,
=> @panic("TODO specify the C integer type sizes for this OS"),
}
}
pub fn getDarwinArchString(self: Target) []const u8 {
const arch = self.getArch();
switch (arch) {
builtin.Arch.aarch64 => return "arm64",
builtin.Arch.thumb,
builtin.Arch.arm,
=> return "arm",
builtin.Arch.powerpc => return "ppc",
builtin.Arch.powerpc64 => return "ppc64",
builtin.Arch.powerpc64le => return "ppc64le",
else => return @tagName(arch),
}
}
};

View File

@ -1,7 +1,6 @@
const std = @import("std");
const mem = std.mem;
const builtin = @import("builtin");
const Target = @import("target.zig").Target;
const Target = std.Target;
const Compilation = @import("compilation.zig").Compilation;
const introspect = @import("introspect.zig");
const testing = std.testing;
@ -11,11 +10,17 @@ const ZigCompiler = @import("compilation.zig").ZigCompiler;
var ctx: TestContext = undefined;
test "stage2" {
// TODO provide a way to run tests in evented I/O mode
if (!std.io.is_async) return error.SkipZigTest;
// TODO https://github.com/ziglang/zig/issues/1364
// TODO https://github.com/ziglang/zig/issues/3117
if (true) return error.SkipZigTest;
try ctx.init();
defer ctx.deinit();
try @import("../test/stage2/compile_errors.zig").addCases(&ctx);
try @import("../test/stage2/compare_output.zig").addCases(&ctx);
try @import("stage2_tests").addCases(&ctx);
try ctx.run();
}
@ -24,7 +29,6 @@ const file1 = "1.zig";
const allocator = std.heap.c_allocator;
pub const TestContext = struct {
loop: std.event.Loop,
zig_compiler: ZigCompiler,
zig_lib_dir: []u8,
file_index: std.atomic.Int(usize),
@ -36,21 +40,17 @@ pub const TestContext = struct {
fn init(self: *TestContext) !void {
self.* = TestContext{
.any_err = {},
.loop = undefined,
.zig_compiler = undefined,
.zig_lib_dir = undefined,
.group = undefined,
.file_index = std.atomic.Int(usize).init(0),
};
try self.loop.initSingleThreaded(allocator);
errdefer self.loop.deinit();
self.zig_compiler = try ZigCompiler.init(&self.loop);
self.zig_compiler = try ZigCompiler.init(allocator);
errdefer self.zig_compiler.deinit();
self.group = std.event.Group(anyerror!void).init(&self.loop);
errdefer self.group.deinit();
self.group = std.event.Group(anyerror!void).init(allocator);
errdefer self.group.wait() catch {};
self.zig_lib_dir = try introspect.resolveZigLibDir(allocator);
errdefer allocator.free(self.zig_lib_dir);
@ -63,20 +63,14 @@ pub const TestContext = struct {
std.fs.deleteTree(tmp_dir_name) catch {};
allocator.free(self.zig_lib_dir);
self.zig_compiler.deinit();
self.loop.deinit();
}
fn run(self: *TestContext) !void {
const handle = try self.loop.call(waitForGroup, self);
defer cancel handle;
self.loop.run();
std.event.Loop.startCpuBoundOperation();
self.any_err = self.group.wait();
return self.any_err;
}
async fn waitForGroup(self: *TestContext) void {
self.any_err = await (async self.group.wait() catch unreachable);
}
fn testCompileError(
self: *TestContext,
source: []const u8,
@ -87,7 +81,7 @@ pub const TestContext = struct {
) !void {
var file_index_buf: [20]u8 = undefined;
const file_index = try std.fmt.bufPrint(file_index_buf[0..], "{}", self.file_index.incr());
const file1_path = try std.fs.path.join(allocator, [][]const u8{ tmp_dir_name, file_index, file1 });
const file1_path = try std.fs.path.join(allocator, [_][]const u8{ tmp_dir_name, file_index, file1 });
if (std.fs.path.dirname(file1_path)) |dirname| {
try std.fs.makePath(allocator, dirname);
@ -102,7 +96,7 @@ pub const TestContext = struct {
file1_path,
Target.Native,
Compilation.Kind.Obj,
builtin.Mode.Debug,
.Debug,
true, // is_static
self.zig_lib_dir,
);
@ -120,9 +114,9 @@ pub const TestContext = struct {
) !void {
var file_index_buf: [20]u8 = undefined;
const file_index = try std.fmt.bufPrint(file_index_buf[0..], "{}", self.file_index.incr());
const file1_path = try std.fs.path.join(allocator, [][]const u8{ tmp_dir_name, file_index, file1 });
const file1_path = try std.fs.path.join(allocator, [_][]const u8{ tmp_dir_name, file_index, file1 });
const output_file = try std.fmt.allocPrint(allocator, "{}-out{}", file1_path, Target(Target.Native).exeFileExt());
const output_file = try std.fmt.allocPrint(allocator, "{}-out{}", file1_path, (Target{.Native = {}}).exeFileExt());
if (std.fs.path.dirname(file1_path)) |dirname| {
try std.fs.makePath(allocator, dirname);
}
@ -136,7 +130,7 @@ pub const TestContext = struct {
file1_path,
Target.Native,
Compilation.Kind.Exe,
builtin.Mode.Debug,
.Debug,
false,
self.zig_lib_dir,
);
@ -153,16 +147,16 @@ pub const TestContext = struct {
comp: *Compilation,
exe_file: []const u8,
expected_output: []const u8,
) !void {
) anyerror!void {
// TODO this should not be necessary
const exe_file_2 = try std.mem.dupe(allocator, u8, exe_file);
defer comp.destroy();
const build_event = await (async comp.events.get() catch unreachable);
const build_event = comp.events.get();
switch (build_event) {
Compilation.Event.Ok => {
const argv = []const []const u8{exe_file_2};
.Ok => {
const argv = [_][]const u8{exe_file_2};
// TODO use event loop
const child = try std.ChildProcess.exec(allocator, argv, null, null, 1024 * 1024);
switch (child.term) {
@ -198,18 +192,18 @@ pub const TestContext = struct {
line: usize,
column: usize,
text: []const u8,
) !void {
) anyerror!void {
defer comp.destroy();
const build_event = await (async comp.events.get() catch unreachable);
const build_event = comp.events.get();
switch (build_event) {
Compilation.Event.Ok => {
.Ok => {
@panic("build incorrectly succeeded");
},
Compilation.Event.Error => |err| {
.Error => |err| {
@panic("build incorrectly failed");
},
Compilation.Event.Fail => |msgs| {
.Fail => |msgs| {
testing.expect(msgs.len != 0);
for (msgs) |msg| {
if (mem.endsWith(u8, msg.realpath, path) and mem.eql(u8, msg.text, text)) {

View File

@ -2,7 +2,6 @@
// and stage2. Currently the only way it is used is with `zig translate-c-2`.
const std = @import("std");
const builtin = @import("builtin");
const assert = std.debug.assert;
const ast = std.zig.ast;
const Token = std.zig.Token;
@ -13,8 +12,7 @@ pub const Mode = enum {
translate,
};
// TODO merge with Type.Fn.CallingConvention
const CallingConvention = builtin.TypeInfo.CallingConvention;
const CallingConvention = std.builtin.TypeInfo.CallingConvention;
pub const ClangErrMsg = Stage2ErrorMsg;

View File

@ -1,5 +1,5 @@
const std = @import("std");
const builtin = @import("builtin");
const builtin = std.builtin;
const Scope = @import("scope.zig").Scope;
const Compilation = @import("compilation.zig").Compilation;
const Value = @import("value.zig").Value;
@ -20,31 +20,32 @@ pub const Type = struct {
pub fn destroy(base: *Type, comp: *Compilation) void {
switch (base.id) {
Id.Struct => @fieldParentPtr(Struct, "base", base).destroy(comp),
Id.Fn => @fieldParentPtr(Fn, "base", base).destroy(comp),
Id.Type => @fieldParentPtr(MetaType, "base", base).destroy(comp),
Id.Void => @fieldParentPtr(Void, "base", base).destroy(comp),
Id.Bool => @fieldParentPtr(Bool, "base", base).destroy(comp),
Id.NoReturn => @fieldParentPtr(NoReturn, "base", base).destroy(comp),
Id.Int => @fieldParentPtr(Int, "base", base).destroy(comp),
Id.Float => @fieldParentPtr(Float, "base", base).destroy(comp),
Id.Pointer => @fieldParentPtr(Pointer, "base", base).destroy(comp),
Id.Array => @fieldParentPtr(Array, "base", base).destroy(comp),
Id.ComptimeFloat => @fieldParentPtr(ComptimeFloat, "base", base).destroy(comp),
Id.ComptimeInt => @fieldParentPtr(ComptimeInt, "base", base).destroy(comp),
Id.EnumLiteral => @fieldParentPtr(EnumLiteral, "base", base).destroy(comp),
Id.Undefined => @fieldParentPtr(Undefined, "base", base).destroy(comp),
Id.Null => @fieldParentPtr(Null, "base", base).destroy(comp),
Id.Optional => @fieldParentPtr(Optional, "base", base).destroy(comp),
Id.ErrorUnion => @fieldParentPtr(ErrorUnion, "base", base).destroy(comp),
Id.ErrorSet => @fieldParentPtr(ErrorSet, "base", base).destroy(comp),
Id.Enum => @fieldParentPtr(Enum, "base", base).destroy(comp),
Id.Union => @fieldParentPtr(Union, "base", base).destroy(comp),
Id.BoundFn => @fieldParentPtr(BoundFn, "base", base).destroy(comp),
Id.ArgTuple => @fieldParentPtr(ArgTuple, "base", base).destroy(comp),
Id.Opaque => @fieldParentPtr(Opaque, "base", base).destroy(comp),
Id.Promise => @fieldParentPtr(Promise, "base", base).destroy(comp),
Id.Vector => @fieldParentPtr(Vector, "base", base).destroy(comp),
.Struct => @fieldParentPtr(Struct, "base", base).destroy(comp),
.Fn => @fieldParentPtr(Fn, "base", base).destroy(comp),
.Type => @fieldParentPtr(MetaType, "base", base).destroy(comp),
.Void => @fieldParentPtr(Void, "base", base).destroy(comp),
.Bool => @fieldParentPtr(Bool, "base", base).destroy(comp),
.NoReturn => @fieldParentPtr(NoReturn, "base", base).destroy(comp),
.Int => @fieldParentPtr(Int, "base", base).destroy(comp),
.Float => @fieldParentPtr(Float, "base", base).destroy(comp),
.Pointer => @fieldParentPtr(Pointer, "base", base).destroy(comp),
.Array => @fieldParentPtr(Array, "base", base).destroy(comp),
.ComptimeFloat => @fieldParentPtr(ComptimeFloat, "base", base).destroy(comp),
.ComptimeInt => @fieldParentPtr(ComptimeInt, "base", base).destroy(comp),
.EnumLiteral => @fieldParentPtr(EnumLiteral, "base", base).destroy(comp),
.Undefined => @fieldParentPtr(Undefined, "base", base).destroy(comp),
.Null => @fieldParentPtr(Null, "base", base).destroy(comp),
.Optional => @fieldParentPtr(Optional, "base", base).destroy(comp),
.ErrorUnion => @fieldParentPtr(ErrorUnion, "base", base).destroy(comp),
.ErrorSet => @fieldParentPtr(ErrorSet, "base", base).destroy(comp),
.Enum => @fieldParentPtr(Enum, "base", base).destroy(comp),
.Union => @fieldParentPtr(Union, "base", base).destroy(comp),
.BoundFn => @fieldParentPtr(BoundFn, "base", base).destroy(comp),
.ArgTuple => @fieldParentPtr(ArgTuple, "base", base).destroy(comp),
.Opaque => @fieldParentPtr(Opaque, "base", base).destroy(comp),
.Frame => @fieldParentPtr(Frame, "base", base).destroy(comp),
.AnyFrame => @fieldParentPtr(AnyFrame, "base", base).destroy(comp),
.Vector => @fieldParentPtr(Vector, "base", base).destroy(comp),
}
}
@ -54,105 +55,108 @@ pub const Type = struct {
llvm_context: *llvm.Context,
) (error{OutOfMemory}!*llvm.Type) {
switch (base.id) {
Id.Struct => return @fieldParentPtr(Struct, "base", base).getLlvmType(allocator, llvm_context),
Id.Fn => return @fieldParentPtr(Fn, "base", base).getLlvmType(allocator, llvm_context),
Id.Type => unreachable,
Id.Void => unreachable,
Id.Bool => return @fieldParentPtr(Bool, "base", base).getLlvmType(allocator, llvm_context),
Id.NoReturn => unreachable,
Id.Int => return @fieldParentPtr(Int, "base", base).getLlvmType(allocator, llvm_context),
Id.Float => return @fieldParentPtr(Float, "base", base).getLlvmType(allocator, llvm_context),
Id.Pointer => return @fieldParentPtr(Pointer, "base", base).getLlvmType(allocator, llvm_context),
Id.Array => return @fieldParentPtr(Array, "base", base).getLlvmType(allocator, llvm_context),
Id.ComptimeFloat => unreachable,
Id.ComptimeInt => unreachable,
Id.EnumLiteral => unreachable,
Id.Undefined => unreachable,
Id.Null => unreachable,
Id.Optional => return @fieldParentPtr(Optional, "base", base).getLlvmType(allocator, llvm_context),
Id.ErrorUnion => return @fieldParentPtr(ErrorUnion, "base", base).getLlvmType(allocator, llvm_context),
Id.ErrorSet => return @fieldParentPtr(ErrorSet, "base", base).getLlvmType(allocator, llvm_context),
Id.Enum => return @fieldParentPtr(Enum, "base", base).getLlvmType(allocator, llvm_context),
Id.Union => return @fieldParentPtr(Union, "base", base).getLlvmType(allocator, llvm_context),
Id.BoundFn => return @fieldParentPtr(BoundFn, "base", base).getLlvmType(allocator, llvm_context),
Id.ArgTuple => unreachable,
Id.Opaque => return @fieldParentPtr(Opaque, "base", base).getLlvmType(allocator, llvm_context),
Id.Promise => return @fieldParentPtr(Promise, "base", base).getLlvmType(allocator, llvm_context),
Id.Vector => return @fieldParentPtr(Vector, "base", base).getLlvmType(allocator, llvm_context),
.Struct => return @fieldParentPtr(Struct, "base", base).getLlvmType(allocator, llvm_context),
.Fn => return @fieldParentPtr(Fn, "base", base).getLlvmType(allocator, llvm_context),
.Type => unreachable,
.Void => unreachable,
.Bool => return @fieldParentPtr(Bool, "base", base).getLlvmType(allocator, llvm_context),
.NoReturn => unreachable,
.Int => return @fieldParentPtr(Int, "base", base).getLlvmType(allocator, llvm_context),
.Float => return @fieldParentPtr(Float, "base", base).getLlvmType(allocator, llvm_context),
.Pointer => return @fieldParentPtr(Pointer, "base", base).getLlvmType(allocator, llvm_context),
.Array => return @fieldParentPtr(Array, "base", base).getLlvmType(allocator, llvm_context),
.ComptimeFloat => unreachable,
.ComptimeInt => unreachable,
.EnumLiteral => unreachable,
.Undefined => unreachable,
.Null => unreachable,
.Optional => return @fieldParentPtr(Optional, "base", base).getLlvmType(allocator, llvm_context),
.ErrorUnion => return @fieldParentPtr(ErrorUnion, "base", base).getLlvmType(allocator, llvm_context),
.ErrorSet => return @fieldParentPtr(ErrorSet, "base", base).getLlvmType(allocator, llvm_context),
.Enum => return @fieldParentPtr(Enum, "base", base).getLlvmType(allocator, llvm_context),
.Union => return @fieldParentPtr(Union, "base", base).getLlvmType(allocator, llvm_context),
.BoundFn => return @fieldParentPtr(BoundFn, "base", base).getLlvmType(allocator, llvm_context),
.ArgTuple => unreachable,
.Opaque => return @fieldParentPtr(Opaque, "base", base).getLlvmType(allocator, llvm_context),
.Frame => return @fieldParentPtr(Frame, "base", base).getLlvmType(allocator, llvm_context),
.AnyFrame => return @fieldParentPtr(AnyFrame, "base", base).getLlvmType(allocator, llvm_context),
.Vector => return @fieldParentPtr(Vector, "base", base).getLlvmType(allocator, llvm_context),
}
}
pub fn handleIsPtr(base: *Type) bool {
switch (base.id) {
Id.Type,
Id.ComptimeFloat,
Id.ComptimeInt,
Id.EnumLiteral,
Id.Undefined,
Id.Null,
Id.BoundFn,
Id.ArgTuple,
Id.Opaque,
.Type,
.ComptimeFloat,
.ComptimeInt,
.EnumLiteral,
.Undefined,
.Null,
.BoundFn,
.ArgTuple,
.Opaque,
=> unreachable,
Id.NoReturn,
Id.Void,
Id.Bool,
Id.Int,
Id.Float,
Id.Pointer,
Id.ErrorSet,
Id.Enum,
Id.Fn,
Id.Promise,
Id.Vector,
.NoReturn,
.Void,
.Bool,
.Int,
.Float,
.Pointer,
.ErrorSet,
.Enum,
.Fn,
.Frame,
.AnyFrame,
.Vector,
=> return false,
Id.Struct => @panic("TODO"),
Id.Array => @panic("TODO"),
Id.Optional => @panic("TODO"),
Id.ErrorUnion => @panic("TODO"),
Id.Union => @panic("TODO"),
.Struct => @panic("TODO"),
.Array => @panic("TODO"),
.Optional => @panic("TODO"),
.ErrorUnion => @panic("TODO"),
.Union => @panic("TODO"),
}
}
pub fn hasBits(base: *Type) bool {
switch (base.id) {
Id.Type,
Id.ComptimeFloat,
Id.ComptimeInt,
Id.EnumLiteral,
Id.Undefined,
Id.Null,
Id.BoundFn,
Id.ArgTuple,
Id.Opaque,
.Type,
.ComptimeFloat,
.ComptimeInt,
.EnumLiteral,
.Undefined,
.Null,
.BoundFn,
.ArgTuple,
.Opaque,
=> unreachable,
Id.Void,
Id.NoReturn,
.Void,
.NoReturn,
=> return false,
Id.Bool,
Id.Int,
Id.Float,
Id.Fn,
Id.Promise,
Id.Vector,
.Bool,
.Int,
.Float,
.Fn,
.Frame,
.AnyFrame,
.Vector,
=> return true,
Id.Pointer => {
.Pointer => {
const ptr_type = @fieldParentPtr(Pointer, "base", base);
return ptr_type.key.child_type.hasBits();
},
Id.ErrorSet => @panic("TODO"),
Id.Enum => @panic("TODO"),
Id.Struct => @panic("TODO"),
Id.Array => @panic("TODO"),
Id.Optional => @panic("TODO"),
Id.ErrorUnion => @panic("TODO"),
Id.Union => @panic("TODO"),
.ErrorSet => @panic("TODO"),
.Enum => @panic("TODO"),
.Struct => @panic("TODO"),
.Array => @panic("TODO"),
.Optional => @panic("TODO"),
.ErrorUnion => @panic("TODO"),
.Union => @panic("TODO"),
}
}
@ -168,20 +172,20 @@ pub const Type = struct {
fn init(base: *Type, comp: *Compilation, id: Id, name: []const u8) void {
base.* = Type{
.base = Value{
.id = Value.Id.Type,
.id = .Type,
.typ = &MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
.id = id,
.name = name,
.abi_alignment = AbiAlignment.init(comp.loop),
.abi_alignment = AbiAlignment.init(),
};
}
/// If you happen to have an llvm context handy, use getAbiAlignmentInContext instead.
/// Otherwise, this one will grab one from the pool and then release it.
pub async fn getAbiAlignment(base: *Type, comp: *Compilation) !u32 {
if (await (async base.abi_alignment.start() catch unreachable)) |ptr| return ptr.*;
if (base.abi_alignment.start()) |ptr| return ptr.*;
{
const held = try comp.zig_compiler.getAnyLlvmContext();
@ -189,7 +193,7 @@ pub const Type = struct {
const llvm_context = held.node.data;
base.abi_alignment.data = await (async base.resolveAbiAlignment(comp, llvm_context) catch unreachable);
base.abi_alignment.data = base.resolveAbiAlignment(comp, llvm_context);
}
base.abi_alignment.resolve();
return base.abi_alignment.data;
@ -197,9 +201,9 @@ pub const Type = struct {
/// If you have an llvm context handy, you can use it here.
pub async fn getAbiAlignmentInContext(base: *Type, comp: *Compilation, llvm_context: *llvm.Context) !u32 {
if (await (async base.abi_alignment.start() catch unreachable)) |ptr| return ptr.*;
if (base.abi_alignment.start()) |ptr| return ptr.*;
base.abi_alignment.data = await (async base.resolveAbiAlignment(comp, llvm_context) catch unreachable);
base.abi_alignment.data = base.resolveAbiAlignment(comp, llvm_context);
base.abi_alignment.resolve();
return base.abi_alignment.data;
}
@ -261,30 +265,18 @@ pub const Type = struct {
pub const Generic = struct {
param_count: usize,
cc: CC,
pub const CC = union(CallingConvention) {
Auto,
C,
Cold,
Naked,
Stdcall,
Async: *Type, // allocator type
};
cc: CallingConvention,
};
pub fn hash(self: *const Key) u32 {
var result: u32 = 0;
result +%= hashAny(self.alignment, 0);
switch (self.data) {
Kind.Generic => |generic| {
.Generic => |generic| {
result +%= hashAny(generic.param_count, 1);
switch (generic.cc) {
CallingConvention.Async => |allocator_type| result +%= hashAny(allocator_type, 2),
else => result +%= hashAny(CallingConvention(generic.cc), 3),
}
result +%= hashAny(generic.cc, 3);
},
Kind.Normal => |normal| {
.Normal => |normal| {
result +%= hashAny(normal.return_type, 4);
result +%= hashAny(normal.is_var_args, 5);
result +%= hashAny(normal.cc, 6);
@ -302,21 +294,14 @@ pub const Type = struct {
if (self.alignment) |self_align| {
if (self_align != other.alignment.?) return false;
}
if (@TagType(Data)(self.data) != @TagType(Data)(other.data)) return false;
if (@as(@TagType(Data), self.data) != @as(@TagType(Data), other.data)) return false;
switch (self.data) {
Kind.Generic => |*self_generic| {
.Generic => |*self_generic| {
const other_generic = &other.data.Generic;
if (self_generic.param_count != other_generic.param_count) return false;
if (CallingConvention(self_generic.cc) != CallingConvention(other_generic.cc)) return false;
switch (self_generic.cc) {
CallingConvention.Async => |self_allocator_type| {
const other_allocator_type = other_generic.cc.Async;
if (self_allocator_type != other_allocator_type) return false;
},
else => {},
}
if (self_generic.cc != other_generic.cc) return false;
},
Kind.Normal => |*self_normal| {
.Normal => |*self_normal| {
const other_normal = &other.data.Normal;
if (self_normal.cc != other_normal.cc) return false;
if (self_normal.is_var_args != other_normal.is_var_args) return false;
@ -333,13 +318,8 @@ pub const Type = struct {
pub fn deref(key: Key, comp: *Compilation) void {
switch (key.data) {
Kind.Generic => |generic| {
switch (generic.cc) {
CallingConvention.Async => |allocator_type| allocator_type.base.deref(comp),
else => {},
}
},
Kind.Normal => |normal| {
.Generic => {},
.Normal => |normal| {
normal.return_type.base.deref(comp);
for (normal.params) |param| {
param.typ.base.deref(comp);
@ -350,13 +330,8 @@ pub const Type = struct {
pub fn ref(key: Key) void {
switch (key.data) {
Kind.Generic => |generic| {
switch (generic.cc) {
CallingConvention.Async => |allocator_type| allocator_type.base.ref(),
else => {},
}
},
Kind.Normal => |normal| {
.Generic => {},
.Normal => |normal| {
normal.return_type.base.ref();
for (normal.params) |param| {
param.typ.base.ref();
@ -366,14 +341,7 @@ pub const Type = struct {
}
};
pub const CallingConvention = enum {
Auto,
C,
Cold,
Naked,
Stdcall,
Async,
};
const CallingConvention = builtin.TypeInfo.CallingConvention;
pub const Param = struct {
is_noalias: bool,
@ -382,26 +350,26 @@ pub const Type = struct {
fn ccFnTypeStr(cc: CallingConvention) []const u8 {
return switch (cc) {
CallingConvention.Auto => "",
CallingConvention.C => "extern ",
CallingConvention.Cold => "coldcc ",
CallingConvention.Naked => "nakedcc ",
CallingConvention.Stdcall => "stdcallcc ",
CallingConvention.Async => unreachable,
.Unspecified => "",
.C => "extern ",
.Cold => "coldcc ",
.Naked => "nakedcc ",
.Stdcall => "stdcallcc ",
.Async => "async ",
};
}
pub fn paramCount(self: *Fn) usize {
return switch (self.key.data) {
Kind.Generic => |generic| generic.param_count,
Kind.Normal => |normal| normal.params.len,
.Generic => |generic| generic.param_count,
.Normal => |normal| normal.params.len,
};
}
/// takes ownership of key.Normal.params on success
pub async fn get(comp: *Compilation, key: Key) !*Fn {
{
const held = await (async comp.fn_type_table.acquire() catch unreachable);
const held = comp.fn_type_table.acquire();
defer held.release();
if (held.value.get(&key)) |entry| {
@ -428,18 +396,10 @@ pub const Type = struct {
const name_stream = &std.io.BufferOutStream.init(&name_buf).stream;
switch (key.data) {
Kind.Generic => |generic| {
.Generic => |generic| {
self.non_key = NonKey{ .Generic = {} };
switch (generic.cc) {
CallingConvention.Async => |async_allocator_type| {
try name_stream.print("async<{}> ", async_allocator_type.name);
},
else => {
const cc_str = ccFnTypeStr(generic.cc);
try name_stream.write(cc_str);
},
}
try name_stream.write("fn(");
const cc_str = ccFnTypeStr(generic.cc);
try name_stream.print("{}fn(", cc_str);
var param_i: usize = 0;
while (param_i < generic.param_count) : (param_i += 1) {
const arg = if (param_i == 0) "var" else ", var";
@ -447,11 +407,11 @@ pub const Type = struct {
}
try name_stream.write(")");
if (key.alignment) |alignment| {
try name_stream.print(" align<{}>", alignment);
try name_stream.print(" align({})", alignment);
}
try name_stream.write(" var");
},
Kind.Normal => |normal| {
.Normal => |normal| {
self.non_key = NonKey{
.Normal = NonKey.Normal{ .variable_list = std.ArrayList(*Scope.Var).init(comp.gpa()) },
};
@ -468,16 +428,16 @@ pub const Type = struct {
}
try name_stream.write(")");
if (key.alignment) |alignment| {
try name_stream.print(" align<{}>", alignment);
try name_stream.print(" align({})", alignment);
}
try name_stream.print(" {}", normal.return_type.name);
},
}
self.base.init(comp, Id.Fn, name_buf.toOwnedSlice());
self.base.init(comp, .Fn, name_buf.toOwnedSlice());
{
const held = await (async comp.fn_type_table.acquire() catch unreachable);
const held = comp.fn_type_table.acquire();
defer held.release();
_ = try held.value.put(&self.key, self);
@ -488,8 +448,8 @@ pub const Type = struct {
pub fn destroy(self: *Fn, comp: *Compilation) void {
self.key.deref(comp);
switch (self.key.data) {
Kind.Generic => {},
Kind.Normal => {
.Generic => {},
.Normal => {
self.non_key.Normal.variable_list.deinit();
},
}
@ -499,7 +459,7 @@ pub const Type = struct {
pub fn getLlvmType(self: *Fn, allocator: *Allocator, llvm_context: *llvm.Context) !*llvm.Type {
const normal = &self.key.data.Normal;
const llvm_return_type = switch (normal.return_type.id) {
Type.Id.Void => llvm.VoidTypeInContext(llvm_context) orelse return error.OutOfMemory,
.Void => llvm.VoidTypeInContext(llvm_context) orelse return error.OutOfMemory,
else => try normal.return_type.getLlvmType(allocator, llvm_context),
};
const llvm_param_types = try allocator.alloc(*llvm.Type, normal.params.len);
@ -606,7 +566,7 @@ pub const Type = struct {
pub async fn get(comp: *Compilation, key: Key) !*Int {
{
const held = await (async comp.int_type_table.acquire() catch unreachable);
const held = comp.int_type_table.acquire();
defer held.release();
if (held.value.get(&key)) |entry| {
@ -627,10 +587,10 @@ pub const Type = struct {
const name = try std.fmt.allocPrint(comp.gpa(), "{c}{}", u_or_i, key.bit_count);
errdefer comp.gpa().free(name);
self.base.init(comp, Id.Int, name);
self.base.init(comp, .Int, name);
{
const held = await (async comp.int_type_table.acquire() catch unreachable);
const held = comp.int_type_table.acquire();
defer held.release();
_ = try held.value.put(&self.key, self);
@ -648,7 +608,7 @@ pub const Type = struct {
pub async fn gcDestroy(self: *Int, comp: *Compilation) void {
{
const held = await (async comp.int_type_table.acquire() catch unreachable);
const held = comp.int_type_table.acquire();
defer held.release();
_ = held.value.remove(&self.key).?;
@ -689,8 +649,8 @@ pub const Type = struct {
pub fn hash(self: *const Key) u32 {
var result: u32 = 0;
result +%= switch (self.alignment) {
Align.Abi => 0xf201c090,
Align.Override => |x| hashAny(x, 0),
.Abi => 0xf201c090,
.Override => |x| hashAny(x, 0),
};
result +%= hashAny(self.child_type, 1);
result +%= hashAny(self.mut, 2);
@ -704,13 +664,13 @@ pub const Type = struct {
self.mut != other.mut or
self.vol != other.vol or
self.size != other.size or
@TagType(Align)(self.alignment) != @TagType(Align)(other.alignment))
@as(@TagType(Align), self.alignment) != @as(@TagType(Align), other.alignment))
{
return false;
}
switch (self.alignment) {
Align.Abi => return true,
Align.Override => |x| return x == other.alignment.Override,
.Abi => return true,
.Override => |x| return x == other.alignment.Override,
}
}
};
@ -742,7 +702,7 @@ pub const Type = struct {
pub async fn gcDestroy(self: *Pointer, comp: *Compilation) void {
{
const held = await (async comp.ptr_type_table.acquire() catch unreachable);
const held = comp.ptr_type_table.acquire();
defer held.release();
_ = held.value.remove(&self.key).?;
@ -753,8 +713,8 @@ pub const Type = struct {
pub async fn getAlignAsInt(self: *Pointer, comp: *Compilation) u32 {
switch (self.key.alignment) {
Align.Abi => return await (async self.key.child_type.getAbiAlignment(comp) catch unreachable),
Align.Override => |alignment| return alignment,
.Abi => return self.key.child_type.getAbiAlignment(comp),
.Override => |alignment| return alignment,
}
}
@ -764,16 +724,16 @@ pub const Type = struct {
) !*Pointer {
var normal_key = key;
switch (key.alignment) {
Align.Abi => {},
Align.Override => |alignment| {
const abi_align = try await (async key.child_type.getAbiAlignment(comp) catch unreachable);
.Abi => {},
.Override => |alignment| {
const abi_align = try key.child_type.getAbiAlignment(comp);
if (abi_align == alignment) {
normal_key.alignment = Align.Abi;
normal_key.alignment = .Abi;
}
},
}
{
const held = await (async comp.ptr_type_table.acquire() catch unreachable);
const held = comp.ptr_type_table.acquire();
defer held.release();
if (held.value.get(&normal_key)) |entry| {
@ -791,21 +751,21 @@ pub const Type = struct {
errdefer comp.gpa().destroy(self);
const size_str = switch (self.key.size) {
Size.One => "*",
Size.Many => "[*]",
Size.Slice => "[]",
Size.C => "[*c]",
.One => "*",
.Many => "[*]",
.Slice => "[]",
.C => "[*c]",
};
const mut_str = switch (self.key.mut) {
Mut.Const => "const ",
Mut.Mut => "",
.Const => "const ",
.Mut => "",
};
const vol_str = switch (self.key.vol) {
Vol.Volatile => "volatile ",
Vol.Non => "",
.Volatile => "volatile ",
.Non => "",
};
const name = switch (self.key.alignment) {
Align.Abi => try std.fmt.allocPrint(
.Abi => try std.fmt.allocPrint(
comp.gpa(),
"{}{}{}{}",
size_str,
@ -813,7 +773,7 @@ pub const Type = struct {
vol_str,
self.key.child_type.name,
),
Align.Override => |alignment| try std.fmt.allocPrint(
.Override => |alignment| try std.fmt.allocPrint(
comp.gpa(),
"{}align<{}> {}{}{}",
size_str,
@ -825,10 +785,10 @@ pub const Type = struct {
};
errdefer comp.gpa().free(name);
self.base.init(comp, Id.Pointer, name);
self.base.init(comp, .Pointer, name);
{
const held = await (async comp.ptr_type_table.acquire() catch unreachable);
const held = comp.ptr_type_table.acquire();
defer held.release();
_ = try held.value.put(&self.key, self);
@ -873,7 +833,7 @@ pub const Type = struct {
errdefer key.elem_type.base.deref(comp);
{
const held = await (async comp.array_type_table.acquire() catch unreachable);
const held = comp.array_type_table.acquire();
defer held.release();
if (held.value.get(&key)) |entry| {
@ -893,10 +853,10 @@ pub const Type = struct {
const name = try std.fmt.allocPrint(comp.gpa(), "[{}]{}", key.len, key.elem_type.name);
errdefer comp.gpa().free(name);
self.base.init(comp, Id.Array, name);
self.base.init(comp, .Array, name);
{
const held = await (async comp.array_type_table.acquire() catch unreachable);
const held = comp.array_type_table.acquire();
defer held.release();
_ = try held.value.put(&self.key, self);
@ -1066,14 +1026,26 @@ pub const Type = struct {
}
};
pub const Promise = struct {
pub const Frame = struct {
base: Type,
pub fn destroy(self: *Promise, comp: *Compilation) void {
pub fn destroy(self: *Frame, comp: *Compilation) void {
comp.gpa().destroy(self);
}
pub fn getLlvmType(self: *Promise, allocator: *Allocator, llvm_context: *llvm.Context) *llvm.Type {
pub fn getLlvmType(self: *Frame, allocator: *Allocator, llvm_context: *llvm.Context) *llvm.Type {
@panic("TODO");
}
};
pub const AnyFrame = struct {
base: Type,
pub fn destroy(self: *AnyFrame, comp: *Compilation) void {
comp.gpa().destroy(self);
}
pub fn getLlvmType(self: *AnyFrame, allocator: *Allocator, llvm_context: *llvm.Context) *llvm.Type {
@panic("TODO");
}
};
@ -1081,34 +1053,34 @@ pub const Type = struct {
fn hashAny(x: var, comptime seed: u64) u32 {
switch (@typeInfo(@typeOf(x))) {
builtin.TypeId.Int => |info| {
.Int => |info| {
comptime var rng = comptime std.rand.DefaultPrng.init(seed);
const unsigned_x = @bitCast(@IntType(false, info.bits), x);
if (info.bits <= 32) {
return u32(unsigned_x) *% comptime rng.random.scalar(u32);
return @as(u32, unsigned_x) *% comptime rng.random.scalar(u32);
} else {
return @truncate(u32, unsigned_x *% comptime rng.random.scalar(@typeOf(unsigned_x)));
}
},
builtin.TypeId.Pointer => |info| {
.Pointer => |info| {
switch (info.size) {
builtin.TypeInfo.Pointer.Size.One => return hashAny(@ptrToInt(x), seed),
builtin.TypeInfo.Pointer.Size.Many => @compileError("implement hash function"),
builtin.TypeInfo.Pointer.Size.Slice => @compileError("implement hash function"),
builtin.TypeInfo.Pointer.Size.C => unreachable,
.One => return hashAny(@ptrToInt(x), seed),
.Many => @compileError("implement hash function"),
.Slice => @compileError("implement hash function"),
.C => unreachable,
}
},
builtin.TypeId.Enum => return hashAny(@enumToInt(x), seed),
builtin.TypeId.Bool => {
.Enum => return hashAny(@enumToInt(x), seed),
.Bool => {
comptime var rng = comptime std.rand.DefaultPrng.init(seed);
const vals = comptime [2]u32{ rng.random.scalar(u32), rng.random.scalar(u32) };
return vals[@boolToInt(x)];
},
builtin.TypeId.Optional => {
.Optional => {
if (x) |non_opt| {
return hashAny(non_opt, seed);
} else {
return hashAny(u32(1), seed);
return hashAny(@as(u32, 1), seed);
}
},
else => @compileError("implement hash function for " ++ @typeName(@typeOf(x))),
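An aside on the hashAny helper above (not part of the diff): each field of a type key is mixed with a multiplier drawn from a PRNG seeded at comptime, so the same integer value hashes differently depending on which field (seed) it came from. A minimal sketch of that idea, assuming the same std.rand API as the surrounding code; the names hashField and the test are illustrative, not compiler code:

const std = @import("std");

// Sketch only: one pseudo-random multiplier per comptime seed, mirroring how
// hashAny mixes integer fields into a key hash.
fn hashField(x: u32, comptime seed: u64) u32 {
    comptime var rng = comptime std.rand.DefaultPrng.init(seed);
    return x *% comptime rng.random.scalar(u32);
}

test "hashField is deterministic per seed" {
    // The multiplier is fixed at compile time, so the same (value, seed)
    // pair always produces the same hash and keys hash consistently.
    std.testing.expect(hashField(16, 0) == hashField(16, 0));
    std.testing.expect(hashField(16, 1) == hashField(16, 1));
}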

211
src-self-hosted/util.zig Normal file
View File

@ -0,0 +1,211 @@
const std = @import("std");
const Target = std.Target;
const llvm = @import("llvm.zig");
pub const FloatAbi = enum {
Hard,
Soft,
SoftFp,
};
/// TODO expose the arch and subarch separately
pub fn isArmOrThumb(self: Target) bool {
return switch (self.getArch()) {
.arm,
.armeb,
.aarch64,
.aarch64_be,
.thumb,
.thumbeb,
=> true,
else => false,
};
}
pub fn getFloatAbi(self: Target) FloatAbi {
return switch (self.getAbi()) {
.gnueabihf,
.eabihf,
.musleabihf,
=> .Hard,
else => .Soft,
};
}
pub fn getObjectFormat(self: Target) Target.ObjectFormat {
    return switch (self) {
        .Native => @import("builtin").object_format,
        .Cross => blk: {
            if (self.isWindows() or self.isUefi()) {
                break :blk .coff;
            } else if (self.isDarwin()) {
                break :blk .macho;
            }
            if (self.isWasm()) {
                break :blk .wasm;
            }
            break :blk .elf;
        },
    };
}
pub fn getDynamicLinkerPath(self: Target) ?[]const u8 {
const env = self.getAbi();
const arch = self.getArch();
const os = self.getOs();
switch (os) {
.freebsd => {
return "/libexec/ld-elf.so.1";
},
.linux => {
switch (env) {
.android => {
if (self.getArchPtrBitWidth() == 64) {
return "/system/bin/linker64";
} else {
return "/system/bin/linker";
}
},
.gnux32 => {
if (arch == .x86_64) {
return "/libx32/ld-linux-x32.so.2";
}
},
.musl,
.musleabi,
.musleabihf,
=> {
if (arch == .x86_64) {
return "/lib/ld-musl-x86_64.so.1";
}
},
else => {},
}
switch (arch) {
.i386,
.sparc,
.sparcel,
=> return "/lib/ld-linux.so.2",
.aarch64 => return "/lib/ld-linux-aarch64.so.1",
.aarch64_be => return "/lib/ld-linux-aarch64_be.so.1",
.arm,
.thumb,
=> return switch (getFloatAbi(self)) {
.Hard => return "/lib/ld-linux-armhf.so.3",
else => return "/lib/ld-linux.so.3",
},
.armeb,
.thumbeb,
=> return switch (getFloatAbi(self)) {
.Hard => return "/lib/ld-linux-armhf.so.3",
else => return "/lib/ld-linux.so.3",
},
.mips,
.mipsel,
.mips64,
.mips64el,
=> return null,
.powerpc => return "/lib/ld.so.1",
.powerpc64 => return "/lib64/ld64.so.2",
.powerpc64le => return "/lib64/ld64.so.2",
.s390x => return "/lib64/ld64.so.1",
.sparcv9 => return "/lib64/ld-linux.so.2",
.x86_64 => return "/lib64/ld-linux-x86-64.so.2",
.arc,
.avr,
.bpfel,
.bpfeb,
.hexagon,
.msp430,
.r600,
.amdgcn,
.riscv32,
.riscv64,
.tce,
.tcele,
.xcore,
.nvptx,
.nvptx64,
.le32,
.le64,
.amdil,
.amdil64,
.hsail,
.hsail64,
.spir,
.spir64,
.kalimba,
.shave,
.lanai,
.wasm32,
.wasm64,
.renderscript32,
.renderscript64,
.aarch64_32,
=> return null,
}
},
else => return null,
}
}
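An illustrative aside, not part of the new file: the dynamic linker path returned above is the value a compiler driver would hand to the linker for non-static builds. A hedged sketch of one way it could be consumed, where appendDynamicLinkerArgs is a hypothetical helper and "--dynamic-linker" is the flag ld/lld accept; how this compiler actually assembles its link line is not shown in this diff:

const std = @import("std");
const util = @import("util.zig"); // assumes this new file is importable from a sibling file

// Hypothetical helper: forward the per-target dynamic linker, when known,
// onto a linker argument list.
fn appendDynamicLinkerArgs(args: *std.ArrayList([]const u8), target: std.Target) !void {
    if (util.getDynamicLinkerPath(target)) |path| {
        try args.append("--dynamic-linker");
        try args.append(path);
    }
}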
pub fn getDarwinArchString(self: Target) []const u8 {
const arch = self.getArch();
switch (arch) {
.aarch64 => return "arm64",
.thumb,
.arm,
=> return "arm",
.powerpc => return "ppc",
.powerpc64 => return "ppc64",
.powerpc64le => return "ppc64le",
else => return @tagName(arch),
}
}
pub fn llvmTargetFromTriple(triple: std.Buffer) !*llvm.Target {
var result: *llvm.Target = undefined;
var err_msg: [*]u8 = undefined;
if (llvm.GetTargetFromTriple(triple.ptr(), &result, &err_msg) != 0) {
std.debug.warn("triple: {s} error: {s}\n", triple.ptr(), err_msg);
return error.UnsupportedTarget;
}
return result;
}
pub fn initializeAllTargets() void {
llvm.InitializeAllTargets();
llvm.InitializeAllTargetInfos();
llvm.InitializeAllTargetMCs();
llvm.InitializeAllAsmPrinters();
llvm.InitializeAllAsmParsers();
}
pub fn getTriple(allocator: *std.mem.Allocator, self: std.Target) !std.Buffer {
var result = try std.Buffer.initSize(allocator, 0);
errdefer result.deinit();
// LLVM WebAssembly output support requires the target to be activated at
// build time with -DLLVM_EXPERIMENTAL_TARGETS_TO_BUILD=WebAssembly.
//
// LLVM determines the output format based on the abi suffix,
// defaulting to an object format based on the architecture. The default format in
// LLVM 6 sets the wasm arch output incorrectly to ELF. We need to
// explicitly set this ourselves in order for it to work.
//
// This is fixed in LLVM 7 and you will be able to get wasm output by
// using the target triple `wasm32-unknown-unknown-unknown`.
const env_name = if (self.isWasm()) "wasm" else @tagName(self.getAbi());
var out = &std.io.BufferOutStream.init(&result).stream;
try out.print("{}-unknown-{}-{}", @tagName(self.getArch()), @tagName(self.getOs()), env_name);
return result;
}
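To make the triple format above concrete (an illustrative aside, not part of the diff): getTriple renders "<arch>-unknown-<os>-<abi>", substituting "wasm" for the abi on WebAssembly targets so LLVM picks the wasm object format. A minimal standalone sketch of that formatting, using hard-coded strings instead of pulling the components from std.Target with @tagName:

const std = @import("std");

pub fn main() !void {
    // Illustrative values only; a real caller would derive these from the target.
    const arch: []const u8 = "x86_64";
    const os: []const u8 = "linux";
    const abi: []const u8 = "gnu";
    var buf: [64]u8 = undefined;
    const triple = try std.fmt.bufPrint(&buf, "{}-unknown-{}-{}", arch, os, abi);
    // Prints "x86_64-unknown-linux-gnu".
    std.debug.warn("{}\n", triple);
}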

View File

@ -1,5 +1,4 @@
const std = @import("std");
const builtin = @import("builtin");
const Scope = @import("scope.zig").Scope;
const Compilation = @import("compilation.zig").Compilation;
const ObjectFile = @import("codegen.zig").ObjectFile;
@ -24,15 +23,15 @@ pub const Value = struct {
if (base.ref_count.decr() == 1) {
base.typ.base.deref(comp);
switch (base.id) {
Id.Type => @fieldParentPtr(Type, "base", base).destroy(comp),
Id.Fn => @fieldParentPtr(Fn, "base", base).destroy(comp),
Id.FnProto => @fieldParentPtr(FnProto, "base", base).destroy(comp),
Id.Void => @fieldParentPtr(Void, "base", base).destroy(comp),
Id.Bool => @fieldParentPtr(Bool, "base", base).destroy(comp),
Id.NoReturn => @fieldParentPtr(NoReturn, "base", base).destroy(comp),
Id.Ptr => @fieldParentPtr(Ptr, "base", base).destroy(comp),
Id.Int => @fieldParentPtr(Int, "base", base).destroy(comp),
Id.Array => @fieldParentPtr(Array, "base", base).destroy(comp),
.Type => @fieldParentPtr(Type, "base", base).destroy(comp),
.Fn => @fieldParentPtr(Fn, "base", base).destroy(comp),
.FnProto => @fieldParentPtr(FnProto, "base", base).destroy(comp),
.Void => @fieldParentPtr(Void, "base", base).destroy(comp),
.Bool => @fieldParentPtr(Bool, "base", base).destroy(comp),
.NoReturn => @fieldParentPtr(NoReturn, "base", base).destroy(comp),
.Ptr => @fieldParentPtr(Ptr, "base", base).destroy(comp),
.Int => @fieldParentPtr(Int, "base", base).destroy(comp),
.Array => @fieldParentPtr(Array, "base", base).destroy(comp),
}
}
}
@ -59,15 +58,15 @@ pub const Value = struct {
pub fn getLlvmConst(base: *Value, ofile: *ObjectFile) (error{OutOfMemory}!?*llvm.Value) {
switch (base.id) {
Id.Type => unreachable,
Id.Fn => return @fieldParentPtr(Fn, "base", base).getLlvmConst(ofile),
Id.FnProto => return @fieldParentPtr(FnProto, "base", base).getLlvmConst(ofile),
Id.Void => return null,
Id.Bool => return @fieldParentPtr(Bool, "base", base).getLlvmConst(ofile),
Id.NoReturn => unreachable,
Id.Ptr => return @fieldParentPtr(Ptr, "base", base).getLlvmConst(ofile),
Id.Int => return @fieldParentPtr(Int, "base", base).getLlvmConst(ofile),
Id.Array => return @fieldParentPtr(Array, "base", base).getLlvmConst(ofile),
.Type => unreachable,
.Fn => return @fieldParentPtr(Fn, "base", base).getLlvmConst(ofile),
.FnProto => return @fieldParentPtr(FnProto, "base", base).getLlvmConst(ofile),
.Void => return null,
.Bool => return @fieldParentPtr(Bool, "base", base).getLlvmConst(ofile),
.NoReturn => unreachable,
.Ptr => return @fieldParentPtr(Ptr, "base", base).getLlvmConst(ofile),
.Int => return @fieldParentPtr(Int, "base", base).getLlvmConst(ofile),
.Array => return @fieldParentPtr(Array, "base", base).getLlvmConst(ofile),
}
}
@ -83,15 +82,15 @@ pub const Value = struct {
pub fn copy(base: *Value, comp: *Compilation) (error{OutOfMemory}!*Value) {
switch (base.id) {
Id.Type => unreachable,
Id.Fn => unreachable,
Id.FnProto => unreachable,
Id.Void => unreachable,
Id.Bool => unreachable,
Id.NoReturn => unreachable,
Id.Ptr => unreachable,
Id.Array => unreachable,
Id.Int => return &(try @fieldParentPtr(Int, "base", base).copy(comp)).base,
.Type => unreachable,
.Fn => unreachable,
.FnProto => unreachable,
.Void => unreachable,
.Bool => unreachable,
.NoReturn => unreachable,
.Ptr => unreachable,
.Array => unreachable,
.Int => return &(try @fieldParentPtr(Int, "base", base).copy(comp)).base,
}
}
@ -138,7 +137,7 @@ pub const Value = struct {
const self = try comp.gpa().create(FnProto);
self.* = FnProto{
.base = Value{
.id = Value.Id.FnProto,
.id = .FnProto,
.typ = &fn_type.base,
.ref_count = std.atomic.Int(usize).init(1),
},
@ -202,7 +201,7 @@ pub const Value = struct {
const self = try comp.gpa().create(Fn);
self.* = Fn{
.base = Value{
.id = Value.Id.Fn,
.id = .Fn,
.typ = &fn_type.base,
.ref_count = std.atomic.Int(usize).init(1),
},
@ -346,20 +345,20 @@ pub const Value = struct {
errdefer array_val.base.deref(comp);
const elem_type = array_val.base.typ.cast(Type.Array).?.key.elem_type;
const ptr_type = try await (async Type.Pointer.get(comp, Type.Pointer.Key{
const ptr_type = try Type.Pointer.get(comp, Type.Pointer.Key{
.child_type = elem_type,
.mut = mut,
.vol = Type.Pointer.Vol.Non,
.size = size,
.alignment = Type.Pointer.Align.Abi,
}) catch unreachable);
});
var ptr_type_consumed = false;
errdefer if (!ptr_type_consumed) ptr_type.base.base.deref(comp);
const self = try comp.gpa().create(Value.Ptr);
self.* = Value.Ptr{
.base = Value{
.id = Value.Id.Ptr,
.id = .Ptr,
.typ = &ptr_type.base,
.ref_count = std.atomic.Int(usize).init(1),
},
@ -385,8 +384,8 @@ pub const Value = struct {
const llvm_type = self.base.typ.getLlvmType(ofile.arena, ofile.context);
// TODO carefully port the logic from codegen.cpp:gen_const_val_ptr
switch (self.special) {
Special.Scalar => |scalar| @panic("TODO"),
Special.BaseArray => |base_array| {
.Scalar => |scalar| @panic("TODO"),
.BaseArray => |base_array| {
// TODO put this in one .o file only, and after that, generate extern references to it
const array_llvm_value = (try base_array.val.getLlvmConst(ofile)).?;
const ptr_bit_count = ofile.comp.target_ptr_bits;
@ -401,9 +400,9 @@ pub const Value = struct {
@intCast(c_uint, indices.len),
) orelse return error.OutOfMemory;
},
Special.BaseStruct => |base_struct| @panic("TODO"),
Special.HardCodedAddr => |addr| @panic("TODO"),
Special.Discard => unreachable,
.BaseStruct => |base_struct| @panic("TODO"),
.HardCodedAddr => |addr| @panic("TODO"),
.Discard => unreachable,
}
}
};
@ -428,16 +427,16 @@ pub const Value = struct {
const u8_type = Type.Int.get_u8(comp);
defer u8_type.base.base.deref(comp);
const array_type = try await (async Type.Array.get(comp, Type.Array.Key{
const array_type = try Type.Array.get(comp, Type.Array.Key{
.elem_type = &u8_type.base,
.len = buffer.len,
}) catch unreachable);
});
errdefer array_type.base.base.deref(comp);
const self = try comp.gpa().create(Value.Array);
self.* = Value.Array{
.base = Value{
.id = Value.Id.Array,
.id = .Array,
.typ = &array_type.base,
.ref_count = std.atomic.Int(usize).init(1),
},
@ -450,22 +449,22 @@ pub const Value = struct {
pub fn destroy(self: *Array, comp: *Compilation) void {
switch (self.special) {
Special.Undefined => {},
Special.OwnedBuffer => |buf| {
.Undefined => {},
.OwnedBuffer => |buf| {
comp.gpa().free(buf);
},
Special.Explicit => {},
.Explicit => {},
}
comp.gpa().destroy(self);
}
pub fn getLlvmConst(self: *Array, ofile: *ObjectFile) !?*llvm.Value {
switch (self.special) {
Special.Undefined => {
.Undefined => {
const llvm_type = try self.base.typ.getLlvmType(ofile.arena, ofile.context);
return llvm.GetUndef(llvm_type);
},
Special.OwnedBuffer => |buf| {
.OwnedBuffer => |buf| {
const dont_null_terminate = 1;
const llvm_str_init = llvm.ConstStringInContext(
ofile.context,
@ -482,7 +481,7 @@ pub const Value = struct {
llvm.SetAlignment(global, llvm.ABIAlignmentOfType(ofile.comp.target_data_ref, str_init_type));
return global;
},
Special.Explicit => @panic("TODO"),
.Explicit => @panic("TODO"),
}
//{
@ -517,7 +516,7 @@ pub const Value = struct {
const self = try comp.gpa().create(Value.Int);
self.* = Value.Int{
.base = Value{
.id = Value.Id.Int,
.id = .Int,
.typ = typ,
.ref_count = std.atomic.Int(usize).init(1),
},
@ -536,7 +535,7 @@ pub const Value = struct {
pub fn getLlvmConst(self: *Int, ofile: *ObjectFile) !?*llvm.Value {
switch (self.base.typ.id) {
Type.Id.Int => {
.Int => {
const type_ref = try self.base.typ.getLlvmType(ofile.arena, ofile.context);
if (self.big_int.len() == 0) {
return llvm.ConstNull(type_ref);
@ -554,7 +553,7 @@ pub const Value = struct {
};
return if (self.big_int.isPositive()) unsigned_val else llvm.ConstNeg(unsigned_val);
},
Type.Id.ComptimeInt => unreachable,
.ComptimeInt => unreachable,
else => unreachable,
}
}
@ -566,7 +565,7 @@ pub const Value = struct {
const new = try comp.gpa().create(Value.Int);
new.* = Value.Int{
.base = Value{
.id = Value.Id.Int,
.id = .Int,
.typ = old.base.typ,
.ref_count = std.atomic.Int(usize).init(1),
},

View File

@ -1,4 +1,4 @@
/*
/*
* Copyright (c) 2015 Andrew Kelley
*
* This file is part of zig, which is MIT licensed.

View File

@ -7740,7 +7740,7 @@ static void do_code_gen(CodeGen *g) {
char *error = nullptr;
if (LLVMVerifyModule(g->module, LLVMReturnStatusAction, &error)) {
zig_panic("broken LLVM module found: %s", error);
zig_panic("broken LLVM module found: %s\nThis is a bug in the Zig compiler.", error);
}
}

View File

@ -13873,16 +13873,6 @@ static IrInstruction *ir_analyze_instruction_return(IrAnalyze *ira, IrInstructio
if (type_is_invalid(operand->value.type))
return ir_unreach_error(ira);
if (!instr_is_comptime(operand) && ira->explicit_return_type != nullptr &&
handle_is_ptr(ira->explicit_return_type))
{
// result location mechanism took care of it.
IrInstruction *result = ir_build_return(&ira->new_irb, instruction->base.scope,
instruction->base.source_node, nullptr);
result->value.type = ira->codegen->builtin_types.entry_unreachable;
return ir_finish_anal(ira, result);
}
IrInstruction *casted_operand = ir_implicit_cast(ira, operand, ira->explicit_return_type);
if (type_is_invalid(casted_operand->value.type)) {
AstNode *source_node = ira->explicit_return_type_source_node;
@ -13894,6 +13884,16 @@ static IrInstruction *ir_analyze_instruction_return(IrAnalyze *ira, IrInstructio
return ir_unreach_error(ira);
}
if (!instr_is_comptime(operand) && ira->explicit_return_type != nullptr &&
handle_is_ptr(ira->explicit_return_type))
{
// result location mechanism took care of it.
IrInstruction *result = ir_build_return(&ira->new_irb, instruction->base.scope,
instruction->base.source_node, nullptr);
result->value.type = ira->codegen->builtin_types.entry_unreachable;
return ir_finish_anal(ira, result);
}
if (casted_operand->value.special == ConstValSpecialRuntime &&
casted_operand->value.type->id == ZigTypeIdPointer &&
casted_operand->value.data.rh_ptr == RuntimeHintPtrStack)

View File

@ -40,6 +40,9 @@ void rangeset_sort(RangeSet *rs) {
}
bool rangeset_spans(RangeSet *rs, BigInt *first, BigInt *last) {
if (rs->src_range_list.length == 0)
return false;
rangeset_sort(rs);
const Range *first_range = &rs->src_range_list.at(0).range;

View File

@ -465,7 +465,7 @@ ZIG_EXTERN_C bool ZigLLDLink(enum ZigLLVM_ObjectFormatType oformat, const char *
ZIG_EXTERN_C bool ZigLLVMWriteArchive(const char *archive_name, const char **file_names, size_t file_name_count,
enum ZigLLVM_OSType os_type);
bool ZigLLVMWriteImportLibrary(const char *def_path, const ZigLLVM_ArchType arch,
bool ZigLLVMWriteImportLibrary(const char *def_path, const enum ZigLLVM_ArchType arch,
const char *output_lib_path, const bool kill_at);
ZIG_EXTERN_C void ZigLLVMGetNativeTarget(enum ZigLLVM_ArchType *arch_type, enum ZigLLVM_SubArchType *sub_arch_type,

View File

@ -29,6 +29,37 @@ pub fn addCases(cases: *tests.CompileErrorContext) void {
);
cases.add(
"empty switch on an integer",
\\export fn entry() void {
\\ var x: u32 = 0;
\\ switch(x) {}
\\}
,
"tmp.zig:3:5: error: switch must handle all possibilities",
);
cases.add(
"incorrect return type",
\\ pub export fn entry() void{
\\ _ = foo();
\\ }
\\ const A = struct {
\\ a: u32,
\\ };
\\ fn foo() A {
\\ return bar();
\\ }
\\ const B = struct {
\\ a: u32,
\\ };
\\ fn bar() B {
\\ unreachable;
\\ }
,
"tmp.zig:8:16: error: expected type 'A', found 'B'",
);
cases.add(
"regression test #2980: base type u32 is not type checked properly when assigning a value within a struct",
\\const Foo = struct {

6
test/stage2/test.zig Normal file
View File

@ -0,0 +1,6 @@
const TestContext = @import("../../src-self-hosted/test.zig").TestContext;
pub fn addCases(ctx: *TestContext) !void {
try @import("compile_errors.zig").addCases(ctx);
try @import("compare_output.zig").addCases(ctx);
}