self-hosted: fix compile errors, except for codegen.zig

This commit is contained in:
Andrew Kelley 2020-05-13 20:06:01 -04:00
parent a3da584248
commit 080022f6c6
8 changed files with 525 additions and 264 deletions

View File

@ -269,13 +269,6 @@ pub fn ArrayListAligned(comptime T: type, comptime alignment: ?u29) type {
/// Bring-your-own allocator with every function call.
/// Initialize directly and deinitialize with `deinit` or use `toOwnedSlice`.
- pub fn init() Self {
- return .{
- .items = &[_]T{},
- .capacity = 0,
- };
- }
pub fn ArrayListUnmanaged(comptime T: type) type {
return ArrayListAlignedUnmanaged(T, null);
}
@ -317,7 +310,7 @@ pub fn ArrayListAlignedUnmanaged(comptime T: type, comptime alignment: ?u29) typ
/// The caller owns the returned memory. ArrayList becomes empty.
pub fn toOwnedSlice(self: *Self, allocator: *Allocator) Slice {
const result = allocator.shrink(self.allocatedSlice(), self.items.len);
- self.* = init(allocator);
+ self.* = Self{};
return result;
}

View File

@ -279,6 +279,21 @@ pub const Allocator = struct {
const shrink_result = self.shrinkFn(self, non_const_ptr[0..bytes_len], Slice.alignment, 0, 1);
assert(shrink_result.len == 0);
}
/// Copies `m` to newly allocated memory. Caller owns the memory.
pub fn dupe(allocator: *Allocator, comptime T: type, m: []const T) ![]T {
const new_buf = try allocator.alloc(T, m.len);
copy(T, new_buf, m);
return new_buf;
}
/// Copies `m` to newly allocated memory, with a null-terminated element. Caller owns the memory.
pub fn dupeZ(allocator: *Allocator, comptime T: type, m: []const T) ![:0]T {
const new_buf = try allocator.alloc(T, m.len + 1);
copy(T, new_buf, m);
new_buf[m.len] = 0;
return new_buf[0..m.len :0];
}
};
/// Copy all of source into dest at position 0.
@ -762,19 +777,14 @@ pub fn allEqual(comptime T: type, slice: []const T, scalar: T) bool {
return true;
}
/// Copies `m` to newly allocated memory. Caller owns the memory.
+ /// Deprecated, use `Allocator.dupe`.
pub fn dupe(allocator: *Allocator, comptime T: type, m: []const T) ![]T {
- const new_buf = try allocator.alloc(T, m.len);
- copy(T, new_buf, m);
- return new_buf;
+ return allocator.dupe(T, m);
}
/// Copies `m` to newly allocated memory, with a null-terminated element. Caller owns the memory.
+ /// Deprecated, use `Allocator.dupeZ`.
pub fn dupeZ(allocator: *Allocator, comptime T: type, m: []const T) ![:0]T {
- const new_buf = try allocator.alloc(T, m.len + 1);
- copy(T, new_buf, m);
- new_buf[m.len] = 0;
- return new_buf[0..m.len :0];
+ return allocator.dupeZ(T, m);
}
/// Remove values from the beginning of a slice.
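The new allocator methods behave exactly like the old free functions; only the call syntax moves onto the allocator. A minimal usage sketch (test name and values are illustrative, not part of this commit):

const std = @import("std");

test "Allocator.dupe and dupeZ sketch" {
    const allocator = std.testing.allocator;

    // dupe: plain copy; caller frees.
    const copy = try allocator.dupe(u8, "hello");
    defer allocator.free(copy);

    // dupeZ: same, plus a 0 terminator; result type is [:0]u8.
    const copy_z = try allocator.dupeZ(u8, "hello");
    defer allocator.free(copy_z);

    std.debug.assert(std.mem.eql(u8, copy, copy_z));
    std.debug.assert(copy_z[copy_z.len] == 0); // sentinel is addressable
}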

View File

@ -16,7 +16,7 @@ pub const Managed = struct {
/// If this is `null` then there is no memory management needed.
arena: ?*std.heap.ArenaAllocator.State = null,
- pub fn deinit(self: *ManagedTypedValue, allocator: *Allocator) void {
+ pub fn deinit(self: *Managed, allocator: *Allocator) void {
if (self.arena) |a| a.promote(allocator).deinit();
self.* = undefined;
}
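For context, `arena.promote(allocator).deinit()` is the usual round trip for `std.heap.ArenaAllocator.State`: only the compact state is stored in the value, and the child allocator is supplied again when the arena is reconstituted. A minimal sketch of the pattern, assuming any parent allocator:

const std = @import("std");

fn arenaStateRoundTrip(parent: *std.mem.Allocator) !void {
    var arena = std.heap.ArenaAllocator.init(parent);
    _ = try arena.allocator.alloc(u8, 16); // some arena-owned memory

    // Store only the small State (this is what Managed.arena points at).
    const state = arena.state;

    // Later: rebuild the full arena from the state and free everything.
    state.promote(parent).deinit();
}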

View File

@ -4,10 +4,11 @@ const assert = std.debug.assert;
const ir = @import("ir.zig");
const Type = @import("type.zig").Type;
const Value = @import("value.zig").Value;
const TypedValue = @import("TypedValue.zig");
const Target = std.Target;
const Allocator = mem.Allocator;
- pub fn generateSymbol(typed_value: ir.TypedValue, module: ir.Module, code: *std.ArrayList(u8)) !?*ir.ErrorMsg {
+ pub fn generateSymbol(typed_value: TypedValue, module: ir.Module, code: *std.ArrayList(u8)) !?*ir.ErrorMsg {
switch (typed_value.ty.zigTypeTag()) {
.Fn => {
const module_fn = typed_value.val.cast(Value.Payload.Function).?.func;

View File

@ -196,11 +196,9 @@ pub const Module = struct {
/// We optimize memory usage for a compilation with no compile errors by storing the
/// error messages and mapping outside of `Decl`.
/// The ErrorMsg memory is owned by the decl, using Module's allocator.
+ /// Note that a Decl can succeed but the Fn it represents can fail. In this case,
+ /// a Decl can have a failed_decls entry but have analysis status of success.
failed_decls: std.AutoHashMap(*Decl, *ErrorMsg),
- /// We optimize memory usage for a compilation with no compile errors by storing the
- /// error messages and mapping outside of `Fn`.
- /// The ErrorMsg memory is owned by the `Fn`, using Module's allocator.
- failed_fns: std.AutoHashMap(*Fn, *ErrorMsg),
/// Using a map here for consistency with the other fields here.
/// The ErrorMsg memory is owned by the `Scope.ZIRModule`, using Module's allocator.
failed_files: std.AutoHashMap(*Scope.ZIRModule, *ErrorMsg),
@ -221,7 +219,14 @@ pub const Module = struct {
link: link.ElfFile.Export,
/// The Decl that performs the export. Note that this is *not* the Decl being exported.
owner_decl: *Decl,
- status: enum { in_progress, failed, complete },
+ status: enum {
+ in_progress,
+ failed,
+ /// Indicates that the failure was due to a temporary issue, such as an I/O error
+ /// when writing to the output file. Retrying the export may succeed.
+ failed_retryable,
+ complete,
+ },
};
pub const Decl = struct {
@ -260,6 +265,11 @@ pub const Module = struct {
/// In this case the `typed_value.most_recent` can still be accessed.
/// There will be a corresponding ErrorMsg in Module.failed_decls.
codegen_failure,
/// In this case the `typed_value.most_recent` can still be accessed.
/// There will be a corresponding ErrorMsg in Module.failed_decls.
/// This indicates the failure was something like running out of disk space,
/// and attempting codegen again may succeed.
codegen_failure_retryable,
/// This Decl might be OK but it depends on another one which did not successfully complete
/// semantic analysis. There is a most recent value available.
repeat_dependency_failure,
@ -280,40 +290,63 @@ pub const Module = struct {
/// The shallow set of other decls whose typed_value could possibly change if this Decl's
/// typed_value is modified.
/// TODO look into using a lightweight map/set data structure rather than a linear array.
- dependants: ArrayListUnmanaged(*Decl) = .{},
- pub fn typedValue(self: Decl) ?TypedValue {
- switch (self.analysis) {
- .initial_in_progress,
- .initial_dependency_failure,
- .initial_sema_failure,
- => return null,
- .codegen_failure,
- .repeat_dependency_failure,
- .repeat_sema_failure,
- .repeat_in_progress,
- .complete,
- => return self.typed_value.most_recent,
- }
- }
+ dependants: ArrayListUnmanaged(*Decl) = ArrayListUnmanaged(*Decl){},
pub fn destroy(self: *Decl, allocator: *Allocator) void {
- allocator.free(mem.spanZ(u8, self.name));
- if (self.typedValue()) |tv| tv.deinit(allocator);
+ allocator.free(mem.spanZ(self.name));
+ if (self.typedValueManaged()) |tvm| {
+ tvm.deinit(allocator);
+ }
allocator.destroy(self);
}
pub const Hash = [16]u8;
/// If the name is small enough, it is used directly as the hash.
/// If it is long, blake3 hash is computed.
pub fn hashSimpleName(name: []const u8) Hash {
var out: Hash = undefined;
if (name.len <= Hash.len) {
mem.copy(u8, &out, name);
mem.set(u8, out[name.len..], 0);
} else {
std.crypto.Blake3.hash(name, &out);
}
return out;
}
/// Must generate unique bytes with no collisions with other decls.
/// The point of hashing here is only to limit the number of bytes of
/// the unique identifier to a fixed size (16 bytes).
pub fn fullyQualifiedNameHash(self: Decl) Hash {
// Right now we only have ZIRModule as the source. So this is simply the
// relative name of the decl.
- var out: Hash = undefined;
- std.crypto.Blake3.hash(mem.spanZ(u8, self.name), &out);
- return out;
+ return hashSimpleName(mem.spanZ(u8, self.name));
}
pub fn typedValue(self: *Decl) error{AnalysisFail}!TypedValue {
const tvm = self.typedValueManaged() orelse return error.AnalysisFail;
return tvm.typed_value;
}
pub fn value(self: *Decl) error{AnalysisFail}!Value {
return (try self.typedValue()).val;
}
fn typedValueManaged(self: *Decl) ?*TypedValue.Managed {
switch (self.analysis) {
.initial_in_progress,
.initial_dependency_failure,
.initial_sema_failure,
=> return null,
.codegen_failure,
.codegen_failure_retryable,
.repeat_dependency_failure,
.repeat_sema_failure,
.repeat_in_progress,
.complete,
=> return &self.typed_value.most_recent,
}
}
};
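The embed-or-hash scheme above keeps the identifier at a fixed 16 bytes without giving up uniqueness: short names are injective by construction (identity plus zero padding), and long names lean on Blake3's collision resistance. A sketch of the resulting behavior (hypothetical test, assuming `Decl` is reachable from a test in this file):

test "hashSimpleName embeds short names and hashes long ones" {
    const short = Decl.hashSimpleName("entry");
    std.debug.assert(std.mem.eql(u8, short[0..5], "entry"));
    std.debug.assert(short[5] == 0); // zero padded to 16 bytes

    const long_name = "a_decl_name_longer_than_sixteen_bytes";
    const long = Decl.hashSimpleName(long_name);
    // Blake3 digest, not an identity embedding of the first 16 bytes.
    std.debug.assert(!std.mem.eql(u8, long[0..], long_name[0..16]));
}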
@ -325,22 +358,19 @@ pub const Module = struct {
/// The value is the source instruction.
queued: *text.Inst.Fn,
in_progress: *Analysis,
- /// There will be a corresponding ErrorMsg in Module.failed_fns
+ /// There will be a corresponding ErrorMsg in Module.failed_decls
failure,
success: Body,
},
/// The direct container of the Fn. This field will need to get more fleshed out when
/// self-hosted supports proper struct types and Zig AST => ZIR.
scope: *Scope.ZIRModule,
/// This memory is temporary and points to stack memory for the duration
/// of Fn analysis.
pub const Analysis = struct {
inner_block: Scope.Block,
- /// null value means a semantic analysis error happened.
- inst_table: std.AutoHashMap(*text.Inst, ?*Inst),
- /// Owns the memory for instructions
- arena: std.heap.ArenaAllocator,
+ /// TODO Performance optimization idea: instead of this inst_table,
+ /// use a field in the text.Inst instead to track corresponding instructions
+ inst_table: std.AutoHashMap(*text.Inst, *Inst),
+ needed_inst_capacity: usize,
};
};
@ -374,6 +404,16 @@ pub const Module = struct {
}
}
/// Asserts the scope has a parent which is a ZIRModule and
/// returns it.
pub fn namespace(self: *Scope) *ZIRModule {
switch (self.tag) {
.block => return self.cast(Block).?.decl.scope,
.decl => return self.cast(DeclAnalysis).?.decl.scope,
.zir_module => return self.cast(ZIRModule).?,
}
}
pub const Tag = enum {
zir_module,
block,
@ -407,11 +447,11 @@ pub const Module = struct {
.unloaded_parse_failure,
=> {},
.loaded_success => {
- allocator.free(contents.source);
+ allocator.free(self.source.bytes);
self.contents.module.deinit(allocator);
},
.loaded_parse_failure => {
- allocator.free(contents.source);
+ allocator.free(self.source.bytes);
},
}
self.* = undefined;
@ -469,8 +509,8 @@ pub const Module = struct {
) !void {
const loc = std.zig.findLineColumn(source, simple_err_msg.byte_offset);
try errors.append(.{
- .src_path = try mem.dupe(u8, &arena.allocator, sub_file_path),
- .msg = try mem.dupe(u8, &arena.allocator, simple_err_msg.msg),
+ .src_path = try arena.allocator.dupe(u8, sub_file_path),
+ .msg = try arena.allocator.dupe(u8, simple_err_msg.msg),
.byte_offset = simple_err_msg.byte_offset,
.line = loc.line,
.column = loc.column,
@ -480,7 +520,7 @@ pub const Module = struct {
pub fn deinit(self: *Module) void {
const allocator = self.allocator;
allocator.free(self.errors);
self.work_stack.deinit(allocator);
{
var it = self.decl_table.iterator();
while (it.next()) |kv| {
@ -488,8 +528,44 @@ pub const Module = struct {
}
self.decl_table.deinit();
}
{
var it = self.failed_decls.iterator();
while (it.next()) |kv| {
kv.value.destroy(allocator);
}
self.failed_decls.deinit();
}
{
var it = self.failed_files.iterator();
while (it.next()) |kv| {
kv.value.destroy(allocator);
}
self.failed_files.deinit();
}
{
var it = self.failed_exports.iterator();
while (it.next()) |kv| {
kv.value.destroy(allocator);
}
self.failed_exports.deinit();
}
self.decl_exports.deinit();
{
var it = self.export_owners.iterator();
while (it.next()) |kv| {
const export_list = kv.value;
for (export_list) |exp| {
allocator.destroy(exp);
}
allocator.free(export_list);
}
self.export_owners.deinit();
}
self.root_pkg.destroy();
- self.root_scope.deinit();
+ {
+ self.root_scope.deinit(allocator);
+ allocator.destroy(self.root_scope);
+ }
self.* = undefined;
}
@ -504,19 +580,20 @@ pub const Module = struct {
// Analyze the root source file now.
self.analyzeRoot(self.root_scope) catch |err| switch (err) {
error.AnalysisFail => {
- assert(self.failed_files.size != 0);
+ assert(self.totalErrorCount() != 0);
},
else => |e| return e,
};
try self.performAllTheWork();
try self.bin_file.flush();
self.link_error_flags = self.bin_file.error_flags;
}
pub fn totalErrorCount(self: *Module) usize {
- return self.failed_decls.size +
- self.failed_fns.size +
+ return self.failed_decls.size +
self.failed_files.size +
self.failed_exports.size +
@boolToInt(self.link_error_flags.no_entry_point_found);
}
@ -533,17 +610,8 @@ pub const Module = struct {
while (it.next()) |kv| {
const scope = kv.key;
const err_msg = kv.value;
- const source = scope.parse_failure.source;
- AllErrors.add(&arena, &errors, scope.sub_file_path, source, err_msg);
- }
- }
- {
- var it = self.failed_fns.iterator();
- while (it.next()) |kv| {
- const func = kv.key;
- const err_msg = kv.value;
- const source = func.scope.success.source;
- AllErrors.add(&arena, &errors, func.scope.sub_file_path, source, err_msg);
+ const source = scope.source.bytes;
+ try AllErrors.add(&arena, &errors, scope.sub_file_path, source, err_msg.*);
}
}
{
@ -551,8 +619,8 @@ pub const Module = struct {
while (it.next()) |kv| {
const decl = kv.key;
const err_msg = kv.value;
- const source = decl.scope.success.source;
- AllErrors.add(&arena, &errors, decl.scope.sub_file_path, source, err_msg);
+ const source = decl.scope.source.bytes;
+ try AllErrors.add(&arena, &errors, decl.scope.sub_file_path, source, err_msg.*);
}
}
{
@ -560,14 +628,14 @@ pub const Module = struct {
while (it.next()) |kv| {
const decl = kv.key.owner_decl;
const err_msg = kv.value;
- const source = decl.scope.success.source;
- try AllErrors.add(&arena, &errors, decl.scope.sub_file_path, source, err_msg);
+ const source = decl.scope.source.bytes;
+ try AllErrors.add(&arena, &errors, decl.scope.sub_file_path, source, err_msg.*);
}
}
if (self.link_error_flags.no_entry_point_found) {
try errors.append(.{
- .src_path = self.module.root_src_path,
+ .src_path = self.root_pkg.root_src_path,
.line = 0,
.column = 0,
.byte_offset = 0,
@ -579,12 +647,56 @@ pub const Module = struct {
return AllErrors{
.arena = arena.state,
- .list = try mem.dupe(&arena.allocator, AllErrors.Message, errors.items),
+ .list = try arena.allocator.dupe(AllErrors.Message, errors.items),
};
}
const InnerError = error{ OutOfMemory, AnalysisFail };
pub fn performAllTheWork(self: *Module) error{OutOfMemory}!void {
while (self.work_stack.popOrNull()) |work_item| switch (work_item) {
.codegen_decl => |decl| switch (decl.analysis) {
.initial_in_progress,
.repeat_in_progress,
=> unreachable,
.initial_sema_failure,
.repeat_sema_failure,
.codegen_failure,
.initial_dependency_failure,
.repeat_dependency_failure,
=> continue,
.complete, .codegen_failure_retryable => {
if (decl.typed_value.most_recent.typed_value.val.cast(Value.Payload.Function)) |payload| {
switch (payload.func.analysis) {
.queued => self.analyzeFnBody(decl, payload.func) catch |err| switch (err) {
error.AnalysisFail => continue,
else => |e| return e,
},
.in_progress => unreachable,
.failure => continue,
.success => {},
}
}
self.bin_file.updateDecl(self, decl) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
else => {
try self.failed_decls.ensureCapacity(self.failed_decls.size + 1);
self.failed_decls.putAssumeCapacityNoClobber(decl, try ErrorMsg.create(
self.allocator,
decl.src,
"unable to codegen: {}",
.{@errorName(err)},
));
decl.analysis = .codegen_failure_retryable;
},
};
},
},
};
}
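Note the error-handling idiom in the `updateDecl` failure path above: capacity in `failed_decls` is reserved before the fallible call, so recording a codegen failure can only fail on the `ErrorMsg.create` allocation itself, and the decl is downgraded to `codegen_failure_retryable` instead of being poisoned permanently. A standalone sketch of the reserve-then-record idiom (all names hypothetical):

const std = @import("std");

fn codegenOne(allocator: *std.mem.Allocator, failures: *std.AutoHashMap(u32, []u8), key: u32) !void {
    // Reserve the map slot up front so the failure path cannot collide
    // with OutOfMemory while trying to record a different error.
    try failures.ensureCapacity(failures.size + 1);
    emitMachineCode() catch |err| {
        const msg = try std.fmt.allocPrint(allocator, "unable to codegen: {}", .{@errorName(err)});
        failures.putAssumeCapacityNoClobber(key, msg);
        return;
    };
}

fn emitMachineCode() !void {
    return error.NoSpaceLeft; // stand-in for a retryable I/O failure
}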
fn analyzeRoot(self: *Module, root_scope: *Scope.ZIRModule) !void {
// TODO use the cache to identify, from the modified source files, the decls which have
// changed based on the span of memory that represents the decl in the re-parsed source file.
@ -650,56 +762,39 @@ pub const Module = struct {
try analyzeExport(self, &root_scope.base, export_inst);
}
}
- while (self.work_stack.pop()) |work_item| switch (work_item) {
- .codegen_decl => |decl| switch (decl.analysis) {
- .success => {
- if (decl.typed_value.most_recent.typed_value.val.cast(Value.Function)) |payload| {
- switch (payload.func.analysis) {
- .queued => self.analyzeFnBody(decl, payload.func) catch |err| switch (err) {
- error.AnalysisFail => {
- assert(func_payload.func.analysis == .failure);
- continue;
- },
- else => |e| return e,
- },
- .in_progress => unreachable,
- .failure => continue,
- .success => {},
- }
- }
- try self.bin_file.updateDecl(self, decl);
- },
- },
- };
}
fn analyzeFnBody(self: *Module, decl: *Decl, func: *Fn) !void {
// Use the Decl's arena for function memory.
var arena = decl.typed_value.most_recent.arena.?.promote(self.allocator);
defer decl.typed_value.most_recent.arena.?.* = arena.state;
- var analysis: Analysis = .{
+ var analysis: Fn.Analysis = .{
.inner_block = .{
.func = func,
.decl = decl,
.instructions = .{},
.arena = &arena.allocator,
},
- .inst_table = std.AutoHashMap(*text.Inst, ?*Inst).init(self.allocator),
+ .needed_inst_capacity = 0,
+ .inst_table = std.AutoHashMap(*text.Inst, *Inst).init(self.allocator),
};
- defer analysis.inner_block.instructions.deinit();
+ defer analysis.inner_block.instructions.deinit(self.allocator);
defer analysis.inst_table.deinit();
const fn_inst = func.analysis.queued;
func.analysis = .{ .in_progress = &analysis };
- try self.analyzeBody(&analysis.inner_block, fn_inst.positionals.body);
+ try self.analyzeBody(&analysis.inner_block.base, fn_inst.positionals.body);
- func.analysis = .{ .success = .{ .instructions = analysis.inner_block.instructions.toOwnedSlice() } };
+ func.analysis = .{
+ .success = .{
+ .instructions = try arena.allocator.dupe(*Inst, analysis.inner_block.instructions.items),
+ },
+ };
}
fn resolveDecl(self: *Module, scope: *Scope, old_inst: *text.Inst) InnerError!*Decl {
- const hash = old_inst.fullyQualifiedNameHash();
+ const hash = Decl.hashSimpleName(old_inst.name);
if (self.decl_table.get(hash)) |kv| {
return kv.value;
} else {
@ -711,7 +806,7 @@ pub const Module = struct {
errdefer self.allocator.free(name);
new_decl.* = .{
.name = name,
- .scope = scope.findZIRModule(),
+ .scope = scope.namespace(),
.src = old_inst.src,
.typed_value = .{ .never_succeeded = {} },
.analysis = .initial_in_progress,
@ -726,12 +821,11 @@ pub const Module = struct {
};
errdefer decl_scope.arena.deinit();
- const arena_state = try self.allocator.create(std.heap.ArenaAllocator.State);
- errdefer self.allocator.destroy(arena_state);
+ const arena_state = try decl_scope.arena.allocator.create(std.heap.ArenaAllocator.State);
const typed_value = try self.analyzeInstConst(&decl_scope.base, old_inst);
- arena_state.* = decl_scope.arena;
+ arena_state.* = decl_scope.arena.state;
new_decl.typed_value = .{
.most_recent = .{
@ -741,7 +835,7 @@ pub const Module = struct {
};
new_decl.analysis = .complete;
// We ensureCapacity when scanning for decls.
- self.work_stack.appendAssumeCapacity(self.allocator, .{ .codegen_decl = new_decl });
+ self.work_stack.appendAssumeCapacity(.{ .codegen_decl = new_decl });
return new_decl;
}
}
@ -756,6 +850,7 @@ pub const Module = struct {
.initial_sema_failure,
.repeat_sema_failure,
.codegen_failure,
.codegen_failure_retryable,
=> return error.AnalysisFail,
.complete => return decl,
@ -764,14 +859,14 @@ pub const Module = struct {
fn resolveInst(self: *Module, scope: *Scope, old_inst: *text.Inst) InnerError!*Inst {
if (scope.cast(Scope.Block)) |block| {
- if (block.func.inst_table.get(old_inst)) |kv| {
- return kv.value.ptr orelse return error.AnalysisFail;
+ if (block.func.analysis.in_progress.inst_table.get(old_inst)) |kv| {
+ return kv.value;
}
}
const decl = try self.resolveCompleteDecl(scope, old_inst);
const decl_ref = try self.analyzeDeclRef(scope, old_inst.src, decl);
- return self.analyzeDeref(scope, old_inst.src, decl_ref);
+ return self.analyzeDeref(scope, old_inst.src, decl_ref, old_inst.src);
}
fn requireRuntimeBlock(self: *Module, scope: *Scope, src: usize) !*Scope.Block {
@ -819,7 +914,7 @@ pub const Module = struct {
return val.toType();
}
- fn analyzeExport(self: *Module, scope: *Scope, export_inst: *text.Inst.Export) !void {
+ fn analyzeExport(self: *Module, scope: *Scope, export_inst: *text.Inst.Export) InnerError!void {
try self.decl_exports.ensureCapacity(self.decl_exports.size + 1);
try self.export_owners.ensureCapacity(self.export_owners.size + 1);
const symbol_name = try self.resolveConstString(scope, export_inst.positionals.symbol_name);
@ -840,7 +935,7 @@ pub const Module = struct {
const owner_decl = scope.decl();
new_export.* = .{
- .options = .{ .data = .{ .name = symbol_name } },
+ .options = .{ .name = symbol_name },
.src = export_inst.base.src,
.link = .{},
.owner_decl = owner_decl,
@ -865,7 +960,19 @@ pub const Module = struct {
de_gop.kv.value[de_gop.kv.value.len - 1] = new_export;
errdefer de_gop.kv.value = self.allocator.shrink(de_gop.kv.value, de_gop.kv.value.len - 1);
- try self.bin_file.updateDeclExports(self, decl, de_gop.kv.value);
+ self.bin_file.updateDeclExports(self, exported_decl, de_gop.kv.value) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
else => {
try self.failed_exports.ensureCapacity(self.failed_exports.size + 1);
self.failed_exports.putAssumeCapacityNoClobber(new_export, try ErrorMsg.create(
self.allocator,
export_inst.base.src,
"unable to export: {}",
.{@errorName(err)},
));
new_export.status = .failed_retryable;
},
};
}
/// TODO should not need the cast on the last parameter at the callsites
@ -976,7 +1083,7 @@ pub const Module = struct {
fn constIntBig(self: *Module, scope: *Scope, src: usize, ty: Type, big_int: BigIntConst) !*Inst {
const val_payload = if (big_int.positive) blk: {
if (big_int.to(u64)) |x| {
- return self.constIntUnsigned(src, ty, x);
+ return self.constIntUnsigned(scope, src, ty, x);
} else |err| switch (err) {
error.NegativeIntoUnsigned => unreachable,
error.TargetTooSmall => {}, // handled below
@ -986,7 +1093,7 @@ pub const Module = struct {
break :blk &big_int_payload.base;
} else blk: {
if (big_int.to(i64)) |x| {
- return self.constIntSigned(src, ty, x);
+ return self.constIntSigned(scope, src, ty, x);
} else |err| switch (err) {
error.NegativeIntoUnsigned => unreachable,
error.TargetTooSmall => {}, // handled below
@ -1014,15 +1121,17 @@ pub const Module = struct {
switch (old_inst.tag) {
.breakpoint => return self.analyzeInstBreakpoint(scope, old_inst.cast(text.Inst.Breakpoint).?),
.call => return self.analyzeInstCall(scope, old_inst.cast(text.Inst.Call).?),
.declref => return self.analyzeInstDeclRef(scope, old_inst.cast(text.Inst.DeclRef).?),
.str => {
- // We can use this reference because Inst.Const's Value is arena-allocated.
- // The value would get copied to a MemoryCell before the `text.Inst.Str` lifetime ends.
const bytes = old_inst.cast(text.Inst.Str).?.positionals.bytes;
- return self.constStr(old_inst.src, bytes);
+ // The bytes references memory inside the ZIR text module, which can get deallocated
+ // after semantic analysis is complete. We need the memory to be in the Decl's arena.
+ const arena_bytes = try scope.arena().dupe(u8, bytes);
+ return self.constStr(scope, old_inst.src, arena_bytes);
},
.int => {
const big_int = old_inst.cast(text.Inst.Int).?.positionals.int;
- return self.constIntBig(old_inst.src, Type.initTag(.comptime_int), big_int);
+ return self.constIntBig(scope, old_inst.src, Type.initTag(.comptime_int), big_int);
},
.ptrtoint => return self.analyzeInstPtrToInt(scope, old_inst.cast(text.Inst.PtrToInt).?),
.fieldptr => return self.analyzeInstFieldPtr(scope, old_inst.cast(text.Inst.FieldPtr).?),
@ -1036,7 +1145,7 @@ pub const Module = struct {
try self.analyzeExport(scope, old_inst.cast(text.Inst.Export).?);
return self.constVoid(scope, old_inst.src);
},
- .primitive => return self.analyzeInstPrimitive(old_inst.cast(text.Inst.Primitive).?),
+ .primitive => return self.analyzeInstPrimitive(scope, old_inst.cast(text.Inst.Primitive).?),
.fntype => return self.analyzeInstFnType(scope, old_inst.cast(text.Inst.FnType).?),
.intcast => return self.analyzeInstIntCast(scope, old_inst.cast(text.Inst.IntCast).?),
.bitcast => return self.analyzeInstBitCast(scope, old_inst.cast(text.Inst.BitCast).?),
@ -1054,6 +1163,14 @@ pub const Module = struct {
return self.addNewInstArgs(b, inst.base.src, Type.initTag(.void), Inst.Breakpoint, Inst.Args(Inst.Breakpoint){});
}
fn analyzeInstDeclRef(self: *Module, scope: *Scope, inst: *text.Inst.DeclRef) InnerError!*Inst {
return self.fail(scope, inst.base.src, "TODO implement analyzeInstDeclFef", .{});
}
fn analyzeDeclRef(self: *Module, scope: *Scope, src: usize, decl: *Decl) InnerError!*Inst {
return self.fail(scope, src, "TODO implement analyzeDeclRef", .{});
}
fn analyzeInstCall(self: *Module, scope: *Scope, inst: *text.Inst.Call) InnerError!*Inst {
const func = try self.resolveInst(scope, inst.positionals.func);
if (func.ty.zigTypeTag() != .Fn)
@ -1123,8 +1240,7 @@ pub const Module = struct {
const new_func = try scope.arena().create(Fn);
new_func.* = .{
.fn_type = fn_type,
- .analysis = .{ .queued = fn_inst.positionals.body },
+ .scope = scope.namespace(),
+ .analysis = .{ .queued = fn_inst },
};
const fn_payload = try scope.arena().create(Value.Payload.Function);
fn_payload.* = .{ .func = new_func };
@ -1141,28 +1257,28 @@ pub const Module = struct {
fntype.positionals.param_types.len == 0 and
fntype.kw_args.cc == .Unspecified)
{
- return self.constType(fntype.base.src, Type.initTag(.fn_noreturn_no_args));
+ return self.constType(scope, fntype.base.src, Type.initTag(.fn_noreturn_no_args));
}
if (return_type.zigTypeTag() == .NoReturn and
fntype.positionals.param_types.len == 0 and
fntype.kw_args.cc == .Naked)
{
- return self.constType(fntype.base.src, Type.initTag(.fn_naked_noreturn_no_args));
+ return self.constType(scope, fntype.base.src, Type.initTag(.fn_naked_noreturn_no_args));
}
if (return_type.zigTypeTag() == .Void and
fntype.positionals.param_types.len == 0 and
fntype.kw_args.cc == .C)
{
- return self.constType(fntype.base.src, Type.initTag(.fn_ccc_void_no_args));
+ return self.constType(scope, fntype.base.src, Type.initTag(.fn_ccc_void_no_args));
}
return self.fail(scope, fntype.base.src, "TODO implement fntype instruction more", .{});
}
- fn analyzeInstPrimitive(self: *Module, primitive: *text.Inst.Primitive) InnerError!*Inst {
- return self.constType(primitive.base.src, primitive.positionals.tag.toType());
+ fn analyzeInstPrimitive(self: *Module, scope: *Scope, primitive: *text.Inst.Primitive) InnerError!*Inst {
+ return self.constType(scope, primitive.base.src, primitive.positionals.tag.toType());
}
fn analyzeInstAs(self: *Module, scope: *Scope, as: *text.Inst.As) InnerError!*Inst {
@ -1332,18 +1448,22 @@ pub const Module = struct {
fn analyzeInstDeref(self: *Module, scope: *Scope, deref: *text.Inst.Deref) InnerError!*Inst {
const ptr = try self.resolveInst(scope, deref.positionals.ptr);
return self.analyzeDeref(scope, deref.base.src, ptr, deref.positionals.ptr.src);
}
fn analyzeDeref(self: *Module, scope: *Scope, src: usize, ptr: *Inst, ptr_src: usize) InnerError!*Inst {
const elem_ty = switch (ptr.ty.zigTypeTag()) {
.Pointer => ptr.ty.elemType(),
else => return self.fail(scope, deref.positionals.ptr.src, "expected pointer, found '{}'", .{ptr.ty}),
else => return self.fail(scope, ptr_src, "expected pointer, found '{}'", .{ptr.ty}),
};
if (ptr.value()) |val| {
- return self.constInst(scope, deref.base.src, .{
+ return self.constInst(scope, src, .{
.ty = elem_ty,
- .val = val.pointerDeref(),
+ .val = try val.pointerDeref(scope.arena()),
});
}
return self.fail(scope, deref.base.src, "TODO implement runtime deref", .{});
return self.fail(scope, src, "TODO implement runtime deref", .{});
}
fn analyzeInstAsm(self: *Module, scope: *Scope, assembly: *text.Inst.Asm) InnerError!*Inst {
@ -1390,7 +1510,7 @@ pub const Module = struct {
const rhs_ty_tag = rhs.ty.zigTypeTag();
if (is_equality_cmp and lhs_ty_tag == .Null and rhs_ty_tag == .Null) {
// null == null, null != null
- return self.constBool(inst.base.src, op == .eq);
+ return self.constBool(scope, inst.base.src, op == .eq);
} else if (is_equality_cmp and
((lhs_ty_tag == .Null and rhs_ty_tag == .Optional) or
rhs_ty_tag == .Null and lhs_ty_tag == .Optional))
@ -1399,7 +1519,7 @@ pub const Module = struct {
const opt_operand = if (lhs_ty_tag == .Optional) lhs else rhs;
if (opt_operand.value()) |opt_val| {
const is_null = opt_val.isNull();
- return self.constBool(inst.base.src, if (op == .eq) is_null else !is_null);
+ return self.constBool(scope, inst.base.src, if (op == .eq) is_null else !is_null);
}
const b = try self.requireRuntimeBlock(scope, inst.base.src);
switch (op) {
@ -1468,32 +1588,27 @@ pub const Module = struct {
const parent_block = try self.requireRuntimeBlock(scope, inst.base.src);
var true_block: Scope.Block = .{
.base = .{ .parent = scope },
.func = parent_block.func,
.decl = parent_block.decl,
.instructions = .{},
.arena = parent_block.arena,
};
- defer true_block.instructions.deinit();
+ defer true_block.instructions.deinit(self.allocator);
try self.analyzeBody(&true_block.base, inst.positionals.true_body);
var false_block: Scope.Block = .{
.base = .{ .parent = scope },
.func = parent_block.func,
.decl = parent_block.decl,
.instructions = .{},
.arena = parent_block.arena,
};
- defer false_block.instructions.deinit();
+ defer false_block.instructions.deinit(self.allocator);
try self.analyzeBody(&false_block.base, inst.positionals.false_body);
- // Copy the instruction pointers to the arena memory
- const true_instructions = try scope.arena().alloc(*Inst, true_block.instructions.items.len);
- const false_instructions = try scope.arena().alloc(*Inst, false_block.instructions.items.len);
- mem.copy(*Inst, true_instructions, true_block.instructions.items);
- mem.copy(*Inst, false_instructions, false_block.instructions.items);
return self.addNewInstArgs(parent_block, inst.base.src, Type.initTag(.void), Inst.CondBr, Inst.Args(Inst.CondBr){
.condition = cond,
- .true_body = .{ .instructions = true_instructions },
- .false_body = .{ .instructions = false_instructions },
+ .true_body = .{ .instructions = try scope.arena().dupe(*Inst, true_block.instructions.items) },
+ .false_body = .{ .instructions = try scope.arena().dupe(*Inst, false_block.instructions.items) },
});
}
@ -1521,15 +1636,18 @@ pub const Module = struct {
}
fn analyzeBody(self: *Module, scope: *Scope, body: text.Module.Body) !void {
- for (body.instructions) |src_inst| {
- const new_inst = self.analyzeInst(scope, src_inst) catch |err| {
if (scope.cast(Scope.Block)) |b| {
- self.fns.items[b.func.fn_index].analysis_status = .failure;
- try b.func.inst_table.putNoClobber(src_inst, .{ .ptr = null });
+ const analysis = b.func.analysis.in_progress;
+ analysis.needed_inst_capacity += body.instructions.len;
+ try analysis.inst_table.ensureCapacity(analysis.needed_inst_capacity);
+ for (body.instructions) |src_inst| {
+ const new_inst = try self.analyzeInst(scope, src_inst);
+ analysis.inst_table.putAssumeCapacityNoClobber(src_inst, new_inst);
+ }
+ } else {
+ for (body.instructions) |src_inst| {
+ _ = try self.analyzeInst(scope, src_inst);
+ }
- return err;
- };
- if (scope.cast(Scope.Block)) |b| try b.func.inst_table.putNoClobber(src_inst, .{ .ptr = new_inst });
}
}
@ -1575,7 +1693,7 @@ pub const Module = struct {
if (lhs.value()) |lhs_val| {
if (rhs.value()) |rhs_val| {
- return self.constBool(src, Value.compare(lhs_val, op, rhs_val));
+ return self.constBool(scope, src, Value.compare(lhs_val, op, rhs_val));
}
}
@ -1647,8 +1765,8 @@ pub const Module = struct {
const zcmp = lhs_val.orderAgainstZero();
if (lhs_val.floatHasFraction()) {
switch (op) {
- .eq => return self.constBool(src, false),
- .neq => return self.constBool(src, true),
+ .eq => return self.constBool(scope, src, false),
+ .neq => return self.constBool(scope, src, true),
else => {},
}
if (zcmp == .lt) {
@ -1682,8 +1800,8 @@ pub const Module = struct {
const zcmp = rhs_val.orderAgainstZero();
if (rhs_val.floatHasFraction()) {
switch (op) {
- .eq => return self.constBool(src, false),
- .neq => return self.constBool(src, true),
+ .eq => return self.constBool(scope, src, false),
+ .neq => return self.constBool(scope, src, true),
else => {},
}
if (zcmp == .lt) {
@ -1711,7 +1829,7 @@ pub const Module = struct {
const casted_bits = std.math.cast(u16, max_bits) catch |err| switch (err) {
error.Overflow => return self.fail(scope, src, "{} exceeds maximum integer bit count", .{max_bits}),
};
- break :blk try self.makeIntType(dest_int_is_signed, casted_bits);
+ break :blk try self.makeIntType(scope, dest_int_is_signed, casted_bits);
};
const casted_lhs = try self.coerce(scope, dest_type, lhs);
const casted_rhs = try self.coerce(scope, dest_type, rhs);
@ -1807,7 +1925,6 @@ pub const Module = struct {
fn fail(self: *Module, scope: *Scope, src: usize, comptime format: []const u8, args: var) InnerError {
@setCold(true);
try self.failed_decls.ensureCapacity(self.failed_decls.size + 1);
- try self.failed_fns.ensureCapacity(self.failed_fns.size + 1);
const err_msg = try ErrorMsg.create(self.allocator, src, format, args);
switch (scope.tag) {
.decl => {
@ -1820,10 +1937,11 @@ pub const Module = struct {
self.failed_decls.putAssumeCapacityNoClobber(decl, err_msg);
},
.block => {
- const func = scope.cast(Scope.Block).?.func;
- func.analysis = .failure;
- self.failed_fns.putAssumeCapacityNoClobber(func, err_msg);
+ const block = scope.cast(Scope.Block).?;
+ block.func.analysis = .failure;
+ self.failed_decls.putAssumeCapacityNoClobber(block.decl, err_msg);
},
.zir_module => unreachable,
}
return error.AnalysisFail;
}
@ -1868,7 +1986,7 @@ pub const ErrorMsg = struct {
}
pub fn deinit(self: *ErrorMsg, allocator: *Allocator) void {
- allocator.free(err_msg.msg);
+ allocator.free(self.msg);
self.* = undefined;
}
};
@ -1920,7 +2038,6 @@ pub fn main() anyerror!void {
.decl_exports = std.AutoHashMap(*Module.Decl, []*Module.Export).init(allocator),
.export_owners = std.AutoHashMap(*Module.Decl, []*Module.Export).init(allocator),
.failed_decls = std.AutoHashMap(*Module.Decl, *ErrorMsg).init(allocator),
- .failed_fns = std.AutoHashMap(*Module.Fn, *ErrorMsg).init(allocator),
.failed_files = std.AutoHashMap(*Module.Scope.ZIRModule, *ErrorMsg).init(allocator),
.failed_exports = std.AutoHashMap(*Module.Export, *ErrorMsg).init(allocator),
};
@ -1929,8 +2046,8 @@ pub fn main() anyerror!void {
try module.update();
- const errors = try module.getAllErrorsAlloc();
- defer errors.deinit();
+ var errors = try module.getAllErrorsAlloc();
+ defer errors.deinit(allocator);
if (errors.list.len != 0) {
for (errors.list) |full_err_msg| {
@ -1954,6 +2071,3 @@ pub fn main() anyerror!void {
try bos.flush();
}
}
- // Performance optimization ideas:
- // * when analyzing use a field in the Inst instead of HashMap to track corresponding instructions

View File

@ -8,6 +8,7 @@ const BigIntConst = std.math.big.int.Const;
const BigIntMutable = std.math.big.int.Mutable;
const Type = @import("../type.zig").Type;
const Value = @import("../value.zig").Value;
const TypedValue = @import("../TypedValue.zig");
const ir = @import("../ir.zig");
/// These are instructions that correspond to the ZIR text format. See `ir.Inst` for
@ -462,6 +463,7 @@ pub const Module = struct {
switch (decl.tag) {
.breakpoint => return self.writeInstToStreamGeneric(stream, .breakpoint, decl, inst_table),
.call => return self.writeInstToStreamGeneric(stream, .call, decl, inst_table),
.declref => return self.writeInstToStreamGeneric(stream, .declref, decl, inst_table),
.str => return self.writeInstToStreamGeneric(stream, .str, decl, inst_table),
.int => return self.writeInstToStreamGeneric(stream, .int, decl, inst_table),
.ptrtoint => return self.writeInstToStreamGeneric(stream, .ptrtoint, decl, inst_table),
@ -576,6 +578,7 @@ pub fn parse(allocator: *Allocator, source: [:0]const u8) Allocator.Error!Module
.source = source,
.global_name_map = &global_name_map,
.decls = .{},
.unnamed_index = 0,
};
errdefer parser.arena.deinit();
@ -601,6 +604,7 @@ const Parser = struct {
decls: std.ArrayListUnmanaged(*Inst),
global_name_map: *std.StringHashMap(usize),
error_msg: ?ErrorMsg = null,
unnamed_index: usize,
const Body = struct {
instructions: std.ArrayList(*Inst),
@ -626,12 +630,12 @@ const Parser = struct {
skipSpace(self);
try requireEatBytes(self, "=");
skipSpace(self);
- const inst = try parseInstruction(self, &body_context);
+ const inst = try parseInstruction(self, &body_context, ident[1..]);
const ident_index = body_context.instructions.items.len;
if (try body_context.name_map.put(ident, ident_index)) |_| {
return self.fail("redefinition of identifier '{}'", .{ident});
}
- try body_context.instructions.append(self.allocator, inst);
+ try body_context.instructions.append(inst);
continue;
},
' ', '\n' => continue,
@ -712,7 +716,7 @@ const Parser = struct {
skipSpace(self);
try requireEatBytes(self, "=");
skipSpace(self);
- const inst = try parseInstruction(self, null);
+ const inst = try parseInstruction(self, null, ident[1..]);
const ident_index = self.decls.items.len;
if (try self.global_name_map.put(ident, ident_index)) |_| {
return self.fail("redefinition of identifier '{}'", .{ident});
@ -781,12 +785,12 @@ const Parser = struct {
return error.ParseFailure;
}
- fn parseInstruction(self: *Parser, body_ctx: ?*Body) InnerError!*Inst {
+ fn parseInstruction(self: *Parser, body_ctx: ?*Body, name: []const u8) InnerError!*Inst {
const fn_name = try skipToAndOver(self, '(');
inline for (@typeInfo(Inst.Tag).Enum.fields) |field| {
if (mem.eql(u8, field.name, fn_name)) {
const tag = @field(Inst.Tag, field.name);
- return parseInstructionGeneric(self, field.name, Inst.TagToType(tag), body_ctx);
+ return parseInstructionGeneric(self, field.name, Inst.TagToType(tag), body_ctx, name);
}
}
return self.fail("unknown instruction '{}'", .{fn_name});
@ -797,9 +801,11 @@ const Parser = struct {
comptime fn_name: []const u8,
comptime InstType: type,
body_ctx: ?*Body,
- ) !*Inst {
+ inst_name: []const u8,
+ ) InnerError!*Inst {
const inst_specific = try self.arena.allocator.create(InstType);
inst_specific.base = .{
.name = inst_name,
.src = self.i,
.tag = InstType.base_tag,
};
@ -885,7 +891,7 @@ const Parser = struct {
var instructions = std.ArrayList(*Inst).init(&self.arena.allocator);
while (true) {
skipSpace(self);
- try instructions.append(self.allocator, try parseParameterInst(self, body_ctx));
+ try instructions.append(try parseParameterInst(self, body_ctx));
skipSpace(self);
if (!eatByte(self, ',')) break;
}
@ -930,13 +936,21 @@ const Parser = struct {
} else {
const name = try self.arena.allocator.create(Inst.Str);
name.* = .{
- .base = .{ .src = src, .tag = Inst.Str.base_tag },
+ .base = .{
+ .name = try self.generateName(),
+ .src = src,
+ .tag = Inst.Str.base_tag,
+ },
.positionals = .{ .bytes = ident },
.kw_args = .{},
};
const declref = try self.arena.allocator.create(Inst.DeclRef);
declref.* = .{
- .base = .{ .src = src, .tag = Inst.DeclRef.base_tag },
+ .base = .{
+ .name = try self.generateName(),
+ .src = src,
+ .tag = Inst.DeclRef.base_tag,
+ },
.positionals = .{ .name = &name.base },
.kw_args = .{},
};
@ -949,25 +963,31 @@ const Parser = struct {
return self.decls.items[kv.value];
}
}
fn generateName(self: *Parser) ![]u8 {
const result = try std.fmt.allocPrint(&self.arena.allocator, "unnamed${}", .{self.unnamed_index});
self.unnamed_index += 1;
return result;
}
};
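Since every `text.Inst` now carries a `name`, instructions the parser synthesizes (such as the `str`/`declref` pair built for a bare `@identifier` reference) need generated names; `generateName` hands out `unnamed$0`, `unnamed$1`, and so on from a per-parser counter. A standalone sketch of the same scheme (hypothetical helper, any allocator):

const std = @import("std");

fn generateName(allocator: *std.mem.Allocator, unnamed_index: *usize) ![]u8 {
    const result = try std.fmt.allocPrint(allocator, "unnamed${}", .{unnamed_index.*});
    unnamed_index.* += 1;
    return result;
}

test "generated instruction names are unique" {
    var counter: usize = 0;
    const a = try generateName(std.testing.allocator, &counter);
    defer std.testing.allocator.free(a);
    const b = try generateName(std.testing.allocator, &counter);
    defer std.testing.allocator.free(b);
    std.debug.assert(!std.mem.eql(u8, a, b)); // "unnamed$0" vs "unnamed$1"
}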
pub fn emit_zir(allocator: *Allocator, old_module: ir.Module) !Module {
var ctx: EmitZIR = .{
.allocator = allocator,
- .decls = std.ArrayList(*Inst).init(allocator),
+ .decls = .{},
.decl_table = std.AutoHashMap(*ir.Inst, *Inst).init(allocator),
.arena = std.heap.ArenaAllocator.init(allocator),
.old_module = &old_module,
};
- defer ctx.decls.deinit();
+ defer ctx.decls.deinit(allocator);
defer ctx.decl_table.deinit();
errdefer ctx.arena.deinit();
try ctx.emit();
return Module{
- .decls = ctx.decls.toOwnedSlice(),
- .arena = ctx.arena,
+ .decls = ctx.decls.toOwnedSlice(allocator),
+ .arena = ctx.arena.state,
};
}
@ -975,16 +995,24 @@ const EmitZIR = struct {
allocator: *Allocator,
arena: std.heap.ArenaAllocator,
old_module: *const ir.Module,
- decls: std.ArrayList(*Inst),
+ decls: std.ArrayListUnmanaged(*Inst),
decl_table: std.AutoHashMap(*ir.Inst, *Inst),
fn emit(self: *EmitZIR) !void {
- for (self.old_module.exports) |module_export| {
- const export_value = try self.emitTypedValue(module_export.src, module_export.typed_value);
- const symbol_name = try self.emitStringLiteral(module_export.src, module_export.name);
+ var it = self.old_module.decl_exports.iterator();
+ while (it.next()) |kv| {
+ const decl = kv.key;
+ const exports = kv.value;
+ const export_value = try self.emitTypedValue(decl.src, decl.typed_value.most_recent.typed_value);
+ for (exports) |module_export| {
+ const symbol_name = try self.emitStringLiteral(module_export.src, module_export.options.name);
const export_inst = try self.arena.allocator.create(Inst.Export);
export_inst.* = .{
- .base = .{ .src = module_export.src, .tag = Inst.Export.base_tag },
+ .base = .{
+ .name = try self.autoName(),
+ .src = module_export.src,
+ .tag = Inst.Export.base_tag,
+ },
.positionals = .{
.symbol_name = symbol_name,
.value = export_value,
@ -994,6 +1022,7 @@ const EmitZIR = struct {
try self.decls.append(self.allocator, &export_inst.base);
}
}
}
fn resolveInst(self: *EmitZIR, inst_table: *const std.AutoHashMap(*ir.Inst, *Inst), inst: *ir.Inst) !*Inst {
if (inst.cast(ir.Inst.Constant)) |const_inst| {
@ -1012,7 +1041,11 @@ const EmitZIR = struct {
const big_int_space = try self.arena.allocator.create(Value.BigIntSpace);
const int_inst = try self.arena.allocator.create(Inst.Int);
int_inst.* = .{
- .base = .{ .src = src, .tag = Inst.Int.base_tag },
+ .base = .{
+ .name = try self.autoName(),
+ .src = src,
+ .tag = Inst.Int.base_tag,
+ },
.positionals = .{
.int = val.toBigInt(big_int_space),
},
@ -1022,7 +1055,7 @@ const EmitZIR = struct {
return &int_inst.base;
}
- fn emitTypedValue(self: *EmitZIR, src: usize, typed_value: ir.TypedValue) Allocator.Error!*Inst {
+ fn emitTypedValue(self: *EmitZIR, src: usize, typed_value: TypedValue) Allocator.Error!*Inst {
switch (typed_value.ty.zigTypeTag()) {
.Pointer => {
const ptr_elem_type = typed_value.ty.elemType();
@ -1044,7 +1077,11 @@ const EmitZIR = struct {
.Int => {
const as_inst = try self.arena.allocator.create(Inst.As);
as_inst.* = .{
- .base = .{ .src = src, .tag = Inst.As.base_tag },
+ .base = .{
+ .name = try self.autoName(),
+ .src = src,
+ .tag = Inst.As.base_tag,
+ },
.positionals = .{
.dest_type = try self.emitType(src, typed_value.ty),
.value = try self.emitComptimeIntVal(src, typed_value.val),
@ -1060,8 +1097,7 @@ const EmitZIR = struct {
return self.emitType(src, ty);
},
.Fn => {
- const index = typed_value.val.cast(Value.Payload.Function).?.index;
- const module_fn = self.old_module.fns[index];
+ const module_fn = typed_value.val.cast(Value.Payload.Function).?.func;
var inst_table = std.AutoHashMap(*ir.Inst, *Inst).init(self.allocator);
defer inst_table.deinit();
@ -1069,7 +1105,7 @@ const EmitZIR = struct {
var instructions = std.ArrayList(*Inst).init(self.allocator);
defer instructions.deinit();
- try self.emitBody(module_fn.body, &inst_table, &instructions);
+ try self.emitBody(module_fn.analysis.success, &inst_table, &instructions);
const fn_type = try self.emitType(src, module_fn.fn_type);
@ -1078,7 +1114,11 @@ const EmitZIR = struct {
const fn_inst = try self.arena.allocator.create(Inst.Fn);
fn_inst.* = .{
- .base = .{ .src = src, .tag = Inst.Fn.base_tag },
+ .base = .{
+ .name = try self.autoName(),
+ .src = src,
+ .tag = Inst.Fn.base_tag,
+ },
.positionals = .{
.fn_type = fn_type,
.body = .{ .instructions = arena_instrs },
@ -1095,7 +1135,11 @@ const EmitZIR = struct {
fn emitTrivial(self: *EmitZIR, src: usize, comptime T: type) Allocator.Error!*Inst {
const new_inst = try self.arena.allocator.create(T);
new_inst.* = .{
- .base = .{ .src = src, .tag = T.base_tag },
+ .base = .{
+ .name = try self.autoName(),
+ .src = src,
+ .tag = T.base_tag,
+ },
.positionals = .{},
.kw_args = .{},
};
@ -1120,7 +1164,11 @@ const EmitZIR = struct {
elem.* = try self.resolveInst(inst_table, old_inst.args.args[i]);
}
new_inst.* = .{
- .base = .{ .src = inst.src, .tag = Inst.Call.base_tag },
+ .base = .{
+ .name = try self.autoName(),
+ .src = inst.src,
+ .tag = Inst.Call.base_tag,
+ },
.positionals = .{
.func = try self.resolveInst(inst_table, old_inst.args.func),
.args = args,
@ -1152,7 +1200,11 @@ const EmitZIR = struct {
}
new_inst.* = .{
- .base = .{ .src = inst.src, .tag = Inst.Asm.base_tag },
+ .base = .{
+ .name = try self.autoName(),
+ .src = inst.src,
+ .tag = Inst.Asm.base_tag,
+ },
.positionals = .{
.asm_source = try self.emitStringLiteral(inst.src, old_inst.args.asm_source),
.return_type = try self.emitType(inst.src, inst.ty),
@ -1174,7 +1226,11 @@ const EmitZIR = struct {
const old_inst = inst.cast(ir.Inst.PtrToInt).?;
const new_inst = try self.arena.allocator.create(Inst.PtrToInt);
new_inst.* = .{
- .base = .{ .src = inst.src, .tag = Inst.PtrToInt.base_tag },
+ .base = .{
+ .name = try self.autoName(),
+ .src = inst.src,
+ .tag = Inst.PtrToInt.base_tag,
+ },
.positionals = .{
.ptr = try self.resolveInst(inst_table, old_inst.args.ptr),
},
@ -1186,7 +1242,11 @@ const EmitZIR = struct {
const old_inst = inst.cast(ir.Inst.BitCast).?;
const new_inst = try self.arena.allocator.create(Inst.BitCast);
new_inst.* = .{
- .base = .{ .src = inst.src, .tag = Inst.BitCast.base_tag },
+ .base = .{
+ .name = try self.autoName(),
+ .src = inst.src,
+ .tag = Inst.BitCast.base_tag,
+ },
.positionals = .{
.dest_type = try self.emitType(inst.src, inst.ty),
.operand = try self.resolveInst(inst_table, old_inst.args.operand),
@ -1199,7 +1259,11 @@ const EmitZIR = struct {
const old_inst = inst.cast(ir.Inst.Cmp).?;
const new_inst = try self.arena.allocator.create(Inst.Cmp);
new_inst.* = .{
- .base = .{ .src = inst.src, .tag = Inst.Cmp.base_tag },
+ .base = .{
+ .name = try self.autoName(),
+ .src = inst.src,
+ .tag = Inst.Cmp.base_tag,
+ },
.positionals = .{
.lhs = try self.resolveInst(inst_table, old_inst.args.lhs),
.rhs = try self.resolveInst(inst_table, old_inst.args.rhs),
@ -1223,7 +1287,11 @@ const EmitZIR = struct {
const new_inst = try self.arena.allocator.create(Inst.CondBr);
new_inst.* = .{
- .base = .{ .src = inst.src, .tag = Inst.CondBr.base_tag },
+ .base = .{
+ .name = try self.autoName(),
+ .src = inst.src,
+ .tag = Inst.CondBr.base_tag,
+ },
.positionals = .{
.condition = try self.resolveInst(inst_table, old_inst.args.condition),
.true_body = .{ .instructions = true_body.toOwnedSlice() },
@ -1237,7 +1305,11 @@ const EmitZIR = struct {
const old_inst = inst.cast(ir.Inst.IsNull).?;
const new_inst = try self.arena.allocator.create(Inst.IsNull);
new_inst.* = .{
- .base = .{ .src = inst.src, .tag = Inst.IsNull.base_tag },
+ .base = .{
+ .name = try self.autoName(),
+ .src = inst.src,
+ .tag = Inst.IsNull.base_tag,
+ },
.positionals = .{
.operand = try self.resolveInst(inst_table, old_inst.args.operand),
},
@ -1249,7 +1321,11 @@ const EmitZIR = struct {
const old_inst = inst.cast(ir.Inst.IsNonNull).?;
const new_inst = try self.arena.allocator.create(Inst.IsNonNull);
new_inst.* = .{
- .base = .{ .src = inst.src, .tag = Inst.IsNonNull.base_tag },
+ .base = .{
+ .name = try self.autoName(),
+ .src = inst.src,
+ .tag = Inst.IsNonNull.base_tag,
+ },
.positionals = .{
.operand = try self.resolveInst(inst_table, old_inst.args.operand),
},
@ -1258,7 +1334,7 @@ const EmitZIR = struct {
break :blk &new_inst.base;
},
};
- try instructions.append(self.allocator, new_inst);
+ try instructions.append(new_inst);
try inst_table.putNoClobber(inst, new_inst);
}
}
@ -1301,7 +1377,11 @@ const EmitZIR = struct {
const fntype_inst = try self.arena.allocator.create(Inst.FnType);
fntype_inst.* = .{
- .base = .{ .src = src, .tag = Inst.FnType.base_tag },
+ .base = .{
+ .name = try self.autoName(),
+ .src = src,
+ .tag = Inst.FnType.base_tag,
+ },
.positionals = .{
.param_types = emitted_params,
.return_type = try self.emitType(src, ty.fnReturnType()),
@ -1318,10 +1398,18 @@ const EmitZIR = struct {
}
}
fn autoName(self: *EmitZIR) ![]u8 {
return std.fmt.allocPrint(&self.arena.allocator, "{}", .{self.decls.items.len});
}
fn emitPrimitiveType(self: *EmitZIR, src: usize, tag: Inst.Primitive.BuiltinType) !*Inst {
const primitive_inst = try self.arena.allocator.create(Inst.Primitive);
primitive_inst.* = .{
- .base = .{ .src = src, .tag = Inst.Primitive.base_tag },
+ .base = .{
+ .name = try self.autoName(),
+ .src = src,
+ .tag = Inst.Primitive.base_tag,
+ },
.positionals = .{
.tag = tag,
},
@ -1334,7 +1422,11 @@ const EmitZIR = struct {
fn emitStringLiteral(self: *EmitZIR, src: usize, str: []const u8) !*Inst {
const str_inst = try self.arena.allocator.create(Inst.Str);
str_inst.* = .{
- .base = .{ .src = src, .tag = Inst.Str.base_tag },
+ .base = .{
+ .name = try self.autoName(),
+ .src = src,
+ .tag = Inst.Str.base_tag,
+ },
.positionals = .{
.bytes = str,
},

View File

@ -153,7 +153,7 @@ pub const ElfFile = struct {
};
pub const Export = struct {
- sym_index: usize,
+ sym_index: ?usize = null,
};
pub fn deinit(self: *ElfFile) void {
@ -249,6 +249,11 @@ pub const ElfFile = struct {
return @intCast(u32, result);
}
fn getString(self: *ElfFile, str_off: u32) []const u8 {
assert(str_off < self.shstrtab.items.len);
return mem.spanZ(@ptrCast([*:0]const u8, self.shstrtab.items.ptr + str_off));
}
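`getString` is the inverse of `makeString`: section and symbol names live in `.shstrtab` as NUL-terminated byte runs and are referred to by byte offset. A small sketch of the layout it walks (illustrative buffer, not from this commit):

test "shstrtab offset lookup sketch" {
    const shstrtab: []const u8 = "\x00.text\x00.symtab\x00";
    const str_off: u32 = 1; // offset of ".text"; offset 0 is the empty name
    const name = mem.spanZ(@ptrCast([*:0]const u8, shstrtab.ptr + str_off));
    assert(mem.eql(u8, name, ".text"));
}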
fn updateString(self: *ElfFile, old_str_off: u32, new_name: []const u8) !u32 {
const existing_name = self.getString(old_str_off);
if (mem.eql(u8, existing_name, new_name)) {
@ -418,6 +423,14 @@ pub const ElfFile = struct {
const foreign_endian = self.options.target.cpu.arch.endian() != std.Target.current.cpu.arch.endian();
if (self.phdr_table_dirty) {
const phsize: u64 = switch (self.ptr_width) {
.p32 => @sizeOf(elf.Elf32_Phdr),
.p64 => @sizeOf(elf.Elf64_Phdr),
};
const phalign: u16 = switch (self.ptr_width) {
.p32 => @alignOf(elf.Elf32_Phdr),
.p64 => @alignOf(elf.Elf64_Phdr),
};
const allocated_size = self.allocatedSize(self.phdr_table_offset.?);
const needed_size = self.program_headers.items.len * phsize;
@ -426,11 +439,10 @@ pub const ElfFile = struct {
self.phdr_table_offset = self.findFreeSpace(needed_size, phalign);
}
- const allocator = self.program_headers.allocator;
switch (self.ptr_width) {
.p32 => {
- const buf = try allocator.alloc(elf.Elf32_Phdr, self.program_headers.items.len);
- defer allocator.free(buf);
+ const buf = try self.allocator.alloc(elf.Elf32_Phdr, self.program_headers.items.len);
+ defer self.allocator.free(buf);
for (buf) |*phdr, i| {
phdr.* = progHeaderTo32(self.program_headers.items[i]);
@ -441,8 +453,8 @@ pub const ElfFile = struct {
try self.file.pwriteAll(mem.sliceAsBytes(buf), self.phdr_table_offset.?);
},
.p64 => {
- const buf = try allocator.alloc(elf.Elf64_Phdr, self.program_headers.items.len);
- defer allocator.free(buf);
+ const buf = try self.allocator.alloc(elf.Elf64_Phdr, self.program_headers.items.len);
+ defer self.allocator.free(buf);
for (buf) |*phdr, i| {
phdr.* = self.program_headers.items[i];
@ -478,12 +490,20 @@ pub const ElfFile = struct {
}
}
if (self.shdr_table_dirty) {
const shsize: u64 = switch (self.ptr_width) {
.p32 => @sizeOf(elf.Elf32_Shdr),
.p64 => @sizeOf(elf.Elf64_Shdr),
};
const shalign: u16 = switch (self.ptr_width) {
.p32 => @alignOf(elf.Elf32_Shdr),
.p64 => @alignOf(elf.Elf64_Shdr),
};
const allocated_size = self.allocatedSize(self.shdr_table_offset.?);
- const needed_size = self.sections.items.len * phsize;
+ const needed_size = self.sections.items.len * shsize;
if (needed_size > allocated_size) {
self.shdr_table_offset = null; // free the space
- self.shdr_table_offset = self.findFreeSpace(needed_size, phalign);
+ self.shdr_table_offset = self.findFreeSpace(needed_size, shalign);
}
switch (self.ptr_width) {
@ -719,7 +739,7 @@ pub const ElfFile = struct {
defer code.deinit();
const typed_value = decl.typed_value.most_recent.typed_value;
- const err_msg = try codegen.generateSymbol(typed_value, module, &code);
+ const err_msg = try codegen.generateSymbol(typed_value, module.*, &code);
if (err_msg) |em| {
decl.analysis = .codegen_failure;
_ = try module.failed_decls.put(decl, em);
@ -751,15 +771,15 @@ pub const ElfFile = struct {
try self.writeSymbol(decl.link.local_sym_index);
break :blk file_offset;
} else {
- try self.symbols.ensureCapacity(self.symbols.items.len + 1);
- try self.offset_table.ensureCapacity(self.offset_table.items.len + 1);
+ try self.symbols.ensureCapacity(self.allocator, self.symbols.items.len + 1);
+ try self.offset_table.ensureCapacity(self.allocator, self.offset_table.items.len + 1);
const decl_name = mem.spanZ(decl.name);
const name_str_index = try self.makeString(decl_name);
const new_block = try self.allocateTextBlock(code_size);
const local_sym_index = self.symbols.items.len;
const offset_table_index = self.offset_table.items.len;
- self.symbols.appendAssumeCapacity(self.allocator, .{
+ self.symbols.appendAssumeCapacity(.{
.st_name = name_str_index,
.st_info = (elf.STB_LOCAL << 4) | stt_bits,
.st_other = 0,
@ -767,9 +787,9 @@ pub const ElfFile = struct {
.st_value = new_block.vaddr,
.st_size = code_size,
});
- errdefer self.symbols.shrink(self.symbols.items.len - 1);
- self.offset_table.appendAssumeCapacity(self.allocator, new_block.vaddr);
- errdefer self.offset_table.shrink(self.offset_table.items.len - 1);
+ errdefer self.symbols.shrink(self.allocator, self.symbols.items.len - 1);
+ self.offset_table.appendAssumeCapacity(new_block.vaddr);
+ errdefer self.offset_table.shrink(self.allocator, self.offset_table.items.len - 1);
try self.writeSymbol(local_sym_index);
try self.writeOffsetTableEntry(offset_table_index);
@ -796,11 +816,12 @@ pub const ElfFile = struct {
self: *ElfFile,
module: *ir.Module,
decl: *const ir.Module.Decl,
- exports: []const *const Export,
+ exports: []const *ir.Module.Export,
) !void {
- try self.symbols.ensureCapacity(self.symbols.items.len + exports.len);
+ try self.symbols.ensureCapacity(self.allocator, self.symbols.items.len + exports.len);
const typed_value = decl.typed_value.most_recent.typed_value;
- const decl_sym = self.symbols.items[decl.link.local_sym_index.?];
+ assert(decl.link.local_sym_index != 0);
+ const decl_sym = self.symbols.items[decl.link.local_sym_index];
for (exports) |exp| {
if (exp.options.section) |section_name| {
@ -808,15 +829,16 @@ pub const ElfFile = struct {
try module.failed_exports.ensureCapacity(module.failed_exports.size + 1);
module.failed_exports.putAssumeCapacityNoClobber(
exp,
try ir.ErrorMsg.create(0, "Unimplemented: ExportOptions.section", .{}),
try ir.ErrorMsg.create(self.allocator, 0, "Unimplemented: ExportOptions.section", .{}),
);
continue;
}
}
- const stb_bits = switch (exp.options.linkage) {
+ const stb_bits: u8 = switch (exp.options.linkage) {
.Internal => elf.STB_LOCAL,
.Strong => blk: {
if (mem.eql(u8, exp.options.name, "_start")) {
- self.entry_addr = decl_symbol.vaddr;
+ self.entry_addr = decl_sym.st_value;
}
break :blk elf.STB_GLOBAL;
},
@ -825,8 +847,9 @@ pub const ElfFile = struct {
try module.failed_exports.ensureCapacity(module.failed_exports.size + 1);
module.failed_exports.putAssumeCapacityNoClobber(
exp,
try ir.ErrorMsg.create(0, "Unimplemented: GlobalLinkage.LinkOnce", .{}),
try ir.ErrorMsg.create(self.allocator, 0, "Unimplemented: GlobalLinkage.LinkOnce", .{}),
);
continue;
},
};
const stt_bits: u8 = @truncate(u4, decl_sym.st_info);
@ -844,15 +867,15 @@ pub const ElfFile = struct {
} else {
const name = try self.makeString(exp.options.name);
const i = self.symbols.items.len;
- self.symbols.appendAssumeCapacity(self.allocator, .{
- .st_name = sn.name,
+ self.symbols.appendAssumeCapacity(.{
+ .st_name = name,
.st_info = (stb_bits << 4) | stt_bits,
.st_other = 0,
.st_shndx = self.text_section_index.?,
.st_value = decl_sym.st_value,
.st_size = decl_sym.st_size,
});
- errdefer self.symbols.shrink(self.symbols.items.len - 1);
+ errdefer self.symbols.shrink(self.allocator, self.symbols.items.len - 1);
try self.writeSymbol(i);
self.symbol_count_dirty = true;
@ -946,10 +969,15 @@ pub const ElfFile = struct {
}
fn writeSymbol(self: *ElfFile, index: usize) !void {
assert(index != 0);
const syms_sect = &self.sections.items[self.symtab_section_index.?];
// Make sure we are not pointlessly writing symbol data that will have to get relocated
// due to running out of space.
if (self.symbol_count_dirty) {
const sym_size: u64 = switch (self.ptr_width) {
.p32 => @sizeOf(elf.Elf32_Sym),
.p64 => @sizeOf(elf.Elf64_Sym),
};
const allocated_size = self.allocatedSize(syms_sect.sh_offset);
const needed_size = self.symbols.items.len * sym_size;
if (needed_size > allocated_size) {
@ -990,11 +1018,15 @@ pub const ElfFile = struct {
}
fn writeAllSymbols(self: *ElfFile) !void {
const small_ptr = self.ptr_width == .p32;
const syms_sect = &self.sections.items[self.symtab_section_index.?];
- const sym_align: u16 = if (small_ptr) @alignOf(elf.Elf32_Sym) else @alignOf(elf.Elf64_Sym);
- const sym_size: u64 = if (small_ptr) @sizeOf(elf.Elf32_Sym) else @sizeOf(elf.Elf64_Sym);
+ const sym_align: u16 = switch (self.ptr_width) {
+ .p32 => @alignOf(elf.Elf32_Sym),
+ .p64 => @alignOf(elf.Elf64_Sym),
+ };
+ const sym_size: u64 = switch (self.ptr_width) {
+ .p32 => @sizeOf(elf.Elf32_Sym),
+ .p64 => @sizeOf(elf.Elf64_Sym),
+ };
const allocated_size = self.allocatedSize(syms_sect.sh_offset);
const needed_size = self.symbols.items.len * sym_size;
if (needed_size > allocated_size) {

View File

@ -67,6 +67,7 @@ pub const Value = extern union {
int_big_positive,
int_big_negative,
function,
ref_val,
decl_ref,
elem_ptr,
bytes,
@ -158,6 +159,11 @@ pub const Value = extern union {
.int_big_positive => return out_stream.print("{}", .{val.cast(Payload.IntBigPositive).?.asBigInt()}),
.int_big_negative => return out_stream.print("{}", .{val.cast(Payload.IntBigNegative).?.asBigInt()}),
.function => return out_stream.writeAll("(function)"),
.ref_val => {
const ref_val = val.cast(Payload.RefVal).?;
try out_stream.writeAll("&const ");
val = ref_val.val;
},
.decl_ref => return out_stream.writeAll("(decl ref)"),
.elem_ptr => {
const elem_ptr = val.cast(Payload.ElemPtr).?;
@ -229,6 +235,7 @@ pub const Value = extern union {
.int_big_positive,
.int_big_negative,
.function,
.ref_val,
.decl_ref,
.elem_ptr,
.bytes,
@ -276,6 +283,7 @@ pub const Value = extern union {
.bool_false,
.null_value,
.function,
.ref_val,
.decl_ref,
.elem_ptr,
.bytes,
@ -333,6 +341,7 @@ pub const Value = extern union {
.bool_false,
.null_value,
.function,
.ref_val,
.decl_ref,
.elem_ptr,
.bytes,
@ -391,6 +400,7 @@ pub const Value = extern union {
.bool_false,
.null_value,
.function,
.ref_val,
.decl_ref,
.elem_ptr,
.bytes,
@ -454,6 +464,7 @@ pub const Value = extern union {
.bool_false,
.null_value,
.function,
.ref_val,
.decl_ref,
.elem_ptr,
.bytes,
@ -546,6 +557,7 @@ pub const Value = extern union {
.bool_false,
.null_value,
.function,
.ref_val,
.decl_ref,
.elem_ptr,
.bytes,
@ -600,6 +612,7 @@ pub const Value = extern union {
.bool_false,
.null_value,
.function,
.ref_val,
.decl_ref,
.elem_ptr,
.bytes,
@ -655,7 +668,8 @@ pub const Value = extern union {
}
/// Asserts the value is a pointer and dereferences it.
- pub fn pointerDeref(self: Value, module: *ir.Module) !Value {
+ /// Returns error.AnalysisFail if the pointer points to a Decl that failed semantic analysis.
+ pub fn pointerDeref(self: Value, allocator: *Allocator) error{ AnalysisFail, OutOfMemory }!Value {
return switch (self.tag()) {
.ty,
.u8_type,
@ -704,21 +718,19 @@ pub const Value = extern union {
=> unreachable,
.the_one_possible_value => Value.initTag(.the_one_possible_value),
- .decl_ref => {
- const index = self.cast(Payload.DeclRef).?.index;
- return module.getDeclValue(index);
- },
+ .ref_val => self.cast(Payload.RefVal).?.val,
+ .decl_ref => self.cast(Payload.DeclRef).?.decl.value(),
.elem_ptr => {
- const elem_ptr = self.cast(ElemPtr).?;
- const array_val = try elem_ptr.array_ptr.pointerDeref(module);
- return self.elemValue(array_val, elem_ptr.index);
+ const elem_ptr = self.cast(Payload.ElemPtr).?;
+ const array_val = try elem_ptr.array_ptr.pointerDeref(allocator);
+ return array_val.elemValue(allocator, elem_ptr.index);
},
};
}
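With the new tags, dereferencing is a pure value operation: a `ref_val` unwraps to the value it wraps, a `decl_ref` consults the Decl (surfacing `error.AnalysisFail` if that Decl never produced a value), and only `elem_ptr` needs the allocator, to materialize an element value. An illustrative sketch (payload field names follow the definitions below; the integer payload type is assumed):

fn derefSketch(allocator: *Allocator) !Value {
    // &const 42: a pointer to an immutable comptime value.
    var int_payload = Value.Payload.Int_u64{ .int = 42 };
    var ref_payload = Value.Payload.RefVal{ .val = Value.initPayload(&int_payload.base) };
    const ptr = Value.initPayload(&ref_payload.base);
    return ptr.pointerDeref(allocator); // returns the wrapped 42 without allocating
}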
/// Asserts the value is a single-item pointer to an array, or an array,
/// or an unknown-length pointer, and returns the element value at the index.
- pub fn elemValue(self: Value, index: usize) Value {
+ pub fn elemValue(self: Value, allocator: *Allocator, index: usize) error{OutOfMemory}!Value {
switch (self.tag()) {
.ty,
.u8_type,
@ -764,6 +776,7 @@ pub const Value = extern union {
.int_big_negative,
.undef,
.elem_ptr,
.ref_val,
.decl_ref,
=> unreachable,
@ -838,6 +851,7 @@ pub const Value = extern union {
.int_i64,
.int_big_positive,
.int_big_negative,
.ref_val,
.decl_ref,
.elem_ptr,
.bytes,
@ -896,11 +910,16 @@ pub const Value = extern union {
elem_type: *Type,
};
/// Represents a pointer to another immutable value.
pub const RefVal = struct {
base: Payload = Payload{ .tag = .ref_val },
val: Value,
};
/// Represents a pointer to a decl, not the value of the decl.
pub const DeclRef = struct {
base: Payload = Payload{ .tag = .decl_ref },
- /// Index into the Module's decls list
- index: usize,
+ decl: *ir.Module.Decl,
};
pub const ElemPtr = struct {