self-hosted: basic IR pass2

Andrew Kelley 2018-07-13 21:56:38 -04:00
parent c87102c304
commit e78b1b810f
7 changed files with 706 additions and 203 deletions

View File

@@ -14,6 +14,13 @@ pub const Color = enum {
 pub const Span = struct {
     first: ast.TokenIndex,
     last: ast.TokenIndex,
+
+    pub fn token(i: TokenIndex) Span {
+        return Span {
+            .first = i,
+            .last = i,
+        };
+    }
 };

 pub const Msg = struct {
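
The new Span.token helper collapses the common "error points at one token" case into a single call. A minimal usage sketch (fn_proto.fn_token is borrowed from the FnProto error path later in this commit; any ast.TokenIndex works):

    // Build the degenerate one-token range used by most compile errors.
    const span = Span.token(fn_proto.fn_token);
    assert(span.first == span.last);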

View File

@@ -9,31 +9,34 @@ const Type = Value.Type;
 const assert = std.debug.assert;
 const Token = std.zig.Token;
 const ParsedFile = @import("parsed_file.zig").ParsedFile;
+const Span = @import("errmsg.zig").Span;

 pub const LVal = enum {
     None,
     Ptr,
 };

-pub const Mut = enum {
-    Mut,
-    Const,
-};
-
-pub const Volatility = enum {
-    NonVolatile,
-    Volatile,
-};
-
 pub const IrVal = union(enum) {
     Unknown,
-    Known: *Value,
+    KnownType: *Type,
+    KnownValue: *Value,
+
+    const Init = enum {
+        Unknown,
+        NoReturn,
+        Void,
+    };

     pub fn dump(self: IrVal) void {
         switch (self) {
             IrVal.Unknown => std.debug.warn("Unknown"),
-            IrVal.Known => |value| {
-                std.debug.warn("Known(");
+            IrVal.KnownType => |typeof| {
+                std.debug.warn("KnownType(");
+                typeof.dump();
+                std.debug.warn(")");
+            },
+            IrVal.KnownValue => |value| {
+                std.debug.warn("KnownValue(");
                 value.dump();
                 std.debug.warn(")");
             },
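
IrVal now distinguishes three states per instruction instead of two: Unknown before analysis has learned anything, KnownType when only the result type is resolved, and KnownValue when the result is fully known at compile time. A sketch of how consuming code can switch on the union (inst stands for any *Instruction; only names introduced above are used):

    // Prefer the comptime-known value, fall back to the type, and treat
    // Unknown as "not yet analyzed".
    switch (inst.val) {
        IrVal.KnownValue => |value| value.dump(),
        IrVal.KnownType => |typeof| typeof.dump(),
        IrVal.Unknown => std.debug.warn("Unknown"),
    }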
@@ -46,10 +49,18 @@ pub const Instruction = struct {
     scope: *Scope,
     debug_id: usize,
     val: IrVal,
+    ref_count: usize,
+    span: Span,

     /// true if this instruction was generated by zig and not from user code
     is_generated: bool,

+    /// the instruction that is derived from this one in analysis
+    child: ?*Instruction,
+
+    /// the instruction that this one derives from in analysis
+    parent: ?*Instruction,
+
     pub fn cast(base: *Instruction, comptime T: type) ?*T {
         if (base.id == comptime typeToId(T)) {
             return @fieldParentPtr(T, "base", base);
@@ -81,6 +92,47 @@ pub const Instruction = struct {
         unreachable;
     }

+    pub fn hasSideEffects(base: *const Instruction) bool {
+        comptime var i = 0;
+        inline while (i < @memberCount(Id)) : (i += 1) {
+            if (base.id == @field(Id, @memberName(Id, i))) {
+                const T = @field(Instruction, @memberName(Id, i));
+                return @fieldParentPtr(T, "base", base).hasSideEffects();
+            }
+        }
+        unreachable;
+    }
+
+    pub fn analyze(base: *Instruction, ira: *Analyze) Analyze.Error!*Instruction {
+        comptime var i = 0;
+        inline while (i < @memberCount(Id)) : (i += 1) {
+            if (base.id == @field(Id, @memberName(Id, i))) {
+                const T = @field(Instruction, @memberName(Id, i));
+                const new_inst = try @fieldParentPtr(T, "base", base).analyze(ira);
+                new_inst.linkToParent(base);
+                return new_inst;
+            }
+        }
+        unreachable;
+    }
+
+    fn getAsParam(param: *Instruction) !*Instruction {
+        const child = param.child orelse return error.SemanticAnalysisFailed;
+        switch (child.val) {
+            IrVal.Unknown => return error.SemanticAnalysisFailed,
+            else => return child,
+        }
+    }
+
+    /// asserts that the type is known
+    fn getKnownType(self: *Instruction) *Type {
+        switch (self.val) {
+            IrVal.KnownType => |typeof| return typeof,
+            IrVal.KnownValue => |value| return value.typeof,
+            IrVal.Unknown => unreachable,
+        }
+    }
+
     pub fn setGenerated(base: *Instruction) void {
         base.is_generated = true;
     }
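
Both hasSideEffects and analyze rely on the same comptime dispatch trick: every member of the Id enum is spelled exactly like the corresponding instruction struct nested in Instruction, so @field(Instruction, @memberName(Id, i)) recovers the concrete type and @fieldParentPtr downcasts base to it. Because the loop is an inline while over a comptime counter, it unrolls to a flat chain of branches; for hasSideEffects the generated code is roughly this sketch:

    // Approximate unrolling of the inline while in hasSideEffects:
    if (base.id == Id.Return) return @fieldParentPtr(Return, "base", base).hasSideEffects();
    if (base.id == Id.Const) return @fieldParentPtr(Const, "base", base).hasSideEffects();
    // ...one branch per remaining member of Id...
    unreachable;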
@@ -88,10 +140,18 @@ pub const Instruction = struct {
     pub fn isNoReturn(base: *const Instruction) bool {
         switch (base.val) {
             IrVal.Unknown => return false,
-            IrVal.Known => |x| return x.typeof.id == Type.Id.NoReturn,
+            IrVal.KnownValue => |x| return x.typeof.id == Type.Id.NoReturn,
+            IrVal.KnownType => |typeof| return typeof.id == Type.Id.NoReturn,
         }
     }

+    pub fn linkToParent(self: *Instruction, parent: *Instruction) void {
+        assert(self.parent == null);
+        assert(parent.child == null);
+        self.parent = parent;
+        parent.child = self;
+    }
+
     pub const Id = enum {
         Return,
         Const,
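
linkToParent records the 1:1 mapping between a pass1 instruction and its pass2 replacement, and its asserts guarantee that each instruction is analyzed at most once; getAsParam then follows child to reach the analyzed copy of an operand. An illustrative invariant check (old_inst and ira are stand-in names):

    // After Instruction.analyze dispatches and calls linkToParent:
    const new_inst = try old_inst.analyze(ira);
    assert(old_inst.child == new_inst);
    assert(new_inst.parent == old_inst);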
@@ -100,196 +160,231 @@ pub const Instruction = struct {
         CheckVoidStmt,
         Phi,
         Br,
+        AddImplicitReturnType,
     };

     pub const Const = struct {
         base: Instruction,
+        params: Params,

-        pub fn buildBool(irb: *Builder, scope: *Scope, val: bool) !*Instruction {
-            const inst = try irb.arena().create(Const{
-                .base = Instruction{
-                    .id = Instruction.Id.Const,
-                    .is_generated = false,
-                    .scope = scope,
-                    .debug_id = irb.next_debug_id,
-                    .val = IrVal{ .Known = &Value.Bool.get(irb.module, val).base },
-                },
-            });
-            irb.next_debug_id += 1;
-            try irb.current_basic_block.instruction_list.append(&inst.base);
-            return &inst.base;
-        }
+        const Params = struct {};

-        pub fn buildVoid(irb: *Builder, scope: *Scope, is_generated: bool) !*Instruction {
-            const inst = try irb.arena().create(Const{
-                .base = Instruction{
-                    .id = Instruction.Id.Const,
-                    .is_generated = is_generated,
-                    .scope = scope,
-                    .debug_id = irb.next_debug_id,
-                    .val = IrVal{ .Known = &Value.Void.get(irb.module).base },
-                },
-            });
-            irb.next_debug_id += 1;
-            try irb.current_basic_block.instruction_list.append(&inst.base);
-            return &inst.base;
-        }
+        // Use Builder.buildConst* methods, or, after building a Const instruction,
+        // manually set the ir_val field.
+        const ir_val_init = IrVal.Init.Unknown;

-        pub fn dump(inst: *const Const) void {
-            inst.base.val.Known.dump();
+        pub fn dump(self: *const Const) void {
+            self.base.val.KnownValue.dump();
+        }
+
+        pub fn hasSideEffects(self: *const Const) bool {
+            return false;
+        }
+
+        pub fn analyze(self: *const Const, ira: *Analyze) !*Instruction {
+            const new_inst = try ira.irb.build(Const, self.base.scope, self.base.span, Params{});
+            new_inst.val = IrVal{ .KnownValue = self.base.val.KnownValue.getRef() };
+            return new_inst;
         }
     };

     pub const Return = struct {
         base: Instruction,
-        return_value: *Instruction,
+        params: Params,

-        pub fn build(irb: *Builder, scope: *Scope, return_value: *Instruction) !*Instruction {
-            const inst = try irb.arena().create(Return{
-                .base = Instruction{
-                    .id = Instruction.Id.Return,
-                    .is_generated = false,
-                    .scope = scope,
-                    .debug_id = irb.next_debug_id,
-                    .val = IrVal{ .Known = &Value.Void.get(irb.module).base },
-                },
-                .return_value = return_value,
-            });
-            irb.next_debug_id += 1;
-            try irb.current_basic_block.instruction_list.append(&inst.base);
-            return &inst.base;
-        }
+        const Params = struct {
+            return_value: *Instruction,
+        };
+
+        const ir_val_init = IrVal.Init.NoReturn;

-        pub fn dump(inst: *const Return) void {
-            std.debug.warn("#{}", inst.return_value.debug_id);
+        pub fn dump(self: *const Return) void {
+            std.debug.warn("#{}", self.params.return_value.debug_id);
+        }
+
+        pub fn hasSideEffects(self: *const Return) bool {
+            return true;
+        }
+
+        pub fn analyze(self: *const Return, ira: *Analyze) !*Instruction {
+            const value = try self.params.return_value.getAsParam();
+            const casted_value = try ira.implicitCast(value, ira.explicit_return_type);
+
+            // TODO detect returning local variable address
+
+            return ira.irb.build(Return, self.base.scope, self.base.span, Params{ .return_value = casted_value });
         }
     };

     pub const Ref = struct {
         base: Instruction,
-        target: *Instruction,
-        mut: Mut,
-        volatility: Volatility,
+        params: Params,

-        pub fn build(
-            irb: *Builder,
-            scope: *Scope,
-            target: *Instruction,
-            mut: Mut,
-            volatility: Volatility,
-        ) !*Instruction {
-            const inst = try irb.arena().create(Ref{
-                .base = Instruction{
-                    .id = Instruction.Id.Ref,
-                    .is_generated = false,
-                    .scope = scope,
-                    .debug_id = irb.next_debug_id,
-                    .val = IrVal.Unknown,
-                },
-                .target = target,
-                .mut = mut,
-                .volatility = volatility,
-            });
-            irb.next_debug_id += 1;
-            try irb.current_basic_block.instruction_list.append(&inst.base);
-            return &inst.base;
-        }
+        const Params = struct {
+            target: *Instruction,
+            mut: Type.Pointer.Mut,
+            volatility: Type.Pointer.Vol,
+        };
+
+        const ir_val_init = IrVal.Init.Unknown;

         pub fn dump(inst: *const Ref) void {}
+
+        pub fn hasSideEffects(inst: *const Ref) bool {
+            return false;
+        }
+
+        pub fn analyze(self: *const Ref, ira: *Analyze) !*Instruction {
+            const target = try self.params.target.getAsParam();
+
+            if (ira.getCompTimeValOrNullUndefOk(target)) |val| {
+                return ira.getCompTimeRef(
+                    val,
+                    Value.Ptr.Mut.CompTimeConst,
+                    self.params.mut,
+                    self.params.volatility,
+                    val.typeof.getAbiAlignment(ira.irb.module),
+                );
+            }
+
+            const new_inst = try ira.irb.build(Ref, self.base.scope, self.base.span, Params{
+                .target = target,
+                .mut = self.params.mut,
+                .volatility = self.params.volatility,
+            });
+            const elem_type = target.getKnownType();
+            const ptr_type = Type.Pointer.get(
+                ira.irb.module,
+                elem_type,
+                self.params.mut,
+                self.params.volatility,
+                Type.Pointer.Size.One,
+                elem_type.getAbiAlignment(ira.irb.module),
+            );
+            // TODO: potentially set the hint that this is a stack pointer. But it might not be - this
+            // could be a ref of a global, for example
+            new_inst.val = IrVal{ .KnownType = &ptr_type.base };
+            // TODO potentially add an alloca entry here
+            return new_inst;
+        }
     };

     pub const DeclVar = struct {
         base: Instruction,
-        variable: *Variable,
+        params: Params,
+
+        const Params = struct {
+            variable: *Variable,
+        };
+
+        const ir_val_init = IrVal.Init.Unknown;

         pub fn dump(inst: *const DeclVar) void {}
+
+        pub fn hasSideEffects(inst: *const DeclVar) bool {
+            return true;
+        }
+
+        pub fn analyze(self: *const DeclVar, ira: *Analyze) !*Instruction {
+            return error.Unimplemented; // TODO
+        }
     };

     pub const CheckVoidStmt = struct {
         base: Instruction,
-        target: *Instruction,
+        params: Params,

-        pub fn build(
-            irb: *Builder,
-            scope: *Scope,
-            target: *Instruction,
-        ) !*Instruction {
-            const inst = try irb.arena().create(CheckVoidStmt{
-                .base = Instruction{
-                    .id = Instruction.Id.CheckVoidStmt,
-                    .is_generated = true,
-                    .scope = scope,
-                    .debug_id = irb.next_debug_id,
-                    .val = IrVal{ .Known = &Value.Void.get(irb.module).base },
-                },
-                .target = target,
-            });
-            irb.next_debug_id += 1;
-            try irb.current_basic_block.instruction_list.append(&inst.base);
-            return &inst.base;
-        }
+        const Params = struct {
+            target: *Instruction,
+        };
+
+        const ir_val_init = IrVal.Init.Unknown;

         pub fn dump(inst: *const CheckVoidStmt) void {}
+
+        pub fn hasSideEffects(inst: *const CheckVoidStmt) bool {
+            return true;
+        }
+
+        pub fn analyze(self: *const CheckVoidStmt, ira: *Analyze) !*Instruction {
+            return error.Unimplemented; // TODO
+        }
     };

     pub const Phi = struct {
         base: Instruction,
-        incoming_blocks: []*BasicBlock,
-        incoming_values: []*Instruction,
+        params: Params,

-        pub fn build(
-            irb: *Builder,
-            scope: *Scope,
-            incoming_blocks: []*BasicBlock,
-            incoming_values: []*Instruction,
-        ) !*Instruction {
-            const inst = try irb.arena().create(Phi{
-                .base = Instruction{
-                    .id = Instruction.Id.Phi,
-                    .is_generated = false,
-                    .scope = scope,
-                    .debug_id = irb.next_debug_id,
-                    .val = IrVal.Unknown,
-                },
-                .incoming_blocks = incoming_blocks,
-                .incoming_values = incoming_values,
-            });
-            irb.next_debug_id += 1;
-            try irb.current_basic_block.instruction_list.append(&inst.base);
-            return &inst.base;
-        }
+        const Params = struct {
+            incoming_blocks: []*BasicBlock,
+            incoming_values: []*Instruction,
+        };
+
+        const ir_val_init = IrVal.Init.Unknown;

         pub fn dump(inst: *const Phi) void {}
+
+        pub fn hasSideEffects(inst: *const Phi) bool {
+            return false;
+        }
+
+        pub fn analyze(self: *const Phi, ira: *Analyze) !*Instruction {
+            return error.Unimplemented; // TODO
+        }
     };

     pub const Br = struct {
         base: Instruction,
-        dest_block: *BasicBlock,
-        is_comptime: *Instruction,
+        params: Params,

-        pub fn build(
-            irb: *Builder,
-            scope: *Scope,
-            dest_block: *BasicBlock,
-            is_comptime: *Instruction,
-        ) !*Instruction {
-            const inst = try irb.arena().create(Br{
-                .base = Instruction{
-                    .id = Instruction.Id.Br,
-                    .is_generated = false,
-                    .scope = scope,
-                    .debug_id = irb.next_debug_id,
-                    .val = IrVal{ .Known = &Value.NoReturn.get(irb.module).base },
-                },
-                .dest_block = dest_block,
-                .is_comptime = is_comptime,
-            });
-            irb.next_debug_id += 1;
-            try irb.current_basic_block.instruction_list.append(&inst.base);
-            return &inst.base;
-        }
+        const Params = struct {
+            dest_block: *BasicBlock,
+            is_comptime: *Instruction,
+        };
+
+        const ir_val_init = IrVal.Init.NoReturn;

         pub fn dump(inst: *const Br) void {}
+
+        pub fn hasSideEffects(inst: *const Br) bool {
+            return true;
+        }
+
+        pub fn analyze(self: *const Br, ira: *Analyze) !*Instruction {
+            return error.Unimplemented; // TODO
+        }
+    };
+
+    pub const AddImplicitReturnType = struct {
+        base: Instruction,
+        params: Params,
+
+        pub const Params = struct {
+            target: *Instruction,
+        };
+
+        const ir_val_init = IrVal.Init.Unknown;
+
+        pub fn dump(inst: *const AddImplicitReturnType) void {
+            std.debug.warn("#{}", inst.params.target.debug_id);
+        }
+
+        pub fn hasSideEffects(inst: *const AddImplicitReturnType) bool {
+            return true;
+        }
+
+        pub fn analyze(self: *const AddImplicitReturnType, ira: *Analyze) !*Instruction {
+            const target = try self.params.target.getAsParam();
+            try ira.src_implicit_return_type_list.append(target);
+            return ira.irb.build(
+                AddImplicitReturnType,
+                self.base.scope,
+                self.base.span,
+                Params{ .target = target },
+            );
+        }
     };
 };
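
Every instruction now follows one shape: a base: Instruction, a params: Params payload, an ir_val_init constant consumed by Builder.buildExtra (added further down), and dump/hasSideEffects/analyze methods located via the comptime dispatch above. A sketch of how a hypothetical new instruction would slot into the convention (CheckBool is invented for illustration and would also need a matching member in Instruction.Id):

    pub const CheckBool = struct {
        base: Instruction,
        params: Params,

        const Params = struct {
            // buildExtra sees a *Instruction field here and bumps target.ref_count.
            target: *Instruction,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const CheckBool) void {}

        pub fn hasSideEffects(inst: *const CheckBool) bool {
            return true;
        }

        pub fn analyze(self: *const CheckBool, ira: *Analyze) !*Instruction {
            return error.Unimplemented; // TODO
        }
    };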
@@ -303,16 +398,31 @@ pub const BasicBlock = struct {
     debug_id: usize,
     scope: *Scope,
     instruction_list: std.ArrayList(*Instruction),
+    ref_instruction: ?*Instruction,
+
+    /// the basic block that is derived from this one in analysis
+    child: ?*BasicBlock,
+
+    /// the basic block that this one derives from in analysis
+    parent: ?*BasicBlock,

     pub fn ref(self: *BasicBlock) void {
         self.ref_count += 1;
     }
+
+    pub fn linkToParent(self: *BasicBlock, parent: *BasicBlock) void {
+        assert(self.parent == null);
+        assert(parent.child == null);
+        self.parent = parent;
+        parent.child = self;
+    }
 };

 /// Stuff that survives longer than Builder
 pub const Code = struct {
     basic_block_list: std.ArrayList(*BasicBlock),
     arena: std.heap.ArenaAllocator,
+    return_type: ?*Type,

     /// allocator is module.a()
     pub fn destroy(self: *Code, allocator: *Allocator) void {
@@ -341,15 +451,13 @@ pub const Builder = struct {
     parsed_file: *ParsedFile,
     is_comptime: bool,

-    pub const Error = error{
-        OutOfMemory,
-        Unimplemented,
-    };
+    pub const Error = Analyze.Error;

     pub fn init(module: *Module, parsed_file: *ParsedFile) !Builder {
         const code = try module.a().create(Code{
             .basic_block_list = undefined,
             .arena = std.heap.ArenaAllocator.init(module.a()),
+            .return_type = null,
         });
         code.basic_block_list = std.ArrayList(*BasicBlock).init(&code.arena.allocator);
         errdefer code.destroy(module.a());
@@ -381,6 +489,9 @@ pub const Builder = struct {
             .debug_id = self.next_debug_id,
             .scope = scope,
             .instruction_list = std.ArrayList(*Instruction).init(self.arena()),
+            .child = null,
+            .parent = null,
+            .ref_instruction = null,
         });
         self.next_debug_id += 1;
         return basic_block;
@@ -490,14 +601,18 @@ pub const Builder = struct {
         if (block.statements.len == 0) {
             // {}
-            return Instruction.Const.buildVoid(irb, child_scope, false);
+            return irb.buildConstVoid(child_scope, Span.token(block.lbrace), false);
         }

         if (block.label) |label| {
             block_scope.incoming_values = std.ArrayList(*Instruction).init(irb.arena());
             block_scope.incoming_blocks = std.ArrayList(*BasicBlock).init(irb.arena());
             block_scope.end_block = try irb.createBasicBlock(parent_scope, "BlockEnd");
-            block_scope.is_comptime = try Instruction.Const.buildBool(irb, parent_scope, irb.isCompTime(parent_scope));
+            block_scope.is_comptime = try irb.buildConstBool(
+                parent_scope,
+                Span.token(block.lbrace),
+                irb.isCompTime(parent_scope),
+            );
         }

         var is_continuation_unreachable = false;
@@ -530,10 +645,15 @@ pub const Builder = struct {
             if (statement_value.cast(Instruction.DeclVar)) |decl_var| {
                 // variable declarations start a new scope
-                child_scope = decl_var.variable.child_scope;
+                child_scope = decl_var.params.variable.child_scope;
             } else if (!is_continuation_unreachable) {
                 // this statement's value must be void
-                _ = Instruction.CheckVoidStmt.build(irb, child_scope, statement_value);
+                _ = irb.build(
+                    Instruction.CheckVoidStmt,
+                    child_scope,
+                    statement_value.span,
+                    Instruction.CheckVoidStmt.Params{ .target = statement_value },
+                );
             }
         }
@@ -544,37 +664,34 @@ pub const Builder = struct {
             }

             try irb.setCursorAtEndAndAppendBlock(block_scope.end_block);
-            return Instruction.Phi.build(
-                irb,
-                parent_scope,
-                block_scope.incoming_blocks.toOwnedSlice(),
-                block_scope.incoming_values.toOwnedSlice(),
-            );
+            return irb.build(Instruction.Phi, parent_scope, Span.token(block.rbrace), Instruction.Phi.Params{
+                .incoming_blocks = block_scope.incoming_blocks.toOwnedSlice(),
+                .incoming_values = block_scope.incoming_values.toOwnedSlice(),
+            });
         }

         if (block.label) |label| {
             try block_scope.incoming_blocks.append(irb.current_basic_block);
             try block_scope.incoming_values.append(
-                try Instruction.Const.buildVoid(irb, parent_scope, true),
+                try irb.buildConstVoid(parent_scope, Span.token(block.rbrace), true),
             );
             _ = try irb.genDefersForBlock(child_scope, outer_block_scope, Scope.Defer.Kind.ScopeExit);
-            (try Instruction.Br.build(
-                irb,
-                parent_scope,
-                block_scope.end_block,
-                block_scope.is_comptime,
-            )).setGenerated();
+
+            _ = try irb.buildGen(Instruction.Br, parent_scope, Span.token(block.rbrace), Instruction.Br.Params{
+                .dest_block = block_scope.end_block,
+                .is_comptime = block_scope.is_comptime,
+            });
             try irb.setCursorAtEndAndAppendBlock(block_scope.end_block);
-            return Instruction.Phi.build(
-                irb,
-                parent_scope,
-                block_scope.incoming_blocks.toOwnedSlice(),
-                block_scope.incoming_values.toOwnedSlice(),
-            );
+
+            return irb.build(Instruction.Phi, parent_scope, Span.token(block.rbrace), Instruction.Phi.Params{
+                .incoming_blocks = block_scope.incoming_blocks.toOwnedSlice(),
+                .incoming_values = block_scope.incoming_values.toOwnedSlice(),
+            });
         }

         _ = try irb.genDefersForBlock(child_scope, outer_block_scope, Scope.Defer.Kind.ScopeExit);
-        return try Instruction.Const.buildVoid(irb, child_scope, true);
+        return irb.buildConstVoid(child_scope, Span.token(block.rbrace), true);
     }

     fn genDefersForBlock(
@@ -603,7 +720,12 @@ pub const Builder = struct {
                     if (instruction.isNoReturn()) {
                         is_noreturn = true;
                     } else {
-                        _ = Instruction.CheckVoidStmt.build(irb, &defer_expr_scope.base, instruction);
+                        _ = try irb.build(
+                            Instruction.CheckVoidStmt,
+                            &defer_expr_scope.base,
+                            Span.token(defer_expr_scope.expr_node.lastToken()),
+                            Instruction.CheckVoidStmt.Params{ .target = instruction },
+                        );
                     }
                 }
             },
@@ -626,7 +748,11 @@ pub const Builder = struct {
             LVal.Ptr => {
                 // We needed a pointer to a value, but we got a value. So we create
                 // an instruction which just makes a const pointer of it.
-                return Instruction.Ref.build(irb, scope, instruction, Mut.Const, Volatility.NonVolatile);
+                return irb.build(Instruction.Ref, scope, instruction.span, Instruction.Ref.Params{
+                    .target = instruction,
+                    .mut = Type.Pointer.Mut.Const,
+                    .volatility = Type.Pointer.Vol.Non,
+                });
             },
         }
     }
@@ -634,9 +760,218 @@ pub const Builder = struct {
     fn arena(self: *Builder) *Allocator {
         return &self.code.arena.allocator;
     }
+
+    fn buildExtra(
+        self: *Builder,
+        comptime I: type,
+        scope: *Scope,
+        span: Span,
+        params: I.Params,
+        is_generated: bool,
+    ) !*Instruction {
+        const inst = try self.arena().create(I{
+            .base = Instruction{
+                .id = Instruction.typeToId(I),
+                .is_generated = is_generated,
+                .scope = scope,
+                .debug_id = self.next_debug_id,
+                .val = switch (I.ir_val_init) {
+                    IrVal.Init.Unknown => IrVal.Unknown,
+                    IrVal.Init.NoReturn => IrVal{ .KnownValue = &Value.NoReturn.get(self.module).base },
+                    IrVal.Init.Void => IrVal{ .KnownValue = &Value.Void.get(self.module).base },
+                },
+                .ref_count = 0,
+                .span = span,
+                .child = null,
+                .parent = null,
+            },
+            .params = params,
+        });
+
+        // Look at the params and ref() other instructions
+        comptime var i = 0;
+        inline while (i < @memberCount(I.Params)) : (i += 1) {
+            const FieldType = comptime @typeOf(@field(I.Params(undefined), @memberName(I.Params, i)));
+            switch (FieldType) {
+                *Instruction => @field(inst.params, @memberName(I.Params, i)).ref_count += 1,
+                ?*Instruction => if (@field(inst.params, @memberName(I.Params, i))) |other| other.ref_count += 1,
+                else => {},
+            }
+        }
+
+        self.next_debug_id += 1;
+        try self.current_basic_block.instruction_list.append(&inst.base);
+        return &inst.base;
+    }
+
+    fn build(
+        self: *Builder,
+        comptime I: type,
+        scope: *Scope,
+        span: Span,
+        params: I.Params,
+    ) !*Instruction {
+        return self.buildExtra(I, scope, span, params, false);
+    }
+
+    fn buildGen(
+        self: *Builder,
+        comptime I: type,
+        scope: *Scope,
+        span: Span,
+        params: I.Params,
+    ) !*Instruction {
+        return self.buildExtra(I, scope, span, params, true);
+    }
+
+    fn buildConstBool(self: *Builder, scope: *Scope, span: Span, x: bool) !*Instruction {
+        const inst = try self.build(Instruction.Const, scope, span, Instruction.Const.Params{});
+        inst.val = IrVal{ .KnownValue = &Value.Bool.get(self.module, x).base };
+        return inst;
+    }
+
+    fn buildConstVoid(self: *Builder, scope: *Scope, span: Span, is_generated: bool) !*Instruction {
+        const inst = try self.buildExtra(Instruction.Const, scope, span, Instruction.Const.Params{}, is_generated);
+        inst.val = IrVal{ .KnownValue = &Value.Void.get(self.module).base };
+        return inst;
+    }
 };

-pub async fn gen(module: *Module, body_node: *ast.Node, scope: *Scope, parsed_file: *ParsedFile) !*Code {
+const Analyze = struct {
+    irb: Builder,
+    old_bb_index: usize,
+    const_predecessor_bb: ?*BasicBlock,
+    parent_basic_block: *BasicBlock,
+    instruction_index: usize,
+    src_implicit_return_type_list: std.ArrayList(*Instruction),
+    explicit_return_type: ?*Type,
+
+    pub const Error = error{
+        /// This is only for when we have already reported a compile error. It is the poison value.
+        SemanticAnalysisFailed,
+
+        /// This is a placeholder - it is useful to use instead of panicking but once the compiler is
+        /// done this error code will be removed.
+        Unimplemented,
+
+        OutOfMemory,
+    };
+
+    pub fn init(module: *Module, parsed_file: *ParsedFile, explicit_return_type: ?*Type) !Analyze {
+        var irb = try Builder.init(module, parsed_file);
+        errdefer irb.abort();
+
+        return Analyze{
+            .irb = irb,
+            .old_bb_index = 0,
+            .const_predecessor_bb = null,
+            .parent_basic_block = undefined, // initialized with startBasicBlock
+            .instruction_index = undefined, // initialized with startBasicBlock
+            .src_implicit_return_type_list = std.ArrayList(*Instruction).init(irb.arena()),
+            .explicit_return_type = explicit_return_type,
+        };
+    }
+
+    pub fn abort(self: *Analyze) void {
+        self.irb.abort();
+    }
+
+    pub fn getNewBasicBlock(self: *Analyze, old_bb: *BasicBlock, ref_old_instruction: ?*Instruction) !*BasicBlock {
+        if (old_bb.child) |child| {
+            if (ref_old_instruction == null or child.ref_instruction != ref_old_instruction)
+                return child;
+        }
+
+        const new_bb = try self.irb.createBasicBlock(old_bb.scope, old_bb.name_hint);
+        new_bb.linkToParent(old_bb);
+        new_bb.ref_instruction = ref_old_instruction;
+        return new_bb;
+    }
+
+    pub fn startBasicBlock(self: *Analyze, old_bb: *BasicBlock, const_predecessor_bb: ?*BasicBlock) void {
+        self.instruction_index = 0;
+        self.parent_basic_block = old_bb;
+        self.const_predecessor_bb = const_predecessor_bb;
+    }
+
+    pub fn finishBasicBlock(ira: *Analyze, old_code: *Code) !void {
+        try ira.irb.code.basic_block_list.append(ira.irb.current_basic_block);
+        ira.instruction_index += 1;
+
+        while (ira.instruction_index < ira.parent_basic_block.instruction_list.len) {
+            const next_instruction = ira.parent_basic_block.instruction_list.at(ira.instruction_index);
+            if (!next_instruction.is_generated) {
+                try ira.addCompileError(next_instruction.span, "unreachable code");
+                break;
+            }
+            ira.instruction_index += 1;
+        }
+
+        ira.old_bb_index += 1;
+
+        var need_repeat = true;
+        while (true) {
+            while (ira.old_bb_index < old_code.basic_block_list.len) {
+                const old_bb = old_code.basic_block_list.at(ira.old_bb_index);
+                const new_bb = old_bb.child orelse {
+                    ira.old_bb_index += 1;
+                    continue;
+                };
+                if (new_bb.instruction_list.len != 0) {
+                    ira.old_bb_index += 1;
+                    continue;
+                }
+                ira.irb.current_basic_block = new_bb;
+
+                ira.startBasicBlock(old_bb, null);
+                return;
+            }
+            if (!need_repeat)
+                return;
+
+            need_repeat = false;
+            ira.old_bb_index = 0;
+            continue;
+        }
+    }
+
+    fn addCompileError(self: *Analyze, span: Span, comptime fmt: []const u8, args: ...) !void {
+        return self.irb.module.addCompileError(self.irb.parsed_file, span, fmt, args);
+    }
+
+    fn resolvePeerTypes(self: *Analyze, expected_type: ?*Type, peers: []const *Instruction) Analyze.Error!*Type {
+        // TODO actual implementation
+        return &Type.Void.get(self.irb.module).base;
+    }
+
+    fn implicitCast(self: *Analyze, target: *Instruction, optional_dest_type: ?*Type) Analyze.Error!*Instruction {
+        const dest_type = optional_dest_type orelse return target;
+        @panic("TODO implicitCast");
+    }
+
+    fn getCompTimeValOrNullUndefOk(self: *Analyze, target: *Instruction) ?*Value {
+        @panic("TODO getCompTimeValOrNullUndefOk");
+    }
+
+    fn getCompTimeRef(
+        self: *Analyze,
+        value: *Value,
+        ptr_mut: Value.Ptr.Mut,
+        mut: Type.Pointer.Mut,
+        volatility: Type.Pointer.Vol,
+        ptr_align: u32,
+    ) Analyze.Error!*Instruction {
+        @panic("TODO getCompTimeRef");
+    }
+};
+
+pub async fn gen(
+    module: *Module,
+    body_node: *ast.Node,
+    scope: *Scope,
+    end_span: Span,
+    parsed_file: *ParsedFile,
+) !*Code {
     var irb = try Builder.init(module, parsed_file);
     errdefer irb.abort();
@@ -646,8 +981,61 @@ pub async fn gen(module: *Module, body_node: *ast.Node, scope: *Scope, parsed_file: *ParsedFile) !*Code {
     const result = try irb.genNode(body_node, scope, LVal.None);
     if (!result.isNoReturn()) {
-        (try Instruction.Return.build(&irb, scope, result)).setGenerated();
+        _ = irb.buildGen(
+            Instruction.AddImplicitReturnType,
+            scope,
+            end_span,
+            Instruction.AddImplicitReturnType.Params{ .target = result },
+        );
+        _ = irb.buildGen(
+            Instruction.Return,
+            scope,
+            end_span,
+            Instruction.Return.Params{ .return_value = result },
+        );
     }

     return irb.finish();
 }
+
+pub async fn analyze(module: *Module, parsed_file: *ParsedFile, old_code: *Code, expected_type: ?*Type) !*Code {
+    var ira = try Analyze.init(module, parsed_file, expected_type);
+    errdefer ira.abort();
+
+    const old_entry_bb = old_code.basic_block_list.at(0);
+
+    const new_entry_bb = try ira.getNewBasicBlock(old_entry_bb, null);
+    new_entry_bb.ref();
+
+    ira.irb.current_basic_block = new_entry_bb;
+
+    ira.startBasicBlock(old_entry_bb, null);
+
+    while (ira.old_bb_index < old_code.basic_block_list.len) {
+        const old_instruction = ira.parent_basic_block.instruction_list.at(ira.instruction_index);
+
+        if (old_instruction.ref_count == 0 and !old_instruction.hasSideEffects()) {
+            ira.instruction_index += 1;
+            continue;
+        }
+
+        const return_inst = try old_instruction.analyze(&ira);
+        // Note: if we ever modify the above to handle error.CompileError by continuing analysis,
+        // then here we want to check if ira.isCompTime() and return early if true
+
+        if (return_inst.isNoReturn()) {
+            try ira.finishBasicBlock(old_code);
+            continue;
+        }
+
+        ira.instruction_index += 1;
+    }
+
+    if (ira.src_implicit_return_type_list.len == 0) {
+        ira.irb.code.return_type = &Type.NoReturn.get(module).base;
+        return ira.irb.finish();
+    }
+
+    ira.irb.code.return_type = try ira.resolvePeerTypes(expected_type, ira.src_implicit_return_type_list.toSliceConst());
+
+    return ira.irb.finish();
+}
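
Taken together, this file now exposes a two-pass pipeline: gen builds unanalyzed IR from the AST, and analyze rebuilds it instruction by instruction, filling in Code.return_type along the way. A condensed caller sketch, mirroring generateDeclFn later in this commit (error handling elided):

    const unanalyzed_code = try await (async ir.gen(module, body_node, scope, Span.token(body_node.lastToken()), parsed_file) catch unreachable);
    defer unanalyzed_code.destroy(module.a());

    const analyzed_code = try await (async ir.analyze(module, parsed_file, unanalyzed_code, null) catch unreachable);
    defer analyzed_code.destroy(module.a());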

View File

@@ -497,7 +497,7 @@ async fn processBuildEvents(module: *Module, color: errmsg.Color) void {
         },
         Module.Event.Error => |err| {
             std.debug.warn("build failed: {}\n", @errorName(err));
-            @panic("TODO error return trace");
+            os.exit(1);
         },
         Module.Event.Fail => |msgs| {
             for (msgs) |msg| {

View File

@@ -24,6 +24,7 @@ const Visib = @import("visib.zig").Visib;
 const ParsedFile = @import("parsed_file.zig").ParsedFile;
 const Value = @import("value.zig").Value;
 const Type = Value.Type;
+const Span = errmsg.Span;

 pub const Module = struct {
     loop: *event.Loop,
@@ -148,13 +149,14 @@ pub const Module = struct {
         Overflow,
         NotSupported,
         BufferTooSmall,
-        Unimplemented,
+        Unimplemented, // TODO remove this one
+        SemanticAnalysisFailed, // TODO remove this one
     };

     pub const Event = union(enum) {
         Ok,
-        Fail: []*errmsg.Msg,
         Error: BuildError,
+        Fail: []*errmsg.Msg,
     };

     pub const DarwinVersionMin = union(enum) {
@@ -413,21 +415,32 @@ pub const Module = struct {
         while (true) {
             // TODO directly awaiting async should guarantee memory allocation elision
             // TODO also async before suspending should guarantee memory allocation elision
-            (await (async self.addRootSrc() catch unreachable)) catch |err| {
-                await (async self.events.put(Event{ .Error = err }) catch unreachable);
-                return;
-            };
+            const build_result = await (async self.addRootSrc() catch unreachable);
+
+            // this makes a handy error return trace and stack trace in debug mode
+            if (std.debug.runtime_safety) {
+                build_result catch unreachable;
+            }
+
             const compile_errors = blk: {
                 const held = await (async self.compile_errors.acquire() catch unreachable);
                 defer held.release();
                 break :blk held.value.toOwnedSlice();
             };

-            if (compile_errors.len == 0) {
-                await (async self.events.put(Event.Ok) catch unreachable);
-            } else {
-                await (async self.events.put(Event{ .Fail = compile_errors }) catch unreachable);
+            if (build_result) |_| {
+                if (compile_errors.len == 0) {
+                    await (async self.events.put(Event.Ok) catch unreachable);
+                } else {
+                    await (async self.events.put(Event{ .Fail = compile_errors }) catch unreachable);
+                }
+            } else |err| {
+                // if there's an error then the compile errors have dangling references
+                self.a().free(compile_errors);
+
+                await (async self.events.put(Event{ .Error = err }) catch unreachable);
             }
+
             // for now we stop after 1
             return;
         }
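
The build_result catch unreachable guarded by std.debug.runtime_safety is deliberate: in debug builds an error from addRootSrc crashes immediately with a full error return trace, while in release builds the branch compiles away and the error is reported through the else |err| arm. The idiom in isolation (result stands for any error union with a void payload):

    // Debug builds: crash here and keep the error return trace.
    // Release builds: fall through and handle the error normally.
    if (std.debug.runtime_safety) {
        result catch unreachable;
    }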
@@ -477,7 +490,7 @@ pub const Module = struct {
         const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl);

         const name = if (fn_proto.name_token) |name_token| tree.tokenSlice(name_token) else {
-            try self.addCompileError(parsed_file, errmsg.Span{
+            try self.addCompileError(parsed_file, Span{
                 .first = fn_proto.fn_token,
                 .last = fn_proto.fn_token + 1,
             }, "missing function name");
@@ -518,27 +531,23 @@ pub const Module = struct {
         }
     }

-    fn addCompileError(self: *Module, parsed_file: *ParsedFile, span: errmsg.Span, comptime fmt: []const u8, args: ...) !void {
+    fn addCompileError(self: *Module, parsed_file: *ParsedFile, span: Span, comptime fmt: []const u8, args: ...) !void {
         const text = try std.fmt.allocPrint(self.loop.allocator, fmt, args);
         errdefer self.loop.allocator.free(text);

-        try self.build_group.call(addCompileErrorAsync, self, parsed_file, span.first, span.last, text);
+        try self.build_group.call(addCompileErrorAsync, self, parsed_file, span, text);
     }

     async fn addCompileErrorAsync(
         self: *Module,
         parsed_file: *ParsedFile,
-        first_token: ast.TokenIndex,
-        last_token: ast.TokenIndex,
+        span: Span,
         text: []u8,
     ) !void {
         const msg = try self.loop.allocator.create(errmsg.Msg{
             .path = parsed_file.realpath,
             .text = text,
-            .span = errmsg.Span{
-                .first = first_token,
-                .last = last_token,
-            },
+            .span = span,
             .tree = &parsed_file.tree,
         });
         errdefer self.loop.allocator.destroy(msg);
@@ -624,6 +633,7 @@ pub async fn resolveDecl(module: *Module, decl: *Decl) !void {
     if (@atomicRmw(u8, &decl.resolution_in_progress, AtomicRmwOp.Xchg, 1, AtomicOrder.SeqCst) == 0) {
         decl.resolution.data = await (async generateDecl(module, decl) catch unreachable);
         decl.resolution.resolve();
+        return decl.resolution.data;
     } else {
         return (await (async decl.resolution.get() catch unreachable)).*;
     }
@@ -655,12 +665,41 @@ async fn generateDeclFn(module: *Module, fn_decl: *Decl.Fn) !void {
     fn_decl.value = Decl.Fn.Val{ .Ok = fn_val };

-    const code = try await (async ir.gen(
+    const unanalyzed_code = (await (async ir.gen(
         module,
         body_node,
         &fndef_scope.base,
+        Span.token(body_node.lastToken()),
         fn_decl.base.parsed_file,
-    ) catch unreachable);
-    //code.dump();
-    //try await (async irAnalyze(module, func) catch unreachable);
+    ) catch unreachable)) catch |err| switch (err) {
+        // This poison value should not cause the errdefers to run. It simply means
+        // that self.compile_errors is populated.
+        error.SemanticAnalysisFailed => return {},
+        else => return err,
+    };
+    defer unanalyzed_code.destroy(module.a());
+
+    if (module.verbose_ir) {
+        std.debug.warn("unanalyzed:\n");
+        unanalyzed_code.dump();
+    }
+
+    const analyzed_code = (await (async ir.analyze(
+        module,
+        fn_decl.base.parsed_file,
+        unanalyzed_code,
+        null,
+    ) catch unreachable)) catch |err| switch (err) {
+        // This poison value should not cause the errdefers to run. It simply means
+        // that self.compile_errors is populated.
+        error.SemanticAnalysisFailed => return {},
+        else => return err,
+    };
+    defer analyzed_code.destroy(module.a());
+
+    if (module.verbose_ir) {
+        std.debug.warn("analyzed:\n");
+        analyzed_code.dump();
+    }
+
+    // TODO now render to LLVM module
 }

View File

@ -39,6 +39,14 @@ pub const Type = struct {
} }
} }
pub fn dump(base: *const Type) void {
std.debug.warn("{}", @tagName(base.id));
}
pub fn getAbiAlignment(base: *Type, module: *Module) u32 {
@panic("TODO getAbiAlignment");
}
pub const Struct = struct { pub const Struct = struct {
base: Type, base: Type,
decls: *Scope.Decls, decls: *Scope.Decls,
@@ -143,10 +151,35 @@ pub const Type = struct {
     };

     pub const Pointer = struct {
         base: Type,
+        mut: Mut,
+        vol: Vol,
+        size: Size,
+        alignment: u32,
+
+        pub const Mut = enum {
+            Mut,
+            Const,
+        };
+
+        pub const Vol = enum {
+            Non,
+            Volatile,
+        };
+
+        pub const Size = builtin.TypeInfo.Pointer.Size;

         pub fn destroy(self: *Pointer, module: *Module) void {
             module.a().destroy(self);
         }
+
+        pub fn get(
+            module: *Module,
+            elem_type: *Type,
+            mut: Mut,
+            vol: Vol,
+            size: Size,
+            alignment: u32,
+        ) *Pointer {
+            @panic("TODO get pointer");
+        }
     };

     pub const Array = struct {
         base: Type,
View File

@@ -24,10 +24,16 @@ pub const Value = struct {
                 Id.Void => @fieldParentPtr(Void, "base", base).destroy(module),
                 Id.Bool => @fieldParentPtr(Bool, "base", base).destroy(module),
                 Id.NoReturn => @fieldParentPtr(NoReturn, "base", base).destroy(module),
+                Id.Ptr => @fieldParentPtr(Ptr, "base", base).destroy(module),
             }
         }
     }

+    pub fn getRef(base: *Value) *Value {
+        base.ref();
+        return base;
+    }
+
     pub fn dump(base: *const Value) void {
         std.debug.warn("{}", @tagName(base.id));
     }
@@ -38,6 +44,7 @@ pub const Value = struct {
         Void,
         Bool,
         NoReturn,
+        Ptr,
     };

     pub const Type = @import("type.zig").Type;
@@ -122,4 +129,18 @@ pub const Value = struct {
             module.a().destroy(self);
         }
     };
+
+    pub const Ptr = struct {
+        base: Value,
+
+        pub const Mut = enum {
+            CompTimeConst,
+            CompTimeVar,
+            RunTime,
+        };
+
+        pub fn destroy(self: *Ptr, module: *Module) void {
+            module.a().destroy(self);
+        }
+    };
 };

View File

@@ -382,6 +382,21 @@ pub const Loop = struct {
         return async<self.allocator> S.asyncFunc(self, &handle, args);
     }

+    /// Awaiting a yield lets the event loop run, starting any unstarted async operations.
+    /// Note that async operations automatically start when a function yields for any other reason,
+    /// for example, when async I/O is performed. This function is intended to be used only when
+    /// CPU bound tasks would be waiting in the event loop but never get started because no async I/O
+    /// is performed.
+    pub async fn yield(self: *Loop) void {
+        suspend |p| {
+            var my_tick_node = Loop.NextTickNode{
+                .next = undefined,
+                .data = p,
+            };
+            self.onNextTick(&my_tick_node);
+        }
+    }
+
     fn workerRun(self: *Loop) void {
         start_over: while (true) {
             if (@atomicRmw(u8, &self.dispatch_lock, AtomicRmwOp.Xchg, 1, AtomicOrder.SeqCst) == 0) {
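
Loop.yield parks the current coroutine on the next-tick queue so that worker threads can pick up other queued operations before it resumes. A usage sketch in the async style used elsewhere in this commit (loop, stillWorking, and doSomeWork are hypothetical):

    // CPU-bound async work that would otherwise starve queued operations:
    while (stillWorking()) {
        doSomeWork();
        await (async loop.yield() catch unreachable);
    }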