zig/src-self-hosted/ir.zig


const std = @import("std");
const builtin = @import("builtin");
const Module = @import("module.zig").Module;
const Scope = @import("scope.zig").Scope;
const ast = std.zig.ast;
const Allocator = std.mem.Allocator;
const Value = @import("value.zig").Value;
const Type = Value.Type;
const assert = std.debug.assert;
const Token = std.zig.Token;
const ParsedFile = @import("parsed_file.zig").ParsedFile;
const Span = @import("errmsg.zig").Span;
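/// Whether an expression is wanted as a plain value or as an addressable location;
/// lvalWrap turns a value into a const Ref when LVal.Ptr is requested.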
pub const LVal = enum {
None,
Ptr,
};
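/// What analysis knows about an instruction's result: nothing yet, only its type,
/// or a full compile-time value.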
pub const IrVal = union(enum) {
Unknown,
KnownType: *Type,
KnownValue: *Value,
const Init = enum {
Unknown,
NoReturn,
Void,
};
pub fn dump(self: IrVal) void {
switch (self) {
IrVal.Unknown => std.debug.warn("Unknown"),
IrVal.KnownType => |typeof| {
std.debug.warn("KnownType(");
typeof.dump();
std.debug.warn(")");
},
IrVal.KnownValue => |value| {
std.debug.warn("KnownValue(");
value.dump();
std.debug.warn(")");
},
}
}
};
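/// Base data embedded in every IR instruction as its `base` field. The dispatch
/// helpers below (cast, dump, hasSideEffects, analyze) recover the concrete
/// instruction type via @fieldParentPtr.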
pub const Instruction = struct {
id: Id,
scope: *Scope,
debug_id: usize,
val: IrVal,
ref_count: usize,
span: Span,
/// true if this instruction was generated by zig and not from user code
is_generated: bool,
/// the instruction that is derived from this one in analysis
child: ?*Instruction,
/// the instruction that this one derives from in analysis
parent: ?*Instruction,
pub fn cast(base: *Instruction, comptime T: type) ?*T {
if (base.id == comptime typeToId(T)) {
return @fieldParentPtr(T, "base", base);
}
return null;
}
pub fn typeToId(comptime T: type) Id {
comptime var i = 0;
inline while (i < @memberCount(Id)) : (i += 1) {
if (T == @field(Instruction, @memberName(Id, i))) {
return @field(Id, @memberName(Id, i));
}
}
unreachable;
}
pub fn dump(base: *const Instruction) void {
comptime var i = 0;
inline while (i < @memberCount(Id)) : (i += 1) {
if (base.id == @field(Id, @memberName(Id, i))) {
const T = @field(Instruction, @memberName(Id, i));
std.debug.warn("#{} = {}(", base.debug_id, @tagName(base.id));
@fieldParentPtr(T, "base", base).dump();
std.debug.warn(")");
return;
}
}
unreachable;
}
pub fn hasSideEffects(base: *const Instruction) bool {
comptime var i = 0;
inline while (i < @memberCount(Id)) : (i += 1) {
if (base.id == @field(Id, @memberName(Id, i))) {
const T = @field(Instruction, @memberName(Id, i));
return @fieldParentPtr(T, "base", base).hasSideEffects();
}
}
unreachable;
}
pub fn analyze(base: *Instruction, ira: *Analyze) Analyze.Error!*Instruction {
comptime var i = 0;
inline while (i < @memberCount(Id)) : (i += 1) {
if (base.id == @field(Id, @memberName(Id, i))) {
const T = @field(Instruction, @memberName(Id, i));
const new_inst = try @fieldParentPtr(T, "base", base).analyze(ira);
new_inst.linkToParent(base);
return new_inst;
}
}
unreachable;
}
fn getAsParam(param: *Instruction) !*Instruction {
const child = param.child orelse return error.SemanticAnalysisFailed;
switch (child.val) {
IrVal.Unknown => return error.SemanticAnalysisFailed,
else => return child,
}
}
/// asserts that the type is known
fn getKnownType(self: *Instruction) *Type {
switch (self.val) {
IrVal.KnownType => |typeof| return typeof,
IrVal.KnownValue => |value| return value.typeof,
IrVal.Unknown => unreachable,
}
}
pub fn setGenerated(base: *Instruction) void {
base.is_generated = true;
}
pub fn isNoReturn(base: *const Instruction) bool {
switch (base.val) {
IrVal.Unknown => return false,
IrVal.KnownValue => |x| return x.typeof.id == Type.Id.NoReturn,
IrVal.KnownType => |typeof| return typeof.id == Type.Id.NoReturn,
}
}
pub fn linkToParent(self: *Instruction, parent: *Instruction) void {
assert(self.parent == null);
assert(parent.child == null);
self.parent = parent;
parent.child = self;
}
pub const Id = enum {
Return,
Const,
Ref,
DeclVar,
CheckVoidStmt,
Phi,
Br,
AddImplicitReturnType,
};
pub const Const = struct {
base: Instruction,
params: Params,
const Params = struct {};
// Use Builder.buildConst* methods, or, after building a Const instruction,
// manually set the base.val field.
const ir_val_init = IrVal.Init.Unknown;
pub fn dump(self: *const Const) void {
self.base.val.KnownValue.dump();
}
pub fn hasSideEffects(self: *const Const) bool {
return false;
}
pub fn analyze(self: *const Const, ira: *Analyze) !*Instruction {
const new_inst = try ira.irb.build(Const, self.base.scope, self.base.span, Params{});
new_inst.val = IrVal{ .KnownValue = self.base.val.KnownValue.getRef() };
return new_inst;
}
};
pub const Return = struct {
base: Instruction,
params: Params,
const Params = struct {
return_value: *Instruction,
};
const ir_val_init = IrVal.Init.NoReturn;
pub fn dump(self: *const Return) void {
std.debug.warn("#{}", self.params.return_value.debug_id);
}
pub fn hasSideEffects(self: *const Return) bool {
return true;
}
pub fn analyze(self: *const Return, ira: *Analyze) !*Instruction {
const value = try self.params.return_value.getAsParam();
const casted_value = try ira.implicitCast(value, ira.explicit_return_type);
// TODO detect returning local variable address
return ira.irb.build(Return, self.base.scope, self.base.span, Params{ .return_value = casted_value });
}
};
pub const Ref = struct {
base: Instruction,
params: Params,
const Params = struct {
target: *Instruction,
mut: Type.Pointer.Mut,
volatility: Type.Pointer.Vol,
};
const ir_val_init = IrVal.Init.Unknown;
pub fn dump(inst: *const Ref) void {}
pub fn hasSideEffects(inst: *const Ref) bool {
return false;
}
pub fn analyze(self: *const Ref, ira: *Analyze) !*Instruction {
const target = try self.params.target.getAsParam();
if (ira.getCompTimeValOrNullUndefOk(target)) |val| {
return ira.getCompTimeRef(
val,
Value.Ptr.Mut.CompTimeConst,
self.params.mut,
self.params.volatility,
val.typeof.getAbiAlignment(ira.irb.module),
);
}
const new_inst = try ira.irb.build(Ref, self.base.scope, self.base.span, Params{
.target = target,
.mut = self.params.mut,
.volatility = self.params.volatility,
});
const elem_type = target.getKnownType();
const ptr_type = Type.Pointer.get(
ira.irb.module,
elem_type,
self.params.mut,
self.params.volatility,
Type.Pointer.Size.One,
elem_type.getAbiAlignment(ira.irb.module),
);
// TODO: potentially set the hint that this is a stack pointer. But it might not be - this
// could be a ref of a global, for example
new_inst.val = IrVal{ .KnownType = &ptr_type.base };
// TODO potentially add an alloca entry here
return new_inst;
}
};
pub const DeclVar = struct {
base: Instruction,
params: Params,
const Params = struct {
variable: *Variable,
};
const ir_val_init = IrVal.Init.Unknown;
pub fn dump(inst: *const DeclVar) void {}
pub fn hasSideEffects(inst: *const DeclVar) bool {
return true;
}
pub fn analyze(self: *const DeclVar, ira: *Analyze) !*Instruction {
return error.Unimplemented; // TODO
}
};
pub const CheckVoidStmt = struct {
base: Instruction,
params: Params,
const Params = struct {
target: *Instruction,
};
const ir_val_init = IrVal.Init.Unknown;
pub fn dump(inst: *const CheckVoidStmt) void {}
pub fn hasSideEffects(inst: *const CheckVoidStmt) bool {
return true;
}
pub fn analyze(self: *const CheckVoidStmt, ira: *Analyze) !*Instruction {
return error.Unimplemented; // TODO
}
};
pub const Phi = struct {
base: Instruction,
params: Params,
const Params = struct {
incoming_blocks: []*BasicBlock,
incoming_values: []*Instruction,
};
const ir_val_init = IrVal.Init.Unknown;
pub fn dump(inst: *const Phi) void {}
pub fn hasSideEffects(inst: *const Phi) bool {
return false;
}
pub fn analyze(self: *const Phi, ira: *Analyze) !*Instruction {
return error.Unimplemented; // TODO
}
};
pub const Br = struct {
base: Instruction,
params: Params,
const Params = struct {
dest_block: *BasicBlock,
is_comptime: *Instruction,
};
const ir_val_init = IrVal.Init.NoReturn;
pub fn dump(inst: *const Br) void {}
pub fn hasSideEffects(inst: *const Br) bool {
return true;
}
pub fn analyze(self: *const Br, ira: *Analyze) !*Instruction {
return error.Unimplemented; // TODO
}
};
pub const AddImplicitReturnType = struct {
base: Instruction,
params: Params,
pub const Params = struct {
target: *Instruction,
};
const ir_val_init = IrVal.Init.Unknown;
pub fn dump(inst: *const AddImplicitReturnType) void {
std.debug.warn("#{}", inst.params.target.debug_id);
}
pub fn hasSideEffects(inst: *const AddImplicitReturnType) bool {
return true;
}
pub fn analyze(self: *const AddImplicitReturnType, ira: *Analyze) !*Instruction {
const target = try self.params.target.getAsParam();
try ira.src_implicit_return_type_list.append(target);
return ira.irb.build(
AddImplicitReturnType,
self.base.scope,
self.base.span,
Params{ .target = target },
);
}
};
};
pub const Variable = struct {
child_scope: *Scope,
};
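/// A straight-line sequence of IR instructions. The child/parent fields link a
/// pre-analysis block to its analyzed counterpart, mirroring Instruction.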
pub const BasicBlock = struct {
ref_count: usize,
name_hint: []const u8,
debug_id: usize,
scope: *Scope,
instruction_list: std.ArrayList(*Instruction),
ref_instruction: ?*Instruction,
/// the basic block that is derived from this one in analysis
child: ?*BasicBlock,
/// the basic block that this one derives from in analysis
parent: ?*BasicBlock,
pub fn ref(self: *BasicBlock) void {
self.ref_count += 1;
}
pub fn linkToParent(self: *BasicBlock, parent: *BasicBlock) void {
assert(self.parent == null);
assert(parent.child == null);
self.parent = parent;
parent.child = self;
}
};
/// Stuff that survives longer than Builder
pub const Code = struct {
basic_block_list: std.ArrayList(*BasicBlock),
arena: std.heap.ArenaAllocator,
return_type: ?*Type,
/// allocator is module.a()
pub fn destroy(self: *Code, allocator: *Allocator) void {
self.arena.deinit();
allocator.destroy(self);
}
pub fn dump(self: *Code) void {
var bb_i: usize = 0;
for (self.basic_block_list.toSliceConst()) |bb| {
std.debug.warn("{}_{}:\n", bb.name_hint, bb.debug_id);
for (bb.instruction_list.toSliceConst()) |instr| {
std.debug.warn(" ");
instr.dump();
std.debug.warn("\n");
}
}
}
};
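/// Builds IR instructions and basic blocks into a Code. Everything it allocates
/// lives in code.arena and is freed in one shot by Code.destroy.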
pub const Builder = struct {
module: *Module,
code: *Code,
current_basic_block: *BasicBlock,
next_debug_id: usize,
parsed_file: *ParsedFile,
is_comptime: bool,
pub const Error = Analyze.Error;
pub fn init(module: *Module, parsed_file: *ParsedFile) !Builder {
const code = try module.a().create(Code{
.basic_block_list = undefined,
.arena = std.heap.ArenaAllocator.init(module.a()),
.return_type = null,
});
code.basic_block_list = std.ArrayList(*BasicBlock).init(&code.arena.allocator);
errdefer code.destroy(module.a());
return Builder{
.module = module,
.parsed_file = parsed_file,
.current_basic_block = undefined,
.code = code,
.next_debug_id = 0,
.is_comptime = false,
};
}
pub fn abort(self: *Builder) void {
self.code.destroy(self.module.a());
}
/// Call code.destroy() when done
pub fn finish(self: *Builder) *Code {
return self.code;
}
/// No need to clean up resources thanks to the arena allocator.
pub fn createBasicBlock(self: *Builder, scope: *Scope, name_hint: []const u8) !*BasicBlock {
const basic_block = try self.arena().create(BasicBlock{
.ref_count = 0,
.name_hint = name_hint,
.debug_id = self.next_debug_id,
.scope = scope,
.instruction_list = std.ArrayList(*Instruction).init(self.arena()),
.child = null,
.parent = null,
.ref_instruction = null,
});
self.next_debug_id += 1;
return basic_block;
}
pub fn setCursorAtEndAndAppendBlock(self: *Builder, basic_block: *BasicBlock) !void {
try self.code.basic_block_list.append(basic_block);
self.setCursorAtEnd(basic_block);
}
pub fn setCursorAtEnd(self: *Builder, basic_block: *BasicBlock) void {
self.current_basic_block = basic_block;
}
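/// Generates IR for a single AST node; most node kinds are still TODO.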
pub fn genNode(irb: *Builder, node: *ast.Node, scope: *Scope, lval: LVal) Error!*Instruction {
switch (node.id) {
ast.Node.Id.Root => unreachable,
ast.Node.Id.Use => unreachable,
ast.Node.Id.TestDecl => unreachable,
ast.Node.Id.VarDecl => @panic("TODO"),
ast.Node.Id.Defer => @panic("TODO"),
ast.Node.Id.InfixOp => @panic("TODO"),
ast.Node.Id.PrefixOp => @panic("TODO"),
ast.Node.Id.SuffixOp => @panic("TODO"),
ast.Node.Id.Switch => @panic("TODO"),
ast.Node.Id.While => @panic("TODO"),
ast.Node.Id.For => @panic("TODO"),
ast.Node.Id.If => @panic("TODO"),
ast.Node.Id.ControlFlowExpression => return error.Unimplemented,
ast.Node.Id.Suspend => @panic("TODO"),
ast.Node.Id.VarType => @panic("TODO"),
ast.Node.Id.ErrorType => @panic("TODO"),
ast.Node.Id.FnProto => @panic("TODO"),
ast.Node.Id.PromiseType => @panic("TODO"),
ast.Node.Id.IntegerLiteral => @panic("TODO"),
ast.Node.Id.FloatLiteral => @panic("TODO"),
ast.Node.Id.StringLiteral => @panic("TODO"),
ast.Node.Id.MultilineStringLiteral => @panic("TODO"),
ast.Node.Id.CharLiteral => @panic("TODO"),
ast.Node.Id.BoolLiteral => @panic("TODO"),
ast.Node.Id.NullLiteral => @panic("TODO"),
ast.Node.Id.UndefinedLiteral => @panic("TODO"),
ast.Node.Id.ThisLiteral => @panic("TODO"),
ast.Node.Id.Unreachable => @panic("TODO"),
ast.Node.Id.Identifier => @panic("TODO"),
ast.Node.Id.GroupedExpression => {
const grouped_expr = @fieldParentPtr(ast.Node.GroupedExpression, "base", node);
return irb.genNode(grouped_expr.expr, scope, lval);
},
ast.Node.Id.BuiltinCall => @panic("TODO"),
ast.Node.Id.ErrorSetDecl => @panic("TODO"),
ast.Node.Id.ContainerDecl => @panic("TODO"),
ast.Node.Id.Asm => @panic("TODO"),
ast.Node.Id.Comptime => @panic("TODO"),
ast.Node.Id.Block => {
const block = @fieldParentPtr(ast.Node.Block, "base", node);
return irb.lvalWrap(scope, try irb.genBlock(block, scope), lval);
},
ast.Node.Id.DocComment => @panic("TODO"),
ast.Node.Id.SwitchCase => @panic("TODO"),
ast.Node.Id.SwitchElse => @panic("TODO"),
ast.Node.Id.Else => @panic("TODO"),
ast.Node.Id.Payload => @panic("TODO"),
ast.Node.Id.PointerPayload => @panic("TODO"),
ast.Node.Id.PointerIndexPayload => @panic("TODO"),
ast.Node.Id.StructField => @panic("TODO"),
ast.Node.Id.UnionTag => @panic("TODO"),
ast.Node.Id.EnumTag => @panic("TODO"),
ast.Node.Id.ErrorTag => @panic("TODO"),
ast.Node.Id.AsmInput => @panic("TODO"),
ast.Node.Id.AsmOutput => @panic("TODO"),
ast.Node.Id.AsyncAttribute => @panic("TODO"),
ast.Node.Id.ParamDecl => @panic("TODO"),
ast.Node.Id.FieldInitializer => @panic("TODO"),
}
}
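/// Walks up the scope chain to decide whether code in target_scope runs at compile time.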
fn isCompTime(irb: *Builder, target_scope: *Scope) bool {
if (irb.is_comptime)
return true;
var scope = target_scope;
while (true) {
switch (scope.id) {
Scope.Id.CompTime => return true,
Scope.Id.FnDef => return false,
Scope.Id.Decls => unreachable,
Scope.Id.Block,
Scope.Id.Defer,
Scope.Id.DeferExpr,
=> scope = scope.parent orelse return false,
}
}
}
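/// Generates IR for a block: opens a block scope, threads defer scopes through the
/// statements, checks that reachable statement values are void, and for labeled blocks
/// joins the incoming values with a Phi in a dedicated end block.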
pub fn genBlock(irb: *Builder, block: *ast.Node.Block, parent_scope: *Scope) !*Instruction {
const block_scope = try Scope.Block.create(irb.module, parent_scope);
const outer_block_scope = &block_scope.base;
var child_scope = outer_block_scope;
if (parent_scope.findFnDef()) |fndef_scope| {
if (fndef_scope.fn_val.child_scope == parent_scope) {
fndef_scope.fn_val.block_scope = block_scope;
}
}
if (block.statements.len == 0) {
// {}
return irb.buildConstVoid(child_scope, Span.token(block.lbrace), false);
}
if (block.label) |label| {
block_scope.incoming_values = std.ArrayList(*Instruction).init(irb.arena());
block_scope.incoming_blocks = std.ArrayList(*BasicBlock).init(irb.arena());
block_scope.end_block = try irb.createBasicBlock(parent_scope, "BlockEnd");
block_scope.is_comptime = try irb.buildConstBool(
parent_scope,
Span.token(block.lbrace),
irb.isCompTime(parent_scope),
);
}
var is_continuation_unreachable = false;
var noreturn_return_value: ?*Instruction = null;
var stmt_it = block.statements.iterator(0);
while (stmt_it.next()) |statement_node_ptr| {
const statement_node = statement_node_ptr.*;
if (statement_node.cast(ast.Node.Defer)) |defer_node| {
// defer starts a new scope
const defer_token = irb.parsed_file.tree.tokens.at(defer_node.defer_token);
const kind = switch (defer_token.id) {
Token.Id.Keyword_defer => Scope.Defer.Kind.ScopeExit,
Token.Id.Keyword_errdefer => Scope.Defer.Kind.ErrorExit,
else => unreachable,
};
const defer_expr_scope = try Scope.DeferExpr.create(irb.module, parent_scope, defer_node.expr);
const defer_child_scope = try Scope.Defer.create(irb.module, parent_scope, kind, defer_expr_scope);
child_scope = &defer_child_scope.base;
continue;
}
const statement_value = try irb.genNode(statement_node, child_scope, LVal.None);
is_continuation_unreachable = statement_value.isNoReturn();
if (is_continuation_unreachable) {
// keep the last noreturn statement value around in case we need to return it
noreturn_return_value = statement_value;
}
if (statement_value.cast(Instruction.DeclVar)) |decl_var| {
// variable declarations start a new scope
child_scope = decl_var.params.variable.child_scope;
} else if (!is_continuation_unreachable) {
// this statement's value must be void
_ = try irb.build(
Instruction.CheckVoidStmt,
child_scope,
statement_value.span,
Instruction.CheckVoidStmt.Params{ .target = statement_value },
);
}
}
if (is_continuation_unreachable) {
assert(noreturn_return_value != null);
if (block.label == null or block_scope.incoming_blocks.len == 0) {
return noreturn_return_value.?;
}
try irb.setCursorAtEndAndAppendBlock(block_scope.end_block);
return irb.build(Instruction.Phi, parent_scope, Span.token(block.rbrace), Instruction.Phi.Params{
.incoming_blocks = block_scope.incoming_blocks.toOwnedSlice(),
.incoming_values = block_scope.incoming_values.toOwnedSlice(),
});
}
if (block.label) |label| {
try block_scope.incoming_blocks.append(irb.current_basic_block);
try block_scope.incoming_values.append(
try irb.buildConstVoid(parent_scope, Span.token(block.rbrace), true),
);
_ = try irb.genDefersForBlock(child_scope, outer_block_scope, Scope.Defer.Kind.ScopeExit);
_ = try irb.buildGen(Instruction.Br, parent_scope, Span.token(block.rbrace), Instruction.Br.Params{
.dest_block = block_scope.end_block,
.is_comptime = block_scope.is_comptime,
});
try irb.setCursorAtEndAndAppendBlock(block_scope.end_block);
return irb.build(Instruction.Phi, parent_scope, Span.token(block.rbrace), Instruction.Phi.Params{
.incoming_blocks = block_scope.incoming_blocks.toOwnedSlice(),
.incoming_values = block_scope.incoming_values.toOwnedSlice(),
});
}
_ = try irb.genDefersForBlock(child_scope, outer_block_scope, Scope.Defer.Kind.ScopeExit);
return irb.buildConstVoid(child_scope, Span.token(block.rbrace), true);
}
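/// Walks outward from inner_scope, generating the defer expressions that apply to
/// gen_kind (ScopeExit defers always run; ErrorExit defers run only on error exit).
/// Returns true if one of the generated defer expressions is noreturn.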
fn genDefersForBlock(
irb: *Builder,
inner_scope: *Scope,
outer_scope: *Scope,
gen_kind: Scope.Defer.Kind,
) !bool {
var scope = inner_scope;
var is_noreturn = false;
while (true) {
switch (scope.id) {
Scope.Id.Defer => {
const defer_scope = @fieldParentPtr(Scope.Defer, "base", scope);
const generate = switch (defer_scope.kind) {
Scope.Defer.Kind.ScopeExit => true,
Scope.Defer.Kind.ErrorExit => gen_kind == Scope.Defer.Kind.ErrorExit,
};
if (generate) {
const defer_expr_scope = defer_scope.defer_expr_scope;
const instruction = try irb.genNode(
defer_expr_scope.expr_node,
&defer_expr_scope.base,
LVal.None,
);
if (instruction.isNoReturn()) {
is_noreturn = true;
} else {
_ = try irb.build(
Instruction.CheckVoidStmt,
&defer_expr_scope.base,
Span.token(defer_expr_scope.expr_node.lastToken()),
Instruction.CheckVoidStmt.Params{ .target = instruction },
);
}
}
},
Scope.Id.FnDef,
Scope.Id.Decls,
=> return is_noreturn,
Scope.Id.CompTime,
Scope.Id.Block,
=> scope = scope.parent orelse return is_noreturn,
Scope.Id.DeferExpr => unreachable,
}
}
}
pub fn lvalWrap(irb: *Builder, scope: *Scope, instruction: *Instruction, lval: LVal) !*Instruction {
switch (lval) {
LVal.None => return instruction,
LVal.Ptr => {
// We needed a pointer to a value, but we got a value. So we create
// an instruction that takes a const pointer to it.
return irb.build(Instruction.Ref, scope, instruction.span, Instruction.Ref.Params{
.target = instruction,
.mut = Type.Pointer.Mut.Const,
.volatility = Type.Pointer.Vol.Non,
});
},
}
}
fn arena(self: *Builder) *Allocator {
return &self.code.arena.allocator;
}
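/// Allocates an instruction of type I in the arena, initializes its base from
/// I.ir_val_init, bumps the ref_count of any instruction operands in params, and
/// appends it to the current basic block.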
fn buildExtra(
self: *Builder,
comptime I: type,
scope: *Scope,
span: Span,
params: I.Params,
is_generated: bool,
) !*Instruction {
const inst = try self.arena().create(I{
.base = Instruction{
.id = Instruction.typeToId(I),
.is_generated = is_generated,
.scope = scope,
.debug_id = self.next_debug_id,
.val = switch (I.ir_val_init) {
IrVal.Init.Unknown => IrVal.Unknown,
IrVal.Init.NoReturn => IrVal{ .KnownValue = &Value.NoReturn.get(self.module).base },
IrVal.Init.Void => IrVal{ .KnownValue = &Value.Void.get(self.module).base },
},
.ref_count = 0,
.span = span,
.child = null,
.parent = null,
},
.params = params,
});
// Look at the params and ref() other instructions
comptime var i = 0;
inline while (i < @memberCount(I.Params)) : (i += 1) {
const FieldType = comptime @typeOf(@field(I.Params(undefined), @memberName(I.Params, i)));
switch (FieldType) {
*Instruction => @field(inst.params, @memberName(I.Params, i)).ref_count += 1,
?*Instruction => if (@field(inst.params, @memberName(I.Params, i))) |other| other.ref_count += 1,
else => {},
}
}
self.next_debug_id += 1;
try self.current_basic_block.instruction_list.append(&inst.base);
return &inst.base;
}
fn build(
self: *Builder,
comptime I: type,
scope: *Scope,
span: Span,
params: I.Params,
) !*Instruction {
return self.buildExtra(I, scope, span, params, false);
}
fn buildGen(
self: *Builder,
comptime I: type,
scope: *Scope,
span: Span,
params: I.Params,
) !*Instruction {
return self.buildExtra(I, scope, span, params, true);
}
fn buildConstBool(self: *Builder, scope: *Scope, span: Span, x: bool) !*Instruction {
const inst = try self.build(Instruction.Const, scope, span, Instruction.Const.Params{});
inst.val = IrVal{ .KnownValue = &Value.Bool.get(self.module, x).base };
return inst;
}
fn buildConstVoid(self: *Builder, scope: *Scope, span: Span, is_generated: bool) !*Instruction {
const inst = try self.buildExtra(Instruction.Const, scope, span, Instruction.Const.Params{}, is_generated);
inst.val = IrVal{ .KnownValue = &Value.Void.get(self.module).base };
return inst;
}
};
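/// State for the analysis pass: it walks the unanalyzed Code and rebuilds each
/// instruction into a new Code through its own Builder, recording types and values
/// as they become known.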
const Analyze = struct {
irb: Builder,
old_bb_index: usize,
const_predecessor_bb: ?*BasicBlock,
parent_basic_block: *BasicBlock,
instruction_index: usize,
src_implicit_return_type_list: std.ArrayList(*Instruction),
explicit_return_type: ?*Type,
pub const Error = error{
/// This is only for when we have already reported a compile error. It is the poison value.
SemanticAnalysisFailed,
/// This is a placeholder - it is useful to use instead of panicking but once the compiler is
/// done this error code will be removed.
Unimplemented,
OutOfMemory,
};
pub fn init(module: *Module, parsed_file: *ParsedFile, explicit_return_type: ?*Type) !Analyze {
var irb = try Builder.init(module, parsed_file);
errdefer irb.abort();
return Analyze{
.irb = irb,
.old_bb_index = 0,
.const_predecessor_bb = null,
.parent_basic_block = undefined, // initialized with startBasicBlock
.instruction_index = undefined, // initialized with startBasicBlock
.src_implicit_return_type_list = std.ArrayList(*Instruction).init(irb.arena()),
.explicit_return_type = explicit_return_type,
};
}
pub fn abort(self: *Analyze) void {
self.irb.abort();
}
pub fn getNewBasicBlock(self: *Analyze, old_bb: *BasicBlock, ref_old_instruction: ?*Instruction) !*BasicBlock {
if (old_bb.child) |child| {
if (ref_old_instruction == null or child.ref_instruction != ref_old_instruction)
return child;
}
const new_bb = try self.irb.createBasicBlock(old_bb.scope, old_bb.name_hint);
new_bb.linkToParent(old_bb);
new_bb.ref_instruction = ref_old_instruction;
return new_bb;
}
pub fn startBasicBlock(self: *Analyze, old_bb: *BasicBlock, const_predecessor_bb: ?*BasicBlock) void {
self.instruction_index = 0;
self.parent_basic_block = old_bb;
self.const_predecessor_bb = const_predecessor_bb;
}
pub fn finishBasicBlock(ira: *Analyze, old_code: *Code) !void {
try ira.irb.code.basic_block_list.append(ira.irb.current_basic_block);
ira.instruction_index += 1;
while (ira.instruction_index < ira.parent_basic_block.instruction_list.len) {
const next_instruction = ira.parent_basic_block.instruction_list.at(ira.instruction_index);
if (!next_instruction.is_generated) {
try ira.addCompileError(next_instruction.span, "unreachable code");
break;
}
ira.instruction_index += 1;
}
ira.old_bb_index += 1;
var need_repeat = true;
while (true) {
while (ira.old_bb_index < old_code.basic_block_list.len) {
const old_bb = old_code.basic_block_list.at(ira.old_bb_index);
const new_bb = old_bb.child orelse {
ira.old_bb_index += 1;
continue;
};
if (new_bb.instruction_list.len != 0) {
ira.old_bb_index += 1;
continue;
}
ira.irb.current_basic_block = new_bb;
ira.startBasicBlock(old_bb, null);
return;
}
if (!need_repeat)
return;
need_repeat = false;
ira.old_bb_index = 0;
continue;
}
}
fn addCompileError(self: *Analyze, span: Span, comptime fmt: []const u8, args: ...) !void {
return self.irb.module.addCompileError(self.irb.parsed_file, span, fmt, args);
}
fn resolvePeerTypes(self: *Analyze, expected_type: ?*Type, peers: []const *Instruction) Analyze.Error!*Type {
// TODO actual implementation
return &Type.Void.get(self.irb.module).base;
}
fn implicitCast(self: *Analyze, target: *Instruction, optional_dest_type: ?*Type) Analyze.Error!*Instruction {
const dest_type = optional_dest_type orelse return target;
@panic("TODO implicitCast");
}
fn getCompTimeValOrNullUndefOk(self: *Analyze, target: *Instruction) ?*Value {
@panic("TODO getCompTimeValOrNullUndefOk");
}
fn getCompTimeRef(
self: *Analyze,
value: *Value,
ptr_mut: Value.Ptr.Mut,
mut: Type.Pointer.Mut,
volatility: Type.Pointer.Vol,
ptr_align: u32,
) Analyze.Error!*Instruction {
@panic("TODO getCompTimeRef");
}
};
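/// Generates unanalyzed IR for a function body: creates the entry block, generates
/// the body, and appends an implicit return (plus AddImplicitReturnType) when the
/// body does not end in a noreturn instruction.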
pub async fn gen(
module: *Module,
body_node: *ast.Node,
scope: *Scope,
end_span: Span,
parsed_file: *ParsedFile,
) !*Code {
var irb = try Builder.init(module, parsed_file);
errdefer irb.abort();
const entry_block = try irb.createBasicBlock(scope, "Entry");
entry_block.ref(); // Entry block gets a reference because we enter it to begin.
try irb.setCursorAtEndAndAppendBlock(entry_block);
const result = try irb.genNode(body_node, scope, LVal.None);
if (!result.isNoReturn()) {
_ = try irb.buildGen(
Instruction.AddImplicitReturnType,
scope,
end_span,
Instruction.AddImplicitReturnType.Params{ .target = result },
);
_ = try irb.buildGen(
Instruction.Return,
scope,
end_span,
Instruction.Return.Params{ .return_value = result },
);
}
return irb.finish();
}
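/// Analyzes the IR produced by gen: unreferenced instructions without side effects
/// are skipped, each remaining instruction is re-analyzed into a new Code, and the
/// function's return type is resolved from the implicit returns.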
pub async fn analyze(module: *Module, parsed_file: *ParsedFile, old_code: *Code, expected_type: ?*Type) !*Code {
var ira = try Analyze.init(module, parsed_file, expected_type);
errdefer ira.abort();
const old_entry_bb = old_code.basic_block_list.at(0);
const new_entry_bb = try ira.getNewBasicBlock(old_entry_bb, null);
new_entry_bb.ref();
ira.irb.current_basic_block = new_entry_bb;
ira.startBasicBlock(old_entry_bb, null);
while (ira.old_bb_index < old_code.basic_block_list.len) {
const old_instruction = ira.parent_basic_block.instruction_list.at(ira.instruction_index);
if (old_instruction.ref_count == 0 and !old_instruction.hasSideEffects()) {
ira.instruction_index += 1;
continue;
}
const return_inst = try old_instruction.analyze(&ira);
// Note: if we ever modify the above to handle error.CompileError by continuing analysis,
// then here we want to check if ira.isCompTime() and return early if true
if (return_inst.isNoReturn()) {
try ira.finishBasicBlock(old_code);
continue;
}
ira.instruction_index += 1;
}
if (ira.src_implicit_return_type_list.len == 0) {
ira.irb.code.return_type = &Type.NoReturn.get(module).base;
return ira.irb.finish();
}
ira.irb.code.return_type = try ira.resolvePeerTypes(expected_type, ira.src_implicit_return_type_list.toSliceConst());
return ira.irb.finish();
}