const std = @import("std"); const builtin = @import("builtin"); const Compilation = @import("compilation.zig").Compilation; const Scope = @import("scope.zig").Scope; const ast = std.zig.ast; const Allocator = std.mem.Allocator; const Value = @import("value.zig").Value; const Type = Value.Type; const assert = std.debug.assert; const Token = std.zig.Token; const ParsedFile = @import("parsed_file.zig").ParsedFile; const Span = @import("errmsg.zig").Span; const llvm = @import("llvm.zig"); const ObjectFile = @import("codegen.zig").ObjectFile; pub const LVal = enum { None, Ptr, }; pub const IrVal = union(enum) { Unknown, KnownType: *Type, KnownValue: *Value, const Init = enum { Unknown, NoReturn, Void, }; pub fn dump(self: IrVal) void { switch (self) { IrVal.Unknown => typeof.dump(), IrVal.KnownType => |typeof| { std.debug.warn("KnownType("); typeof.dump(); std.debug.warn(")"); }, IrVal.KnownValue => |value| { std.debug.warn("KnownValue("); value.dump(); std.debug.warn(")"); }, } } }; pub const Instruction = struct { id: Id, scope: *Scope, debug_id: usize, val: IrVal, ref_count: usize, span: Span, owner_bb: *BasicBlock, /// true if this instruction was generated by zig and not from user code is_generated: bool, /// the instruction that is derived from this one in analysis child: ?*Instruction, /// the instruction that this one derives from in analysis parent: ?*Instruction, /// populated durign codegen llvm_value: ?llvm.ValueRef, pub fn cast(base: *Instruction, comptime T: type) ?*T { if (base.id == comptime typeToId(T)) { return @fieldParentPtr(T, "base", base); } return null; } pub fn typeToId(comptime T: type) Id { comptime var i = 0; inline while (i < @memberCount(Id)) : (i += 1) { if (T == @field(Instruction, @memberName(Id, i))) { return @field(Id, @memberName(Id, i)); } } unreachable; } pub fn dump(base: *const Instruction) void { comptime var i = 0; inline while (i < @memberCount(Id)) : (i += 1) { if (base.id == @field(Id, @memberName(Id, i))) { const T = @field(Instruction, @memberName(Id, i)); std.debug.warn("#{} = {}(", base.debug_id, @tagName(base.id)); @fieldParentPtr(T, "base", base).dump(); std.debug.warn(")"); return; } } unreachable; } pub fn hasSideEffects(base: *const Instruction) bool { comptime var i = 0; inline while (i < @memberCount(Id)) : (i += 1) { if (base.id == @field(Id, @memberName(Id, i))) { const T = @field(Instruction, @memberName(Id, i)); return @fieldParentPtr(T, "base", base).hasSideEffects(); } } unreachable; } pub fn analyze(base: *Instruction, ira: *Analyze) Analyze.Error!*Instruction { comptime var i = 0; inline while (i < @memberCount(Id)) : (i += 1) { if (base.id == @field(Id, @memberName(Id, i))) { const T = @field(Instruction, @memberName(Id, i)); return @fieldParentPtr(T, "base", base).analyze(ira); } } unreachable; } pub fn render(base: *Instruction, ofile: *ObjectFile, fn_val: *Value.Fn) (error{OutOfMemory}!?llvm.ValueRef) { switch (base.id) { Id.Return => return @fieldParentPtr(Return, "base", base).render(ofile, fn_val), Id.Const => return @fieldParentPtr(Const, "base", base).render(ofile, fn_val), Id.Ref => @panic("TODO"), Id.DeclVar => @panic("TODO"), Id.CheckVoidStmt => @panic("TODO"), Id.Phi => @panic("TODO"), Id.Br => @panic("TODO"), Id.AddImplicitReturnType => @panic("TODO"), } } fn ref(base: *Instruction, builder: *Builder) void { base.ref_count += 1; if (base.owner_bb != builder.current_basic_block and !base.isCompTime()) { base.owner_bb.ref(); } } fn getAsParam(param: *Instruction) !*Instruction { const child = param.child orelse return 
    fn getAsParam(param: *Instruction) !*Instruction {
        const child = param.child orelse return error.SemanticAnalysisFailed;
        switch (child.val) {
            IrVal.Unknown => return error.SemanticAnalysisFailed,
            else => return child,
        }
    }

    /// asserts that the type is known
    fn getKnownType(self: *Instruction) *Type {
        switch (self.val) {
            IrVal.KnownType => |typeof| return typeof,
            IrVal.KnownValue => |value| return value.typeof,
            IrVal.Unknown => unreachable,
        }
    }

    pub fn setGenerated(base: *Instruction) void {
        base.is_generated = true;
    }

    pub fn isNoReturn(base: *const Instruction) bool {
        switch (base.val) {
            IrVal.Unknown => return false,
            IrVal.KnownValue => |x| return x.typeof.id == Type.Id.NoReturn,
            IrVal.KnownType => |typeof| return typeof.id == Type.Id.NoReturn,
        }
    }

    pub fn isCompTime(base: *const Instruction) bool {
        return base.val == IrVal.KnownValue;
    }

    pub fn linkToParent(self: *Instruction, parent: *Instruction) void {
        assert(self.parent == null);
        assert(parent.child == null);
        self.parent = parent;
        parent.child = self;
    }

    pub const Id = enum {
        Return,
        Const,
        Ref,
        DeclVar,
        CheckVoidStmt,
        Phi,
        Br,
        AddImplicitReturnType,
    };

    pub const Const = struct {
        base: Instruction,
        params: Params,

        const Params = struct {};

        // Use Builder.buildConst* methods, or, after building a Const instruction,
        // manually set the ir_val field.
        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(self: *const Const) void {
            self.base.val.KnownValue.dump();
        }

        pub fn hasSideEffects(self: *const Const) bool {
            return false;
        }

        pub fn analyze(self: *const Const, ira: *Analyze) !*Instruction {
            const new_inst = try ira.irb.build(Const, self.base.scope, self.base.span, Params{});
            new_inst.val = IrVal{ .KnownValue = self.base.val.KnownValue.getRef() };
            return new_inst;
        }

        pub fn render(self: *Const, ofile: *ObjectFile, fn_val: *Value.Fn) !?llvm.ValueRef {
            return self.base.val.KnownValue.getLlvmConst(ofile);
        }
    };

    pub const Return = struct {
        base: Instruction,
        params: Params,

        const Params = struct {
            return_value: *Instruction,
        };

        const ir_val_init = IrVal.Init.NoReturn;

        pub fn dump(self: *const Return) void {
            std.debug.warn("#{}", self.params.return_value.debug_id);
        }

        pub fn hasSideEffects(self: *const Return) bool {
            return true;
        }

        pub fn analyze(self: *const Return, ira: *Analyze) !*Instruction {
            const value = try self.params.return_value.getAsParam();
            const casted_value = try ira.implicitCast(value, ira.explicit_return_type);

            // TODO detect returning local variable address

            return ira.irb.build(Return, self.base.scope, self.base.span, Params{ .return_value = casted_value });
        }

        pub fn render(self: *Return, ofile: *ObjectFile, fn_val: *Value.Fn) ?llvm.ValueRef {
            const value = self.params.return_value.llvm_value;
            const return_type = self.params.return_value.getKnownType();

            if (return_type.handleIsPtr()) {
                @panic("TODO");
            } else {
                _ = llvm.BuildRet(ofile.builder, value);
            }
            return null;
        }
    };

    pub const Ref = struct {
        base: Instruction,
        params: Params,

        const Params = struct {
            target: *Instruction,
            mut: Type.Pointer.Mut,
            volatility: Type.Pointer.Vol,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const Ref) void {}

        pub fn hasSideEffects(inst: *const Ref) bool {
            return false;
        }

        pub fn analyze(self: *const Ref, ira: *Analyze) !*Instruction {
            const target = try self.params.target.getAsParam();

            if (ira.getCompTimeValOrNullUndefOk(target)) |val| {
                return ira.getCompTimeRef(
                    val,
                    Value.Ptr.Mut.CompTimeConst,
                    self.params.mut,
                    self.params.volatility,
                    val.typeof.getAbiAlignment(ira.irb.comp),
                );
            }
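
            // Runtime case: build a Ref instruction whose result type is a
            // single-item pointer to the target's type.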
            const new_inst = try ira.irb.build(Ref, self.base.scope, self.base.span, Params{
                .target = target,
                .mut = self.params.mut,
                .volatility = self.params.volatility,
            });
            const elem_type = target.getKnownType();
            const ptr_type = Type.Pointer.get(
                ira.irb.comp,
                elem_type,
                self.params.mut,
                self.params.volatility,
                Type.Pointer.Size.One,
                elem_type.getAbiAlignment(ira.irb.comp),
            );
            // TODO: potentially set the hint that this is a stack pointer. But it might not be - this
            // could be a ref of a global, for example
            new_inst.val = IrVal{ .KnownType = &ptr_type.base };
            // TODO potentially add an alloca entry here
            return new_inst;
        }
    };

    pub const DeclVar = struct {
        base: Instruction,
        params: Params,

        const Params = struct {
            variable: *Variable,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const DeclVar) void {}

        pub fn hasSideEffects(inst: *const DeclVar) bool {
            return true;
        }

        pub fn analyze(self: *const DeclVar, ira: *Analyze) !*Instruction {
            return error.Unimplemented; // TODO
        }
    };

    pub const CheckVoidStmt = struct {
        base: Instruction,
        params: Params,

        const Params = struct {
            target: *Instruction,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const CheckVoidStmt) void {}

        pub fn hasSideEffects(inst: *const CheckVoidStmt) bool {
            return true;
        }

        pub fn analyze(self: *const CheckVoidStmt, ira: *Analyze) !*Instruction {
            return error.Unimplemented; // TODO
        }
    };

    pub const Phi = struct {
        base: Instruction,
        params: Params,

        const Params = struct {
            incoming_blocks: []*BasicBlock,
            incoming_values: []*Instruction,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const Phi) void {}

        pub fn hasSideEffects(inst: *const Phi) bool {
            return false;
        }

        pub fn analyze(self: *const Phi, ira: *Analyze) !*Instruction {
            return error.Unimplemented; // TODO
        }
    };

    pub const Br = struct {
        base: Instruction,
        params: Params,

        const Params = struct {
            dest_block: *BasicBlock,
            is_comptime: *Instruction,
        };

        const ir_val_init = IrVal.Init.NoReturn;

        pub fn dump(inst: *const Br) void {}

        pub fn hasSideEffects(inst: *const Br) bool {
            return true;
        }

        pub fn analyze(self: *const Br, ira: *Analyze) !*Instruction {
            return error.Unimplemented; // TODO
        }
    };

    pub const AddImplicitReturnType = struct {
        base: Instruction,
        params: Params,

        pub const Params = struct {
            target: *Instruction,
        };

        const ir_val_init = IrVal.Init.Unknown;

        pub fn dump(inst: *const AddImplicitReturnType) void {
            std.debug.warn("#{}", inst.params.target.debug_id);
        }

        pub fn hasSideEffects(inst: *const AddImplicitReturnType) bool {
            return true;
        }

        pub fn analyze(self: *const AddImplicitReturnType, ira: *Analyze) !*Instruction {
            const target = try self.params.target.getAsParam();
            try ira.src_implicit_return_type_list.append(target);
            return ira.irb.buildConstVoid(self.base.scope, self.base.span, true);
        }
    };
};

pub const Variable = struct {
    child_scope: *Scope,
};

pub const BasicBlock = struct {
    ref_count: usize,
    name_hint: [*]const u8, // must be a C string literal
    debug_id: usize,
    scope: *Scope,
    instruction_list: std.ArrayList(*Instruction),
    ref_instruction: ?*Instruction,

    /// for codegen
    llvm_block: llvm.BasicBlockRef,
    llvm_exit_block: llvm.BasicBlockRef,

    /// the basic block that is derived from this one in analysis
    child: ?*BasicBlock,

    /// the basic block that this one derives from in analysis
    parent: ?*BasicBlock,

    pub fn ref(self: *BasicBlock) void {
        self.ref_count += 1;
    }

    pub fn linkToParent(self: *BasicBlock, parent: *BasicBlock) void {
        assert(self.parent == null);
        assert(parent.child == null);
        self.parent = parent;
        parent.child = self;
    }
};
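
/// The IR for one function body, produced by pass 1 (Builder) and produced
/// again, analyzed, by pass 2 (Analyze).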
/// Stuff that survives longer than Builder
pub const Code = struct {
    basic_block_list: std.ArrayList(*BasicBlock),
    arena: std.heap.ArenaAllocator,
    return_type: ?*Type,

    /// allocator is comp.gpa()
    pub fn destroy(self: *Code, allocator: *Allocator) void {
        self.arena.deinit();
        allocator.destroy(self);
    }

    pub fn dump(self: *Code) void {
        for (self.basic_block_list.toSliceConst()) |bb| {
            std.debug.warn("{s}_{}:\n", bb.name_hint, bb.debug_id);
            for (bb.instruction_list.toSliceConst()) |instr| {
                std.debug.warn("  ");
                instr.dump();
                std.debug.warn("\n");
            }
        }
    }
};

pub const Builder = struct {
    comp: *Compilation,
    code: *Code,
    current_basic_block: *BasicBlock,
    next_debug_id: usize,
    parsed_file: *ParsedFile,
    is_comptime: bool,

    pub const Error = Analyze.Error;

    pub fn init(comp: *Compilation, parsed_file: *ParsedFile) !Builder {
        const code = try comp.gpa().create(Code{
            .basic_block_list = undefined,
            .arena = std.heap.ArenaAllocator.init(comp.gpa()),
            .return_type = null,
        });
        code.basic_block_list = std.ArrayList(*BasicBlock).init(&code.arena.allocator);
        errdefer code.destroy(comp.gpa());

        return Builder{
            .comp = comp,
            .parsed_file = parsed_file,
            .current_basic_block = undefined,
            .code = code,
            .next_debug_id = 0,
            .is_comptime = false,
        };
    }

    pub fn abort(self: *Builder) void {
        self.code.destroy(self.comp.gpa());
    }

    /// Call code.destroy() when done
    pub fn finish(self: *Builder) *Code {
        return self.code;
    }

    /// No need to clean up resources thanks to the arena allocator.
    pub fn createBasicBlock(self: *Builder, scope: *Scope, name_hint: [*]const u8) !*BasicBlock {
        const basic_block = try self.arena().create(BasicBlock{
            .ref_count = 0,
            .name_hint = name_hint,
            .debug_id = self.next_debug_id,
            .scope = scope,
            .instruction_list = std.ArrayList(*Instruction).init(self.arena()),
            .child = null,
            .parent = null,
            .ref_instruction = null,
            .llvm_block = undefined,
            .llvm_exit_block = undefined,
        });
        self.next_debug_id += 1;
        return basic_block;
    }

    pub fn setCursorAtEndAndAppendBlock(self: *Builder, basic_block: *BasicBlock) !void {
        try self.code.basic_block_list.append(basic_block);
        self.setCursorAtEnd(basic_block);
    }

    pub fn setCursorAtEnd(self: *Builder, basic_block: *BasicBlock) void {
        self.current_basic_block = basic_block;
    }
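
    /// Pass 1: lower one AST node to IR instructions appended to the current
    /// basic block. LVal.Ptr asks for a pointer to the value (see lvalWrap).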
    pub fn genNode(irb: *Builder, node: *ast.Node, scope: *Scope, lval: LVal) Error!*Instruction {
        switch (node.id) {
            ast.Node.Id.Root => unreachable,
            ast.Node.Id.Use => unreachable,
            ast.Node.Id.TestDecl => unreachable,
            ast.Node.Id.VarDecl => @panic("TODO"),
            ast.Node.Id.Defer => @panic("TODO"),
            ast.Node.Id.InfixOp => @panic("TODO"),
            ast.Node.Id.PrefixOp => @panic("TODO"),
            ast.Node.Id.SuffixOp => @panic("TODO"),
            ast.Node.Id.Switch => @panic("TODO"),
            ast.Node.Id.While => @panic("TODO"),
            ast.Node.Id.For => @panic("TODO"),
            ast.Node.Id.If => @panic("TODO"),
            ast.Node.Id.ControlFlowExpression => return error.Unimplemented,
            ast.Node.Id.Suspend => @panic("TODO"),
            ast.Node.Id.VarType => @panic("TODO"),
            ast.Node.Id.ErrorType => @panic("TODO"),
            ast.Node.Id.FnProto => @panic("TODO"),
            ast.Node.Id.PromiseType => @panic("TODO"),
            ast.Node.Id.IntegerLiteral => @panic("TODO"),
            ast.Node.Id.FloatLiteral => @panic("TODO"),
            ast.Node.Id.StringLiteral => @panic("TODO"),
            ast.Node.Id.MultilineStringLiteral => @panic("TODO"),
            ast.Node.Id.CharLiteral => @panic("TODO"),
            ast.Node.Id.BoolLiteral => @panic("TODO"),
            ast.Node.Id.NullLiteral => @panic("TODO"),
            ast.Node.Id.UndefinedLiteral => @panic("TODO"),
            ast.Node.Id.ThisLiteral => @panic("TODO"),
            ast.Node.Id.Unreachable => @panic("TODO"),
            ast.Node.Id.Identifier => @panic("TODO"),
            ast.Node.Id.GroupedExpression => {
                const grouped_expr = @fieldParentPtr(ast.Node.GroupedExpression, "base", node);
                return irb.genNode(grouped_expr.expr, scope, lval);
            },
            ast.Node.Id.BuiltinCall => @panic("TODO"),
            ast.Node.Id.ErrorSetDecl => @panic("TODO"),
            ast.Node.Id.ContainerDecl => @panic("TODO"),
            ast.Node.Id.Asm => @panic("TODO"),
            ast.Node.Id.Comptime => @panic("TODO"),
            ast.Node.Id.Block => {
                const block = @fieldParentPtr(ast.Node.Block, "base", node);
                return irb.lvalWrap(scope, try irb.genBlock(block, scope), lval);
            },
            ast.Node.Id.DocComment => @panic("TODO"),
            ast.Node.Id.SwitchCase => @panic("TODO"),
            ast.Node.Id.SwitchElse => @panic("TODO"),
            ast.Node.Id.Else => @panic("TODO"),
            ast.Node.Id.Payload => @panic("TODO"),
            ast.Node.Id.PointerPayload => @panic("TODO"),
            ast.Node.Id.PointerIndexPayload => @panic("TODO"),
            ast.Node.Id.StructField => @panic("TODO"),
            ast.Node.Id.UnionTag => @panic("TODO"),
            ast.Node.Id.EnumTag => @panic("TODO"),
            ast.Node.Id.ErrorTag => @panic("TODO"),
            ast.Node.Id.AsmInput => @panic("TODO"),
            ast.Node.Id.AsmOutput => @panic("TODO"),
            ast.Node.Id.AsyncAttribute => @panic("TODO"),
            ast.Node.Id.ParamDecl => @panic("TODO"),
            ast.Node.Id.FieldInitializer => @panic("TODO"),
        }
    }

    fn isCompTime(irb: *Builder, target_scope: *Scope) bool {
        if (irb.is_comptime) return true;

        var scope = target_scope;
        while (true) {
            switch (scope.id) {
                Scope.Id.CompTime => return true,
                Scope.Id.FnDef => return false,
                Scope.Id.Decls => unreachable,
                Scope.Id.Block,
                Scope.Id.Defer,
                Scope.Id.DeferExpr,
                => scope = scope.parent orelse return false,
            }
        }
    }

    pub fn genBlock(irb: *Builder, block: *ast.Node.Block, parent_scope: *Scope) !*Instruction {
        const block_scope = try Scope.Block.create(irb.comp, parent_scope);

        const outer_block_scope = &block_scope.base;
        var child_scope = outer_block_scope;

        if (parent_scope.findFnDef()) |fndef_scope| {
            if (fndef_scope.fn_val.child_scope == parent_scope) {
                fndef_scope.fn_val.block_scope = block_scope;
            }
        }

        if (block.statements.len == 0) {
            // {}
            return irb.buildConstVoid(child_scope, Span.token(block.lbrace), false);
        }

        if (block.label) |label| {
            block_scope.incoming_values = std.ArrayList(*Instruction).init(irb.arena());
            block_scope.incoming_blocks = std.ArrayList(*BasicBlock).init(irb.arena());
            block_scope.end_block = try irb.createBasicBlock(parent_scope, c"BlockEnd");
            block_scope.is_comptime = try irb.buildConstBool(
                parent_scope,
                Span.token(block.lbrace),
                irb.isCompTime(parent_scope),
            );
        }

        var is_continuation_unreachable = false;
        var noreturn_return_value: ?*Instruction = null;

        var stmt_it = block.statements.iterator(0);
        while (stmt_it.next()) |statement_node_ptr| {
            const statement_node = statement_node_ptr.*;

            if (statement_node.cast(ast.Node.Defer)) |defer_node| {
                // defer starts a new scope
                const defer_token = irb.parsed_file.tree.tokens.at(defer_node.defer_token);
                const kind = switch (defer_token.id) {
                    Token.Id.Keyword_defer => Scope.Defer.Kind.ScopeExit,
                    Token.Id.Keyword_errdefer => Scope.Defer.Kind.ErrorExit,
                    else => unreachable,
                };
                const defer_expr_scope = try Scope.DeferExpr.create(irb.comp, parent_scope, defer_node.expr);
                const defer_child_scope = try Scope.Defer.create(irb.comp, parent_scope, kind, defer_expr_scope);
                child_scope = &defer_child_scope.base;
                continue;
            }

            const statement_value = try irb.genNode(statement_node, child_scope, LVal.None);
            is_continuation_unreachable = statement_value.isNoReturn();
            if (is_continuation_unreachable) {
                // keep the last noreturn statement value around in case we need to return it
                noreturn_return_value = statement_value;
            }

            if (statement_value.cast(Instruction.DeclVar)) |decl_var| {
                // variable declarations start a new scope
                child_scope = decl_var.params.variable.child_scope;
            } else if (!is_continuation_unreachable) {
                // this statement's value must be void
                _ = try irb.build(
                    Instruction.CheckVoidStmt,
                    child_scope,
                    statement_value.span,
                    Instruction.CheckVoidStmt.Params{ .target = statement_value },
                );
            }
        }

        if (is_continuation_unreachable) {
            assert(noreturn_return_value != null);
            if (block.label == null or block_scope.incoming_blocks.len == 0) {
                return noreturn_return_value.?;
            }

            try irb.setCursorAtEndAndAppendBlock(block_scope.end_block);
            return irb.build(Instruction.Phi, parent_scope, Span.token(block.rbrace), Instruction.Phi.Params{
                .incoming_blocks = block_scope.incoming_blocks.toOwnedSlice(),
                .incoming_values = block_scope.incoming_values.toOwnedSlice(),
            });
        }

        if (block.label) |label| {
            try block_scope.incoming_blocks.append(irb.current_basic_block);
            try block_scope.incoming_values.append(
                try irb.buildConstVoid(parent_scope, Span.token(block.rbrace), true),
            );
            _ = try irb.genDefersForBlock(child_scope, outer_block_scope, Scope.Defer.Kind.ScopeExit);
            _ = try irb.buildGen(Instruction.Br, parent_scope, Span.token(block.rbrace), Instruction.Br.Params{
                .dest_block = block_scope.end_block,
                .is_comptime = block_scope.is_comptime,
            });
            try irb.setCursorAtEndAndAppendBlock(block_scope.end_block);
            return irb.build(Instruction.Phi, parent_scope, Span.token(block.rbrace), Instruction.Phi.Params{
                .incoming_blocks = block_scope.incoming_blocks.toOwnedSlice(),
                .incoming_values = block_scope.incoming_values.toOwnedSlice(),
            });
        }

        _ = try irb.genDefersForBlock(child_scope, outer_block_scope, Scope.Defer.Kind.ScopeExit);
        return irb.buildConstVoid(child_scope, Span.token(block.rbrace), true);
    }
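
    /// Emits the defer expressions registered in the scopes between inner_scope
    /// and outer_scope. Returns true if any generated defer expression is noreturn.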
    fn genDefersForBlock(
        irb: *Builder,
        inner_scope: *Scope,
        outer_scope: *Scope,
        gen_kind: Scope.Defer.Kind,
    ) !bool {
        var scope = inner_scope;
        var is_noreturn = false;
        while (true) {
            switch (scope.id) {
                Scope.Id.Defer => {
                    const defer_scope = @fieldParentPtr(Scope.Defer, "base", scope);
                    const generate = switch (defer_scope.kind) {
                        Scope.Defer.Kind.ScopeExit => true,
                        Scope.Defer.Kind.ErrorExit => gen_kind == Scope.Defer.Kind.ErrorExit,
                    };
                    if (generate) {
                        const defer_expr_scope = defer_scope.defer_expr_scope;
                        const instruction = try irb.genNode(
                            defer_expr_scope.expr_node,
                            &defer_expr_scope.base,
                            LVal.None,
                        );
                        if (instruction.isNoReturn()) {
                            is_noreturn = true;
                        } else {
                            _ = try irb.build(
                                Instruction.CheckVoidStmt,
                                &defer_expr_scope.base,
                                Span.token(defer_expr_scope.expr_node.lastToken()),
                                Instruction.CheckVoidStmt.Params{ .target = instruction },
                            );
                        }
                    }
                },
                Scope.Id.FnDef,
                Scope.Id.Decls,
                => return is_noreturn,
                Scope.Id.CompTime,
                Scope.Id.Block,
                => scope = scope.parent orelse return is_noreturn,
                Scope.Id.DeferExpr => unreachable,
            }
        }
    }

    pub fn lvalWrap(irb: *Builder, scope: *Scope, instruction: *Instruction, lval: LVal) !*Instruction {
        switch (lval) {
            LVal.None => return instruction,
            LVal.Ptr => {
                // We needed a pointer to a value, but we got a value. So we create
                // an instruction which just makes a const pointer of it.
                return irb.build(Instruction.Ref, scope, instruction.span, Instruction.Ref.Params{
                    .target = instruction,
                    .mut = Type.Pointer.Mut.Const,
                    .volatility = Type.Pointer.Vol.Non,
                });
            },
        }
    }
    fn arena(self: *Builder) *Allocator {
        return &self.code.arena.allocator;
    }

    fn buildExtra(
        self: *Builder,
        comptime I: type,
        scope: *Scope,
        span: Span,
        params: I.Params,
        is_generated: bool,
    ) !*Instruction {
        const inst = try self.arena().create(I{
            .base = Instruction{
                .id = Instruction.typeToId(I),
                .is_generated = is_generated,
                .scope = scope,
                .debug_id = self.next_debug_id,
                .val = switch (I.ir_val_init) {
                    IrVal.Init.Unknown => IrVal.Unknown,
                    IrVal.Init.NoReturn => IrVal{ .KnownValue = &Value.NoReturn.get(self.comp).base },
                    IrVal.Init.Void => IrVal{ .KnownValue = &Value.Void.get(self.comp).base },
                },
                .ref_count = 0,
                .span = span,
                .child = null,
                .parent = null,
                .llvm_value = undefined,
                .owner_bb = self.current_basic_block,
            },
            .params = params,
        });

        // Look at the params and ref() other instructions
        comptime var i = 0;
        inline while (i < @memberCount(I.Params)) : (i += 1) {
            const FieldType = comptime @typeOf(@field(I.Params(undefined), @memberName(I.Params, i)));
            switch (FieldType) {
                *Instruction => @field(inst.params, @memberName(I.Params, i)).ref(self),
                ?*Instruction => if (@field(inst.params, @memberName(I.Params, i))) |other| other.ref(self),
                else => {},
            }
        }

        self.next_debug_id += 1;
        try self.current_basic_block.instruction_list.append(&inst.base);
        return &inst.base;
    }

    fn build(
        self: *Builder,
        comptime I: type,
        scope: *Scope,
        span: Span,
        params: I.Params,
    ) !*Instruction {
        return self.buildExtra(I, scope, span, params, false);
    }

    fn buildGen(
        self: *Builder,
        comptime I: type,
        scope: *Scope,
        span: Span,
        params: I.Params,
    ) !*Instruction {
        return self.buildExtra(I, scope, span, params, true);
    }

    fn buildConstBool(self: *Builder, scope: *Scope, span: Span, x: bool) !*Instruction {
        const inst = try self.build(Instruction.Const, scope, span, Instruction.Const.Params{});
        inst.val = IrVal{ .KnownValue = &Value.Bool.get(self.comp, x).base };
        return inst;
    }

    fn buildConstVoid(self: *Builder, scope: *Scope, span: Span, is_generated: bool) !*Instruction {
        const inst = try self.buildExtra(Instruction.Const, scope, span, Instruction.Const.Params{}, is_generated);
        inst.val = IrVal{ .KnownValue = &Value.Void.get(self.comp).base };
        return inst;
    }
};
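
/// Pass 2 of IR generation: walks the pass-1 instructions, resolving types and
/// comptime values, and builds a new Code out of the analyzed instructions.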
const Analyze = struct {
    irb: Builder,
    old_bb_index: usize,
    const_predecessor_bb: ?*BasicBlock,
    parent_basic_block: *BasicBlock,
    instruction_index: usize,
    src_implicit_return_type_list: std.ArrayList(*Instruction),
    explicit_return_type: ?*Type,

    pub const Error = error{
        /// This is only for when we have already reported a compile error. It is the poison value.
        SemanticAnalysisFailed,

        /// This is a placeholder - it is useful to use instead of panicking but once the compiler is
        /// done this error code will be removed.
        Unimplemented,

        OutOfMemory,
    };

    pub fn init(comp: *Compilation, parsed_file: *ParsedFile, explicit_return_type: ?*Type) !Analyze {
        var irb = try Builder.init(comp, parsed_file);
        errdefer irb.abort();

        return Analyze{
            .irb = irb,
            .old_bb_index = 0,
            .const_predecessor_bb = null,
            .parent_basic_block = undefined, // initialized with startBasicBlock
            .instruction_index = undefined, // initialized with startBasicBlock
            .src_implicit_return_type_list = std.ArrayList(*Instruction).init(irb.arena()),
            .explicit_return_type = explicit_return_type,
        };
    }

    pub fn abort(self: *Analyze) void {
        self.irb.abort();
    }

    pub fn getNewBasicBlock(self: *Analyze, old_bb: *BasicBlock, ref_old_instruction: ?*Instruction) !*BasicBlock {
        if (old_bb.child) |child| {
            if (ref_old_instruction == null or child.ref_instruction != ref_old_instruction)
                return child;
        }

        const new_bb = try self.irb.createBasicBlock(old_bb.scope, old_bb.name_hint);
        new_bb.linkToParent(old_bb);
        new_bb.ref_instruction = ref_old_instruction;
        return new_bb;
    }

    pub fn startBasicBlock(self: *Analyze, old_bb: *BasicBlock, const_predecessor_bb: ?*BasicBlock) void {
        self.instruction_index = 0;
        self.parent_basic_block = old_bb;
        self.const_predecessor_bb = const_predecessor_bb;
    }

    pub fn finishBasicBlock(ira: *Analyze, old_code: *Code) !void {
        try ira.irb.code.basic_block_list.append(ira.irb.current_basic_block);
        ira.instruction_index += 1;

        while (ira.instruction_index < ira.parent_basic_block.instruction_list.len) {
            const next_instruction = ira.parent_basic_block.instruction_list.at(ira.instruction_index);

            if (!next_instruction.is_generated) {
                try ira.addCompileError(next_instruction.span, "unreachable code");
                break;
            }
            ira.instruction_index += 1;
        }

        ira.old_bb_index += 1;

        var need_repeat = true;
        while (true) {
            while (ira.old_bb_index < old_code.basic_block_list.len) {
                const old_bb = old_code.basic_block_list.at(ira.old_bb_index);
                const new_bb = old_bb.child orelse {
                    ira.old_bb_index += 1;
                    continue;
                };
                if (new_bb.instruction_list.len != 0) {
                    ira.old_bb_index += 1;
                    continue;
                }
                ira.irb.current_basic_block = new_bb;

                ira.startBasicBlock(old_bb, null);
                return;
            }
            if (!need_repeat)
                return;
            need_repeat = false;
            ira.old_bb_index = 0;
            continue;
        }
    }

    fn addCompileError(self: *Analyze, span: Span, comptime fmt: []const u8, args: ...) !void {
        return self.irb.comp.addCompileError(self.irb.parsed_file, span, fmt, args);
    }

    fn resolvePeerTypes(self: *Analyze, expected_type: ?*Type, peers: []const *Instruction) Analyze.Error!*Type {
        // TODO actual implementation
        return &Type.Void.get(self.irb.comp).base;
    }

    fn implicitCast(self: *Analyze, target: *Instruction, optional_dest_type: ?*Type) Analyze.Error!*Instruction {
        const dest_type = optional_dest_type orelse return target;
        @panic("TODO implicitCast");
    }

    fn getCompTimeValOrNullUndefOk(self: *Analyze, target: *Instruction) ?*Value {
        @panic("TODO getCompTimeValOrNullUndefOk");
    }

    fn getCompTimeRef(
        self: *Analyze,
        value: *Value,
        ptr_mut: Value.Ptr.Mut,
        mut: Type.Pointer.Mut,
        volatility: Type.Pointer.Vol,
        ptr_align: u32,
    ) Analyze.Error!*Instruction {
        @panic("TODO getCompTimeRef");
    }
};
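
/// Pass 1: generate unanalyzed IR for a function body.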
pub async fn gen(
    comp: *Compilation,
    body_node: *ast.Node,
    scope: *Scope,
    end_span: Span,
    parsed_file: *ParsedFile,
) !*Code {
    var irb = try Builder.init(comp, parsed_file);
    errdefer irb.abort();

    const entry_block = try irb.createBasicBlock(scope, c"Entry");
    entry_block.ref(); // Entry block gets a reference because we enter it to begin.
    try irb.setCursorAtEndAndAppendBlock(entry_block);

    const result = try irb.genNode(body_node, scope, LVal.None);
    if (!result.isNoReturn()) {
        _ = try irb.buildGen(
            Instruction.AddImplicitReturnType,
            scope,
            end_span,
            Instruction.AddImplicitReturnType.Params{ .target = result },
        );
        _ = try irb.buildGen(
            Instruction.Return,
            scope,
            end_span,
            Instruction.Return.Params{ .return_value = result },
        );
    }

    return irb.finish();
}

pub async fn analyze(comp: *Compilation, parsed_file: *ParsedFile, old_code: *Code, expected_type: ?*Type) !*Code {
    var ira = try Analyze.init(comp, parsed_file, expected_type);
    errdefer ira.abort();

    const old_entry_bb = old_code.basic_block_list.at(0);

    const new_entry_bb = try ira.getNewBasicBlock(old_entry_bb, null);
    new_entry_bb.ref();

    ira.irb.current_basic_block = new_entry_bb;

    ira.startBasicBlock(old_entry_bb, null);

    while (ira.old_bb_index < old_code.basic_block_list.len) {
        const old_instruction = ira.parent_basic_block.instruction_list.at(ira.instruction_index);

        if (old_instruction.ref_count == 0 and !old_instruction.hasSideEffects()) {
            ira.instruction_index += 1;
            continue;
        }

        const return_inst = try old_instruction.analyze(&ira);
        return_inst.linkToParent(old_instruction);

        // Note: if we ever modify the above to handle error.CompileError by continuing analysis,
        // then here we want to check if ira.isCompTime() and return early if true

        if (return_inst.isNoReturn()) {
            try ira.finishBasicBlock(old_code);
            continue;
        }

        ira.instruction_index += 1;
    }

    if (ira.src_implicit_return_type_list.len == 0) {
        ira.irb.code.return_type = &Type.NoReturn.get(comp).base;
        return ira.irb.finish();
    }

    ira.irb.code.return_type = try ira.resolvePeerTypes(expected_type, ira.src_implicit_return_type_list.toSliceConst());
    return ira.irb.finish();
}