self-hosted: refactor ParsedFile out of existence

also we are successfully analyzing the return type of main

parent 1d85b588ea
commit 3908b4fdee
@@ -21,7 +21,6 @@ const Scope = @import("scope.zig").Scope;
const Decl = @import("decl.zig").Decl;
const ir = @import("ir.zig");
const Visib = @import("visib.zig").Visib;
const ParsedFile = @import("parsed_file.zig").ParsedFile;
const Value = @import("value.zig").Value;
const Type = Value.Type;
const Span = errmsg.Span;
@@ -470,6 +469,35 @@ pub const Compilation = struct {
return comp;
}

/// it does ref the result because it could be an arbitrary integer size
pub fn getPrimitiveType(comp: *Compilation, name: []const u8) !?*Type {
if (name.len >= 2) {
switch (name[0]) {
'i', 'u' => blk: {
for (name[1..]) |byte|
switch (byte) {
'0'...'9' => {},
else => break :blk,
};
const is_signed = name[0] == 'i';
const bit_count = std.fmt.parseUnsigned(u32, name[1..], 10) catch |err| switch (err) {
error.Overflow => return error.Overflow,
error.InvalidCharacter => unreachable, // we just checked the characters above
};
@panic("get int type - need to make everything async");
},
else => {},
}
}

if (comp.primitive_type_table.get(name)) |entry| {
entry.value.base.ref();
return entry.value;
}

return null;
}

fn initTypes(comp: *Compilation) !void {
comp.meta_type = try comp.arena().create(Type.MetaType{
.base = Type{
@@ -671,82 +699,81 @@ pub const Compilation = struct {
}

async fn compileAndLink(self: *Compilation) !void {
const root_src_path = self.root_src_path orelse @panic("TODO handle null root src path");
// TODO async/await os.path.real
const root_src_real_path = os.path.real(self.gpa(), root_src_path) catch |err| {
try printError("unable to get real path '{}': {}", root_src_path, err);
return err;
};
errdefer self.gpa().free(root_src_real_path);
if (self.root_src_path) |root_src_path| {
// TODO async/await os.path.real
const root_src_real_path = os.path.real(self.gpa(), root_src_path) catch |err| {
try printError("unable to get real path '{}': {}", root_src_path, err);
return err;
};
const root_scope = blk: {
errdefer self.gpa().free(root_src_real_path);

// TODO async/await readFileAlloc()
const source_code = io.readFileAlloc(self.gpa(), root_src_real_path) catch |err| {
try printError("unable to open '{}': {}", root_src_real_path, err);
return err;
};
errdefer self.gpa().free(source_code);
// TODO async/await readFileAlloc()
const source_code = io.readFileAlloc(self.gpa(), root_src_real_path) catch |err| {
try printError("unable to open '{}': {}", root_src_real_path, err);
return err;
};
errdefer self.gpa().free(source_code);

const parsed_file = try self.gpa().create(ParsedFile{
.tree = undefined,
.realpath = root_src_real_path,
});
errdefer self.gpa().destroy(parsed_file);
var tree = try std.zig.parse(self.gpa(), source_code);
errdefer tree.deinit();

parsed_file.tree = try std.zig.parse(self.gpa(), source_code);
errdefer parsed_file.tree.deinit();
break :blk try Scope.Root.create(self, tree, root_src_real_path);
};
defer root_scope.base.deref(self);

const tree = &parsed_file.tree;
const tree = &root_scope.tree;

// create empty struct for it
const decls = try Scope.Decls.create(self, null);
defer decls.base.deref(self);
const decls = try Scope.Decls.create(self, &root_scope.base);
defer decls.base.deref(self);

var decl_group = event.Group(BuildError!void).init(self.loop);
errdefer decl_group.cancelAll();
var decl_group = event.Group(BuildError!void).init(self.loop);
errdefer decl_group.cancelAll();

var it = tree.root_node.decls.iterator(0);
while (it.next()) |decl_ptr| {
const decl = decl_ptr.*;
switch (decl.id) {
ast.Node.Id.Comptime => {
const comptime_node = @fieldParentPtr(ast.Node.Comptime, "base", decl);
var it = tree.root_node.decls.iterator(0);
while (it.next()) |decl_ptr| {
const decl = decl_ptr.*;
switch (decl.id) {
ast.Node.Id.Comptime => {
const comptime_node = @fieldParentPtr(ast.Node.Comptime, "base", decl);

try decl_group.call(addCompTimeBlock, self, parsed_file, &decls.base, comptime_node);
},
ast.Node.Id.VarDecl => @panic("TODO"),
ast.Node.Id.FnProto => {
const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl);
try decl_group.call(addCompTimeBlock, self, &decls.base, comptime_node);
},
ast.Node.Id.VarDecl => @panic("TODO"),
ast.Node.Id.FnProto => {
const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl);

const name = if (fn_proto.name_token) |name_token| tree.tokenSlice(name_token) else {
try self.addCompileError(parsed_file, Span{
.first = fn_proto.fn_token,
.last = fn_proto.fn_token + 1,
}, "missing function name");
continue;
};
const name = if (fn_proto.name_token) |name_token| tree.tokenSlice(name_token) else {
try self.addCompileError(root_scope, Span{
.first = fn_proto.fn_token,
.last = fn_proto.fn_token + 1,
}, "missing function name");
continue;
};

const fn_decl = try self.gpa().create(Decl.Fn{
.base = Decl{
.id = Decl.Id.Fn,
.name = name,
.visib = parseVisibToken(tree, fn_proto.visib_token),
.resolution = event.Future(BuildError!void).init(self.loop),
.resolution_in_progress = 0,
.parsed_file = parsed_file,
.parent_scope = &decls.base,
},
.value = Decl.Fn.Val{ .Unresolved = {} },
.fn_proto = fn_proto,
});
errdefer self.gpa().destroy(fn_decl);
const fn_decl = try self.gpa().create(Decl.Fn{
.base = Decl{
.id = Decl.Id.Fn,
.name = name,
.visib = parseVisibToken(tree, fn_proto.visib_token),
.resolution = event.Future(BuildError!void).init(self.loop),
.resolution_in_progress = 0,
.parent_scope = &decls.base,
},
.value = Decl.Fn.Val{ .Unresolved = {} },
.fn_proto = fn_proto,
});
errdefer self.gpa().destroy(fn_decl);

try decl_group.call(addTopLevelDecl, self, &fn_decl.base);
},
ast.Node.Id.TestDecl => @panic("TODO"),
else => unreachable,
try decl_group.call(addTopLevelDecl, self, &fn_decl.base);
},
ast.Node.Id.TestDecl => @panic("TODO"),
else => unreachable,
}
}
try await (async decl_group.wait() catch unreachable);
}
try await (async decl_group.wait() catch unreachable);

try await (async self.prelink_group.wait() catch unreachable);

const any_prelink_errors = blk: {
@@ -764,23 +791,15 @@ pub const Compilation = struct {
/// caller takes ownership of resulting Code
async fn genAndAnalyzeCode(
comp: *Compilation,
parsed_file: *ParsedFile,
scope: *Scope,
node: *ast.Node,
expected_type: ?*Type,
) !?*ir.Code {
const unanalyzed_code = (await (async ir.gen(
) !*ir.Code {
const unanalyzed_code = try await (async ir.gen(
comp,
node,
scope,
parsed_file,
) catch unreachable)) catch |err| switch (err) {
// This poison value should not cause the errdefers to run. It simply means
// that self.compile_errors is populated.
// TODO https://github.com/ziglang/zig/issues/769
error.SemanticAnalysisFailed => return null,
else => return err,
};
) catch unreachable);
defer unanalyzed_code.destroy(comp.gpa());

if (comp.verbose_ir) {
@@ -788,44 +807,46 @@ pub const Compilation = struct {
unanalyzed_code.dump();
}

const analyzed_code = (await (async ir.analyze(
const analyzed_code = try await (async ir.analyze(
comp,
parsed_file,
unanalyzed_code,
expected_type,
) catch unreachable)) catch |err| switch (err) {
// This poison value should not cause the errdefers to run. It simply means
// that self.compile_errors is populated.
// TODO https://github.com/ziglang/zig/issues/769
error.SemanticAnalysisFailed => return null,
else => return err,
};
) catch unreachable);
errdefer analyzed_code.destroy(comp.gpa());

if (comp.verbose_ir) {
std.debug.warn("analyzed:\n");
analyzed_code.dump();
}

return analyzed_code;
}

async fn addCompTimeBlock(
comp: *Compilation,
parsed_file: *ParsedFile,
scope: *Scope,
comptime_node: *ast.Node.Comptime,
) !void {
const void_type = Type.Void.get(comp);
defer void_type.base.base.deref(comp);

const analyzed_code = (try await (async genAndAnalyzeCode(
const analyzed_code = (await (async genAndAnalyzeCode(
comp,
parsed_file,
scope,
comptime_node.expr,
&void_type.base,
) catch unreachable)) orelse return;
) catch unreachable)) catch |err| switch (err) {
// This poison value should not cause the errdefers to run. It simply means
// that comp.compile_errors is populated.
error.SemanticAnalysisFailed => return {},
else => return err,
};
analyzed_code.destroy(comp.gpa());
}

async fn addTopLevelDecl(self: *Compilation, decl: *Decl) !void {
const is_export = decl.isExported(&decl.parsed_file.tree);
const tree = &decl.findRootScope().tree;
const is_export = decl.isExported(tree);

if (is_export) {
try self.prelink_group.call(verifyUniqueSymbol, self, decl);
@@ -833,24 +854,24 @@ pub const Compilation = struct {
}
}

fn addCompileError(self: *Compilation, parsed_file: *ParsedFile, span: Span, comptime fmt: []const u8, args: ...) !void {
const text = try std.fmt.allocPrint(self.loop.allocator, fmt, args);
errdefer self.loop.allocator.free(text);
fn addCompileError(self: *Compilation, root: *Scope.Root, span: Span, comptime fmt: []const u8, args: ...) !void {
const text = try std.fmt.allocPrint(self.gpa(), fmt, args);
errdefer self.gpa().free(text);

try self.prelink_group.call(addCompileErrorAsync, self, parsed_file, span, text);
try self.prelink_group.call(addCompileErrorAsync, self, root, span, text);
}

async fn addCompileErrorAsync(
self: *Compilation,
parsed_file: *ParsedFile,
root: *Scope.Root,
span: Span,
text: []u8,
) !void {
const msg = try self.loop.allocator.create(errmsg.Msg{
.path = parsed_file.realpath,
.path = root.realpath,
.text = text,
.span = span,
.tree = &parsed_file.tree,
.tree = &root.tree,
});
errdefer self.loop.allocator.destroy(msg);

@@ -866,7 +887,7 @@ pub const Compilation = struct {

if (try exported_symbol_names.value.put(decl.name, decl)) |other_decl| {
try self.addCompileError(
decl.parsed_file,
decl.findRootScope(),
decl.getSpan(),
"exported symbol collision: '{}'",
decl.name,
@@ -988,6 +1009,24 @@ pub const Compilation = struct {
fn registerGarbage(comp: *Compilation, comptime T: type, node: *std.atomic.Stack(*T).Node) void {
// TODO put the garbage somewhere
}

/// Returns a value which has been ref()'d once
async fn analyzeConstValue(comp: *Compilation, scope: *Scope, node: *ast.Node, expected_type: *Type) !*Value {
const analyzed_code = try await (async comp.genAndAnalyzeCode(scope, node, expected_type) catch unreachable);
defer analyzed_code.destroy(comp.gpa());

return analyzed_code.getCompTimeResult(comp);
}

async fn analyzeTypeExpr(comp: *Compilation, scope: *Scope, node: *ast.Node) !*Type {
const meta_type = &Type.MetaType.get(comp).base;
defer meta_type.base.deref(comp);

const result_val = try await (async comp.analyzeConstValue(scope, node, meta_type) catch unreachable);
errdefer result_val.base.deref(comp);

return result_val.cast(Type).?;
}
};

fn printError(comptime format: []const u8, args: ...) !void {
@@ -1011,7 +1050,12 @@ fn parseVisibToken(tree: *ast.Tree, optional_token_index: ?ast.TokenIndex) Visib
pub async fn resolveDecl(comp: *Compilation, decl: *Decl) !void {
if (await (async decl.resolution.start() catch unreachable)) |ptr| return ptr.*;

decl.resolution.data = await (async generateDecl(comp, decl) catch unreachable);
decl.resolution.data = (await (async generateDecl(comp, decl) catch unreachable)) catch |err| switch (err) {
// This poison value should not cause the errdefers to run. It simply means
// that comp.compile_errors is populated.
error.SemanticAnalysisFailed => {},
else => err,
};
decl.resolution.resolve();
return decl.resolution.data;
}
@@ -1034,9 +1078,12 @@ async fn generateDeclFn(comp: *Compilation, fn_decl: *Decl.Fn) !void {
const fndef_scope = try Scope.FnDef.create(comp, fn_decl.base.parent_scope);
defer fndef_scope.base.deref(comp);

// TODO actually look at the return type of the AST
const return_type = &Type.Void.get(comp).base;
defer return_type.base.deref(comp);
const return_type_node = switch (fn_decl.fn_proto.return_type) {
ast.Node.FnProto.ReturnType.Explicit => |n| n,
ast.Node.FnProto.ReturnType.InferErrorSet => |n| n,
};
const return_type = try await (async comp.analyzeTypeExpr(&fndef_scope.base, return_type_node) catch unreachable);
return_type.base.deref(comp);

const is_var_args = false;
const params = ([*]Type.Fn.Param)(undefined)[0..0];
@@ -1050,19 +1097,13 @@ async fn generateDeclFn(comp: *Compilation, fn_decl: *Decl.Fn) !void {
const fn_val = try Value.Fn.create(comp, fn_type, fndef_scope, symbol_name);
fn_decl.value = Decl.Fn.Val{ .Ok = fn_val };

const analyzed_code = (try await (async comp.genAndAnalyzeCode(
fn_decl.base.parsed_file,
const analyzed_code = try await (async comp.genAndAnalyzeCode(
&fndef_scope.base,
body_node,
return_type,
) catch unreachable)) orelse return;
) catch unreachable);
errdefer analyzed_code.destroy(comp.gpa());

if (comp.verbose_ir) {
std.debug.warn("analyzed:\n");
analyzed_code.dump();
}

// Kick off rendering to LLVM module, but it doesn't block the fn decl
// analysis from being complete.
try comp.prelink_group.call(codegen.renderToLlvm, comp, fn_val, analyzed_code);
@@ -3,7 +3,6 @@ const Allocator = mem.Allocator;
const mem = std.mem;
const ast = std.zig.ast;
const Visib = @import("visib.zig").Visib;
const ParsedFile = @import("parsed_file.zig").ParsedFile;
const event = std.event;
const Value = @import("value.zig").Value;
const Token = std.zig.Token;
@@ -17,7 +16,6 @@ pub const Decl = struct {
visib: Visib,
resolution: event.Future(Compilation.BuildError!void),
resolution_in_progress: u8,
parsed_file: *ParsedFile,
parent_scope: *Scope,

pub const Table = std.HashMap([]const u8, *Decl, mem.hash_slice_u8, mem.eql_slice_u8);
@@ -48,6 +46,10 @@ pub const Decl = struct {
}
}

pub fn findRootScope(base: *const Decl) *Scope.Root {
return base.parent_scope.findRoot();
}

pub const Id = enum {
Var,
Fn,
@@ -8,7 +8,6 @@ const Value = @import("value.zig").Value;
const Type = Value.Type;
const assert = std.debug.assert;
const Token = std.zig.Token;
const ParsedFile = @import("parsed_file.zig").ParsedFile;
const Span = @import("errmsg.zig").Span;
const llvm = @import("llvm.zig");
const ObjectFile = @import("codegen.zig").ObjectFile;
@@ -611,6 +610,33 @@ pub const Code = struct {
}
}
}

/// returns a ref-incremented value, or adds a compile error
pub fn getCompTimeResult(self: *Code, comp: *Compilation) !*Value {
const bb = self.basic_block_list.at(0);
for (bb.instruction_list.toSliceConst()) |inst| {
if (inst.cast(Inst.Return)) |ret_inst| {
const ret_value = ret_inst.params.return_value;
if (ret_value.isCompTime()) {
return ret_value.val.KnownValue.getRef();
}
try comp.addCompileError(
ret_value.scope.findRoot(),
ret_value.span,
"unable to evaluate constant expression",
);
return error.SemanticAnalysisFailed;
} else if (inst.hasSideEffects()) {
try comp.addCompileError(
inst.scope.findRoot(),
inst.span,
"unable to evaluate constant expression",
);
return error.SemanticAnalysisFailed;
}
}
unreachable;
}
};

pub const Builder = struct {
@@ -618,14 +644,14 @@ pub const Builder = struct {
code: *Code,
current_basic_block: *BasicBlock,
next_debug_id: usize,
parsed_file: *ParsedFile,
root_scope: *Scope.Root,
is_comptime: bool,
is_async: bool,
begin_scope: ?*Scope,

pub const Error = Analyze.Error;

pub fn init(comp: *Compilation, parsed_file: *ParsedFile, begin_scope: ?*Scope) !Builder {
pub fn init(comp: *Compilation, root_scope: *Scope.Root, begin_scope: ?*Scope) !Builder {
const code = try comp.gpa().create(Code{
.basic_block_list = undefined,
.arena = std.heap.ArenaAllocator.init(comp.gpa()),
@@ -636,7 +662,7 @@ pub const Builder = struct {

return Builder{
.comp = comp,
.parsed_file = parsed_file,
.root_scope = root_scope,
.current_basic_block = undefined,
.code = code,
.next_debug_id = 0,
@@ -718,7 +744,10 @@ pub const Builder = struct {
ast.Node.Id.UndefinedLiteral => return error.Unimplemented,
ast.Node.Id.ThisLiteral => return error.Unimplemented,
ast.Node.Id.Unreachable => return error.Unimplemented,
ast.Node.Id.Identifier => return error.Unimplemented,
ast.Node.Id.Identifier => {
const identifier = @fieldParentPtr(ast.Node.Identifier, "base", node);
return irb.genIdentifier(identifier, scope, lval);
},
ast.Node.Id.GroupedExpression => {
const grouped_expr = @fieldParentPtr(ast.Node.GroupedExpression, "base", node);
return irb.genNode(grouped_expr.expr, scope, lval);
@@ -761,16 +790,17 @@ pub const Builder = struct {
Scope.Id.CompTime => return true,
Scope.Id.FnDef => return false,
Scope.Id.Decls => unreachable,
Scope.Id.Root => unreachable,
Scope.Id.Block,
Scope.Id.Defer,
Scope.Id.DeferExpr,
=> scope = scope.parent orelse return false,
=> scope = scope.parent.?,
}
}
}

pub fn genIntLit(irb: *Builder, int_lit: *ast.Node.IntegerLiteral, scope: *Scope) !*Inst {
const int_token = irb.parsed_file.tree.tokenSlice(int_lit.token);
const int_token = irb.root_scope.tree.tokenSlice(int_lit.token);

var base: u8 = undefined;
var rest: []const u8 = undefined;
@@ -845,7 +875,7 @@ pub const Builder = struct {

if (statement_node.cast(ast.Node.Defer)) |defer_node| {
// defer starts a new scope
const defer_token = irb.parsed_file.tree.tokens.at(defer_node.defer_token);
const defer_token = irb.root_scope.tree.tokens.at(defer_node.defer_token);
const kind = switch (defer_token.id) {
Token.Id.Keyword_defer => Scope.Defer.Kind.ScopeExit,
Token.Id.Keyword_errdefer => Scope.Defer.Kind.ErrorExit,
@@ -928,7 +958,7 @@ pub const Builder = struct {
const src_span = Span.token(control_flow_expr.ltoken);
if (scope.findFnDef() == null) {
try irb.comp.addCompileError(
irb.parsed_file,
irb.root_scope,
src_span,
"return expression outside function definition",
);
@@ -938,7 +968,7 @@ pub const Builder = struct {
if (scope.findDeferExpr()) |scope_defer_expr| {
if (!scope_defer_expr.reported_err) {
try irb.comp.addCompileError(
irb.parsed_file,
irb.root_scope,
src_span,
"cannot return from defer expression",
);
@@ -1012,6 +1042,69 @@ pub const Builder = struct {
}
}

pub fn genIdentifier(irb: *Builder, identifier: *ast.Node.Identifier, scope: *Scope, lval: LVal) !*Inst {
const src_span = Span.token(identifier.token);
const name = irb.root_scope.tree.tokenSlice(identifier.token);

//if (buf_eql_str(variable_name, "_") && lval == LValPtr) {
//    IrInstructionConst *const_instruction = ir_build_instruction<IrInstructionConst>(irb, scope, node);
//    const_instruction->base.value.type = get_pointer_to_type(irb->codegen,
//            irb->codegen->builtin_types.entry_void, false);
//    const_instruction->base.value.special = ConstValSpecialStatic;
//    const_instruction->base.value.data.x_ptr.special = ConstPtrSpecialDiscard;
//    return &const_instruction->base;
//}

if (irb.comp.getPrimitiveType(name)) |result| {
if (result) |primitive_type| {
defer primitive_type.base.deref(irb.comp);
switch (lval) {
LVal.Ptr => return error.Unimplemented,
LVal.None => return irb.buildConstValue(scope, src_span, &primitive_type.base),
}
}
} else |err| switch (err) {
error.Overflow => {
try irb.comp.addCompileError(irb.root_scope, src_span, "integer too large");
return error.SemanticAnalysisFailed;
},
}
//TypeTableEntry *primitive_type = get_primitive_type(irb->codegen, variable_name);
//if (primitive_type != nullptr) {
//    IrInstruction *value = ir_build_const_type(irb, scope, node, primitive_type);
//    if (lval == LValPtr) {
//        return ir_build_ref(irb, scope, node, value, false, false);
//    } else {
//        return value;
//    }
//}

//VariableTableEntry *var = find_variable(irb->codegen, scope, variable_name);
//if (var) {
//    IrInstruction *var_ptr = ir_build_var_ptr(irb, scope, node, var);
//    if (lval == LValPtr)
//        return var_ptr;
//    else
//        return ir_build_load_ptr(irb, scope, node, var_ptr);
//}

//Tld *tld = find_decl(irb->codegen, scope, variable_name);
//if (tld)
//    return ir_build_decl_ref(irb, scope, node, tld, lval);

//if (node->owner->any_imports_failed) {
//    // skip the error message since we had a failing import in this file
//    // if an import breaks we don't need redundant undeclared identifier errors
//    return irb->codegen->invalid_instruction;
//}

// TODO put a variable of same name with invalid type in global scope
// so that future references to this same name will find a variable with an invalid type

try irb.comp.addCompileError(irb.root_scope, src_span, "unknown identifier '{}'", name);
return error.SemanticAnalysisFailed;
}

const DeferCounts = struct {
scope_exit: usize,
error_exit: usize,
@@ -1035,10 +1128,11 @@ pub const Builder = struct {

Scope.Id.CompTime,
Scope.Id.Block,
Scope.Id.Decls,
Scope.Id.Root,
=> scope = scope.parent orelse break,

Scope.Id.DeferExpr => unreachable,
Scope.Id.Decls => unreachable,
}
}
return result;
@@ -1081,6 +1175,7 @@ pub const Builder = struct {
},
Scope.Id.FnDef,
Scope.Id.Decls,
Scope.Id.Root,
=> return is_noreturn,

Scope.Id.CompTime,
@@ -1188,6 +1283,12 @@ pub const Builder = struct {
return inst;
}

fn buildConstValue(self: *Builder, scope: *Scope, span: Span, v: *Value) !*Inst {
const inst = try self.build(Inst.Const, scope, span, Inst.Const.Params{});
inst.val = IrVal{ .KnownValue = v.getRef() };
return inst;
}

/// If the code is explicitly set to be comptime, then builds a const bool,
/// otherwise builds a TestCompTime instruction.
fn buildTestCompTime(self: *Builder, scope: *Scope, span: Span, target: *Inst) !*Inst {
@@ -1259,8 +1360,8 @@ const Analyze = struct {
OutOfMemory,
};

pub fn init(comp: *Compilation, parsed_file: *ParsedFile, explicit_return_type: ?*Type) !Analyze {
var irb = try Builder.init(comp, parsed_file, null);
pub fn init(comp: *Compilation, root_scope: *Scope.Root, explicit_return_type: ?*Type) !Analyze {
var irb = try Builder.init(comp, root_scope, null);
errdefer irb.abort();

return Analyze{
@@ -1338,7 +1439,7 @@ const Analyze = struct {
}

fn addCompileError(self: *Analyze, span: Span, comptime fmt: []const u8, args: ...) !void {
return self.irb.comp.addCompileError(self.irb.parsed_file, span, fmt, args);
return self.irb.comp.addCompileError(self.irb.root_scope, span, fmt, args);
}

fn resolvePeerTypes(self: *Analyze, expected_type: ?*Type, peers: []const *Inst) Analyze.Error!*Type {
@@ -1800,9 +1901,8 @@ pub async fn gen(
comp: *Compilation,
body_node: *ast.Node,
scope: *Scope,
parsed_file: *ParsedFile,
) !*Code {
var irb = try Builder.init(comp, parsed_file, scope);
var irb = try Builder.init(comp, scope.findRoot(), scope);
errdefer irb.abort();

const entry_block = try irb.createBasicBlock(scope, c"Entry");
@@ -1818,11 +1918,12 @@ pub async fn gen(
return irb.finish();
}

pub async fn analyze(comp: *Compilation, parsed_file: *ParsedFile, old_code: *Code, expected_type: ?*Type) !*Code {
var ira = try Analyze.init(comp, parsed_file, expected_type);
errdefer ira.abort();

pub async fn analyze(comp: *Compilation, old_code: *Code, expected_type: ?*Type) !*Code {
const old_entry_bb = old_code.basic_block_list.at(0);
const root_scope = old_entry_bb.scope.findRoot();

var ira = try Analyze.init(comp, root_scope, expected_type);
errdefer ira.abort();

const new_entry_bb = try ira.getNewBasicBlock(old_entry_bb, null);
new_entry_bb.ref();
@@ -1,6 +0,0 @@
const ast = @import("std").zig.ast;

pub const ParsedFile = struct {
tree: ast.Tree,
realpath: []const u8,
};
@@ -8,6 +8,7 @@ const ast = std.zig.ast;
const Value = @import("value.zig").Value;
const ir = @import("ir.zig");
const Span = @import("errmsg.zig").Span;
const assert = std.debug.assert;

pub const Scope = struct {
id: Id,
@@ -23,7 +24,8 @@ pub const Scope = struct {
if (base.ref_count == 0) {
if (base.parent) |parent| parent.deref(comp);
switch (base.id) {
Id.Decls => @fieldParentPtr(Decls, "base", base).destroy(),
Id.Root => @fieldParentPtr(Root, "base", base).destroy(comp),
Id.Decls => @fieldParentPtr(Decls, "base", base).destroy(comp),
Id.Block => @fieldParentPtr(Block, "base", base).destroy(comp),
Id.FnDef => @fieldParentPtr(FnDef, "base", base).destroy(comp),
Id.CompTime => @fieldParentPtr(CompTime, "base", base).destroy(comp),
@@ -33,6 +35,15 @@ pub const Scope = struct {
}
}

pub fn findRoot(base: *Scope) *Root {
var scope = base;
while (scope.parent) |parent| {
scope = parent;
}
assert(scope.id == Id.Root);
return @fieldParentPtr(Root, "base", scope);
}

pub fn findFnDef(base: *Scope) ?*FnDef {
var scope = base;
while (true) {
@@ -44,6 +55,7 @@ pub const Scope = struct {
Id.Defer,
Id.DeferExpr,
Id.CompTime,
Id.Root,
=> scope = scope.parent orelse return null,
}
}
@@ -62,12 +74,14 @@ pub const Scope = struct {
Id.Block,
Id.Defer,
Id.CompTime,
Id.Root,
=> scope = scope.parent orelse return null,
}
}
}

pub const Id = enum {
Root,
Decls,
Block,
FnDef,
@@ -76,12 +90,43 @@ pub const Scope = struct {
DeferExpr,
};

pub const Root = struct {
base: Scope,
tree: ast.Tree,
realpath: []const u8,

/// Creates a Root scope with 1 reference
/// Takes ownership of realpath
/// Caller must set tree
pub fn create(comp: *Compilation, tree: ast.Tree, realpath: []u8) !*Root {
const self = try comp.gpa().create(Root{
.base = Scope{
.id = Id.Root,
.parent = null,
.ref_count = 1,
},
.tree = tree,
.realpath = realpath,
});
errdefer comp.gpa().destroy(self);

return self;
}

pub fn destroy(self: *Root, comp: *Compilation) void {
comp.gpa().free(self.tree.source);
self.tree.deinit();
comp.gpa().free(self.realpath);
comp.gpa().destroy(self);
}
};

pub const Decls = struct {
base: Scope,
table: Decl.Table,

/// Creates a Decls scope with 1 reference
pub fn create(comp: *Compilation, parent: ?*Scope) !*Decls {
pub fn create(comp: *Compilation, parent: *Scope) !*Decls {
const self = try comp.gpa().create(Decls{
.base = Scope{
.id = Id.Decls,
@@ -95,14 +140,14 @@ pub const Scope = struct {
self.table = Decl.Table.init(comp.gpa());
errdefer self.table.deinit();

if (parent) |p| p.ref();
parent.ref();

return self;
}

pub fn destroy(self: *Decls) void {
pub fn destroy(self: *Decls, comp: *Compilation) void {
self.table.deinit();
self.table.allocator.destroy(self);
comp.gpa().destroy(self);
}
};

@@ -143,7 +188,7 @@ pub const Scope = struct {
};

/// Creates a Block scope with 1 reference
pub fn create(comp: *Compilation, parent: ?*Scope) !*Block {
pub fn create(comp: *Compilation, parent: *Scope) !*Block {
const self = try comp.gpa().create(Block{
.base = Scope{
.id = Id.Block,
@@ -158,7 +203,7 @@ pub const Scope = struct {
});
errdefer comp.gpa().destroy(self);

if (parent) |p| p.ref();
parent.ref();
return self;
}

@@ -175,7 +220,7 @@ pub const Scope = struct {

/// Creates a FnDef scope with 1 reference
/// Must set the fn_val later
pub fn create(comp: *Compilation, parent: ?*Scope) !*FnDef {
pub fn create(comp: *Compilation, parent: *Scope) !*FnDef {
const self = try comp.gpa().create(FnDef{
.base = Scope{
.id = Id.FnDef,
@@ -185,7 +230,7 @@ pub const Scope = struct {
.fn_val = undefined,
});

if (parent) |p| p.ref();
parent.ref();

return self;
}
@@ -199,7 +244,7 @@ pub const Scope = struct {
base: Scope,

/// Creates a CompTime scope with 1 reference
pub fn create(comp: *Compilation, parent: ?*Scope) !*CompTime {
pub fn create(comp: *Compilation, parent: *Scope) !*CompTime {
const self = try comp.gpa().create(CompTime{
.base = Scope{
.id = Id.CompTime,
@@ -208,7 +253,7 @@ pub const Scope = struct {
},
});

if (parent) |p| p.ref();
parent.ref();
return self;
}

@@ -230,7 +275,7 @@ pub const Scope = struct {
/// Creates a Defer scope with 1 reference
pub fn create(
comp: *Compilation,
parent: ?*Scope,
parent: *Scope,
kind: Kind,
defer_expr_scope: *DeferExpr,
) !*Defer {
@@ -247,7 +292,7 @@ pub const Scope = struct {

defer_expr_scope.base.ref();

if (parent) |p| p.ref();
parent.ref();
return self;
}

@@ -263,7 +308,7 @@ pub const Scope = struct {
reported_err: bool,

/// Creates a DeferExpr scope with 1 reference
pub fn create(comp: *Compilation, parent: ?*Scope, expr_node: *ast.Node) !*DeferExpr {
pub fn create(comp: *Compilation, parent: *Scope, expr_node: *ast.Node) !*DeferExpr {
const self = try comp.gpa().create(DeferExpr{
.base = Scope{
.id = Id.DeferExpr,
@@ -275,7 +320,7 @@ pub const Scope = struct {
});
errdefer comp.gpa().destroy(self);

if (parent) |p| p.ref();
parent.ref();
return self;
}

@@ -39,6 +39,11 @@ pub const Value = struct {
return base;
}

pub fn cast(base: *Value, comptime T: type) ?*T {
if (base.id != @field(Id, @typeName(T))) return null;
return @fieldParentPtr(T, "base", base);
}

pub fn dump(base: *const Value) void {
std.debug.warn("{}", @tagName(base.id));
}