self-hosted: AST flattening, astgen improvements, result locations, and more

* AST: flatten ControlFlowExpression into Continue, Break, and Return.
 * AST: unify identifiers and literals into a single AST type: OneToken.
 * AST: ControlFlowExpression uses TrailerFlags to optimize storage
   space.
 * astgen: support `var` as well as `const` locals, and support
   explicitly typed locals. The corresponding Module and codegen code is
   not implemented yet.
 * astgen: support result locations (see the sketch after this list).
 * ZIR: add the following instructions (see the corresponding doc
   comments for explanations of semantics):
   - alloc
   - alloc_inferred
   - bitcast_result_ptr
   - coerce_result_block_ptr
   - coerce_result_ptr
   - coerce_to_ptr_elem
   - ensure_result_used
   - ensure_result_non_error
   - ret_ptr
   - ret_type
   - store
   - param_type
 * the skeleton structure for result locations is set up. It's looking
   pretty clean so far.
 * add a compile error for unused results and a compile error for
   discarding errors (see the example after this list).
 * astgen: split builtin calls up so that each is implemented manually,
   and implement `@as`, `@bitCast` (and others) with respect to result
   locations (see the `@as` sketch after this list).
 * add CLI support for the hex and raw object formats. They are not
   supported by the self-hosted compiler yet and emit an error when
   selected.
 * rename the `--c` CLI option to `-ofmt=[objectformat]`, which accepts
   any of the object formats. Only ELF and C are supported so far. Also
   added the missing entry to the help text.
 * Remove hard tabs from C backend test cases. Shame on you Noam, you
   are grounded, you should know better, etc. Bad boy.
 * Delete C backend code and test case that relied on comptime_int
   incorrectly making it all the way to codegen.
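
A minimal sketch of the new local-variable lowering, assuming the `varDecl`
logic in the astgen changes below; the function name `locals` and the exact
set of cases that allocate are illustrative only:

    fn locals() u32 {
        var a: u32 = 1; // typed var: an `alloc` of u32 is emitted and the init
                        // expression is evaluated with a `.ptr` result location
        var b = a;      // untyped var: `alloc_inferred`; the init expression
                        // gets an `.inferred_ptr` result location
        const c = b;    // const with a simple rvalue init: the value itself is
                        // tracked by the scope (Scope.LocalVal), no allocation
        return c;
    }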
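
For the reworked builtins, the destination type now flows into the operand as
a result location rather than being coerced after the fact. A hedged sketch;
the helper `widen` is hypothetical:

    fn widen(x: u8) u16 {
        // The `u16` becomes a `.ty` result location for `x`, so the coercion
        // happens where the operand value is produced.
        return @as(u16, x);
    }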
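
The two new compile errors, shown on hypothetical code. The messages come from
`analyzeInstEnsureResultUsed` and `analyzeInstEnsureResultNonError` in the
Module changes below; exactly where `ensure_result_non_error` is emitted is not
visible in this diff, so the second case reflects the intent rather than a
guarantee:

    fn answer() u32 {
        return 42;
    }

    fn fallible() anyerror!void {
        return error.Oops;
    }

    fn demo() void {
        answer();       // error: expression value is ignored
        _ = fallible(); // intended error: error is discarded
    }
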
master
Andrew Kelley 2020-07-23 23:05:26 -07:00
parent b4d383a478
commit aac6e8c418
12 changed files with 1183 additions and 771 deletions


@ -436,6 +436,8 @@ pub const Target = struct {
macho,
wasm,
c,
hex,
raw,
};
pub const SubSystem = enum {


@ -495,8 +495,10 @@ pub const Node = struct {
While,
For,
If,
ControlFlowExpression,
Suspend,
Continue,
Break,
Return,
// Type expressions
AnyType,
@ -601,6 +603,24 @@ pub const Node = struct {
.Try,
=> SimplePrefixOp,
.Identifier,
.BoolLiteral,
.NullLiteral,
.UndefinedLiteral,
.Unreachable,
.AnyType,
.ErrorType,
.IntegerLiteral,
.FloatLiteral,
.StringLiteral,
.CharLiteral,
=> OneToken,
.Continue,
.Break,
.Return,
=> ControlFlowExpression,
.ArrayType => ArrayType,
.ArrayTypeSentinel => ArrayTypeSentinel,
@ -621,23 +641,11 @@ pub const Node = struct {
.While => While,
.For => For,
.If => If,
.ControlFlowExpression => ControlFlowExpression,
.Suspend => Suspend,
.AnyType => AnyType,
.ErrorType => ErrorType,
.FnProto => FnProto,
.AnyFrameType => AnyFrameType,
.IntegerLiteral => IntegerLiteral,
.FloatLiteral => FloatLiteral,
.EnumLiteral => EnumLiteral,
.StringLiteral => StringLiteral,
.MultilineStringLiteral => MultilineStringLiteral,
.CharLiteral => CharLiteral,
.BoolLiteral => BoolLiteral,
.NullLiteral => NullLiteral,
.UndefinedLiteral => UndefinedLiteral,
.Unreachable => Unreachable,
.Identifier => Identifier,
.GroupedExpression => GroupedExpression,
.BuiltinCall => BuiltinCall,
.ErrorSetDecl => ErrorSetDecl,
@ -1182,19 +1190,19 @@ pub const Node = struct {
}
};
pub const Identifier = struct {
base: Node = Node{ .tag = .Identifier },
pub const OneToken = struct {
base: Node,
token: TokenIndex,
pub fn iterate(self: *const Identifier, index: usize) ?*Node {
pub fn iterate(self: *const OneToken, index: usize) ?*Node {
return null;
}
pub fn firstToken(self: *const Identifier) TokenIndex {
pub fn firstToken(self: *const OneToken) TokenIndex {
return self.token;
}
pub fn lastToken(self: *const Identifier) TokenIndex {
pub fn lastToken(self: *const OneToken) TokenIndex {
return self.token;
}
};
@ -2569,34 +2577,65 @@ pub const Node = struct {
}
};
/// TODO break this into separate Break, Continue, Return AST Nodes to save memory.
/// Could be further broken into LabeledBreak, LabeledContinue, and ReturnVoid to save even more.
/// Trailed in memory by possibly many things, with each optional thing
/// determined by a bit in `trailer_flags`.
/// Can be: return, break, continue
pub const ControlFlowExpression = struct {
base: Node = Node{ .tag = .ControlFlowExpression },
base: Node,
trailer_flags: TrailerFlags,
ltoken: TokenIndex,
kind: Kind,
rhs: ?*Node,
pub const Kind = union(enum) {
Break: ?*Node,
Continue: ?*Node,
Return,
pub const TrailerFlags = std.meta.TrailerFlags(struct {
rhs: *Node,
label: TokenIndex,
});
pub const RequiredFields = struct {
tag: Tag,
ltoken: TokenIndex,
};
pub fn getRHS(self: *const ControlFlowExpression) ?*Node {
return self.getTrailer("rhs");
}
pub fn getLabel(self: *const ControlFlowExpression) ?TokenIndex {
return self.getTrailer("label");
}
pub fn getTrailer(self: *const ControlFlowExpression, comptime name: []const u8) ?TrailerFlags.Field(name) {
const trailers_start = @ptrCast([*]const u8, self) + @sizeOf(ControlFlowExpression);
return self.trailer_flags.get(trailers_start, name);
}
pub fn setTrailer(self: *ControlFlowExpression, comptime name: []const u8, value: TrailerFlags.Field(name)) void {
const trailers_start = @ptrCast([*]u8, self) + @sizeOf(ControlFlowExpression);
self.trailer_flags.set(trailers_start, name, value);
}
pub fn create(allocator: *mem.Allocator, required: RequiredFields, trailers: anytype) !*ControlFlowExpression {
const trailer_flags = TrailerFlags.init(trailers);
const bytes = try allocator.alignedAlloc(u8, @alignOf(ControlFlowExpression), sizeInBytes(trailer_flags));
const ctrl_flow_expr = @ptrCast(*ControlFlowExpression, bytes.ptr);
ctrl_flow_expr.* = .{
.base = .{ .tag = required.tag },
.trailer_flags = trailer_flags,
.ltoken = required.ltoken,
};
const trailers_start = bytes.ptr + @sizeOf(ControlFlowExpression);
trailer_flags.setMany(trailers_start, trailers);
return ctrl_flow_expr;
}
pub fn destroy(self: *ControlFlowExpression, allocator: *mem.Allocator) void {
const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.trailer_flags)];
allocator.free(bytes);
}
pub fn iterate(self: *const ControlFlowExpression, index: usize) ?*Node {
var i = index;
switch (self.kind) {
.Break, .Continue => |maybe_label| {
if (maybe_label) |label| {
if (i < 1) return label;
i -= 1;
}
},
.Return => {},
}
if (self.rhs) |rhs| {
if (self.getRHS()) |rhs| {
if (i < 1) return rhs;
i -= 1;
}
@ -2609,21 +2648,20 @@ pub const Node = struct {
}
pub fn lastToken(self: *const ControlFlowExpression) TokenIndex {
if (self.rhs) |rhs| {
if (self.getRHS()) |rhs| {
return rhs.lastToken();
}
switch (self.kind) {
.Break, .Continue => |maybe_label| {
if (maybe_label) |label| {
return label.lastToken();
}
},
.Return => return self.ltoken,
if (self.getLabel()) |label| {
return label;
}
return self.ltoken;
}
fn sizeInBytes(trailer_flags: TrailerFlags) usize {
return @sizeOf(ControlFlowExpression) + trailer_flags.sizeInBytes();
}
};
pub const Suspend = struct {
@ -2655,23 +2693,6 @@ pub const Node = struct {
}
};
pub const IntegerLiteral = struct {
base: Node = Node{ .tag = .IntegerLiteral },
token: TokenIndex,
pub fn iterate(self: *const IntegerLiteral, index: usize) ?*Node {
return null;
}
pub fn firstToken(self: *const IntegerLiteral) TokenIndex {
return self.token;
}
pub fn lastToken(self: *const IntegerLiteral) TokenIndex {
return self.token;
}
};
pub const EnumLiteral = struct {
base: Node = Node{ .tag = .EnumLiteral },
dot: TokenIndex,
@ -2690,23 +2711,6 @@ pub const Node = struct {
}
};
pub const FloatLiteral = struct {
base: Node = Node{ .tag = .FloatLiteral },
token: TokenIndex,
pub fn iterate(self: *const FloatLiteral, index: usize) ?*Node {
return null;
}
pub fn firstToken(self: *const FloatLiteral) TokenIndex {
return self.token;
}
pub fn lastToken(self: *const FloatLiteral) TokenIndex {
return self.token;
}
};
/// Parameters are in memory following BuiltinCall.
pub const BuiltinCall = struct {
base: Node = Node{ .tag = .BuiltinCall },
@ -2757,23 +2761,6 @@ pub const Node = struct {
}
};
pub const StringLiteral = struct {
base: Node = Node{ .tag = .StringLiteral },
token: TokenIndex,
pub fn iterate(self: *const StringLiteral, index: usize) ?*Node {
return null;
}
pub fn firstToken(self: *const StringLiteral) TokenIndex {
return self.token;
}
pub fn lastToken(self: *const StringLiteral) TokenIndex {
return self.token;
}
};
/// The string literal tokens appear directly in memory after MultilineStringLiteral.
pub const MultilineStringLiteral = struct {
base: Node = Node{ .tag = .MultilineStringLiteral },
@ -2817,74 +2804,6 @@ pub const Node = struct {
}
};
pub const CharLiteral = struct {
base: Node = Node{ .tag = .CharLiteral },
token: TokenIndex,
pub fn iterate(self: *const CharLiteral, index: usize) ?*Node {
return null;
}
pub fn firstToken(self: *const CharLiteral) TokenIndex {
return self.token;
}
pub fn lastToken(self: *const CharLiteral) TokenIndex {
return self.token;
}
};
pub const BoolLiteral = struct {
base: Node = Node{ .tag = .BoolLiteral },
token: TokenIndex,
pub fn iterate(self: *const BoolLiteral, index: usize) ?*Node {
return null;
}
pub fn firstToken(self: *const BoolLiteral) TokenIndex {
return self.token;
}
pub fn lastToken(self: *const BoolLiteral) TokenIndex {
return self.token;
}
};
pub const NullLiteral = struct {
base: Node = Node{ .tag = .NullLiteral },
token: TokenIndex,
pub fn iterate(self: *const NullLiteral, index: usize) ?*Node {
return null;
}
pub fn firstToken(self: *const NullLiteral) TokenIndex {
return self.token;
}
pub fn lastToken(self: *const NullLiteral) TokenIndex {
return self.token;
}
};
pub const UndefinedLiteral = struct {
base: Node = Node{ .tag = .UndefinedLiteral },
token: TokenIndex,
pub fn iterate(self: *const UndefinedLiteral, index: usize) ?*Node {
return null;
}
pub fn firstToken(self: *const UndefinedLiteral) TokenIndex {
return self.token;
}
pub fn lastToken(self: *const UndefinedLiteral) TokenIndex {
return self.token;
}
};
pub const Asm = struct {
base: Node = Node{ .tag = .Asm },
asm_token: TokenIndex,
@ -2904,7 +2823,7 @@ pub const Node = struct {
rparen: TokenIndex,
pub const Kind = union(enum) {
Variable: *Identifier,
Variable: *OneToken,
Return: *Node,
};
@ -3005,57 +2924,6 @@ pub const Node = struct {
}
};
pub const Unreachable = struct {
base: Node = Node{ .tag = .Unreachable },
token: TokenIndex,
pub fn iterate(self: *const Unreachable, index: usize) ?*Node {
return null;
}
pub fn firstToken(self: *const Unreachable) TokenIndex {
return self.token;
}
pub fn lastToken(self: *const Unreachable) TokenIndex {
return self.token;
}
};
pub const ErrorType = struct {
base: Node = Node{ .tag = .ErrorType },
token: TokenIndex,
pub fn iterate(self: *const ErrorType, index: usize) ?*Node {
return null;
}
pub fn firstToken(self: *const ErrorType) TokenIndex {
return self.token;
}
pub fn lastToken(self: *const ErrorType) TokenIndex {
return self.token;
}
};
pub const AnyType = struct {
base: Node = Node{ .tag = .AnyType },
token: TokenIndex,
pub fn iterate(self: *const AnyType, index: usize) ?*Node {
return null;
}
pub fn firstToken(self: *const AnyType) TokenIndex {
return self.token;
}
pub fn lastToken(self: *const AnyType) TokenIndex {
return self.token;
}
};
/// TODO remove from the Node base struct
/// TODO actually maybe remove entirely in favor of iterating backward from Node.firstToken()
/// and forwards to find same-line doc comments.


@ -628,8 +628,11 @@ const Parser = struct {
var type_expr: ?*Node = null;
if (p.eatToken(.Colon)) |_| {
if (p.eatToken(.Keyword_anytype) orelse p.eatToken(.Keyword_var)) |anytype_tok| {
const node = try p.arena.allocator.create(Node.AnyType);
node.* = .{ .token = anytype_tok };
const node = try p.arena.allocator.create(Node.OneToken);
node.* = .{
.base = .{ .tag = .AnyType },
.token = anytype_tok,
};
type_expr = &node.base;
} else {
type_expr = try p.expectNode(parseTypeExpr, .{
@ -1079,12 +1082,13 @@ const Parser = struct {
if (p.eatToken(.Keyword_break)) |token| {
const label = try p.parseBreakLabel();
const expr_node = try p.parseExpr();
const node = try p.arena.allocator.create(Node.ControlFlowExpression);
node.* = .{
const node = try Node.ControlFlowExpression.create(&p.arena.allocator, .{
.tag = .Break,
.ltoken = token,
.kind = .{ .Break = label },
}, .{
.label = label,
.rhs = expr_node,
};
});
return &node.base;
}
@ -1115,12 +1119,13 @@ const Parser = struct {
if (p.eatToken(.Keyword_continue)) |token| {
const label = try p.parseBreakLabel();
const node = try p.arena.allocator.create(Node.ControlFlowExpression);
node.* = .{
const node = try Node.ControlFlowExpression.create(&p.arena.allocator, .{
.tag = .Continue,
.ltoken = token,
.kind = .{ .Continue = label },
}, .{
.label = label,
.rhs = null,
};
});
return &node.base;
}
@ -1139,12 +1144,12 @@ const Parser = struct {
if (p.eatToken(.Keyword_return)) |token| {
const expr_node = try p.parseExpr();
const node = try p.arena.allocator.create(Node.ControlFlowExpression);
node.* = .{
const node = try Node.ControlFlowExpression.create(&p.arena.allocator, .{
.tag = .Return,
.ltoken = token,
.kind = .Return,
}, .{
.rhs = expr_node,
};
});
return &node.base;
}
@ -1516,8 +1521,9 @@ const Parser = struct {
fn parsePrimaryTypeExpr(p: *Parser) !?*Node {
if (try p.parseBuiltinCall()) |node| return node;
if (p.eatToken(.CharLiteral)) |token| {
const node = try p.arena.allocator.create(Node.CharLiteral);
const node = try p.arena.allocator.create(Node.OneToken);
node.* = .{
.base = .{ .tag = .CharLiteral },
.token = token,
};
return &node.base;
@ -1547,7 +1553,7 @@ const Parser = struct {
const identifier = try p.expectNodeRecoverable(parseIdentifier, .{
.ExpectedIdentifier = .{ .token = p.tok_i },
});
const global_error_set = try p.createLiteral(Node.ErrorType, token);
const global_error_set = try p.createLiteral(.ErrorType, token);
if (period == null or identifier == null) return global_error_set;
const node = try p.arena.allocator.create(Node.SimpleInfixOp);
@ -1559,8 +1565,8 @@ const Parser = struct {
};
return &node.base;
}
if (p.eatToken(.Keyword_false)) |token| return p.createLiteral(Node.BoolLiteral, token);
if (p.eatToken(.Keyword_null)) |token| return p.createLiteral(Node.NullLiteral, token);
if (p.eatToken(.Keyword_false)) |token| return p.createLiteral(.BoolLiteral, token);
if (p.eatToken(.Keyword_null)) |token| return p.createLiteral(.NullLiteral, token);
if (p.eatToken(.Keyword_anyframe)) |token| {
const node = try p.arena.allocator.create(Node.AnyFrameType);
node.* = .{
@ -1569,9 +1575,9 @@ const Parser = struct {
};
return &node.base;
}
if (p.eatToken(.Keyword_true)) |token| return p.createLiteral(Node.BoolLiteral, token);
if (p.eatToken(.Keyword_undefined)) |token| return p.createLiteral(Node.UndefinedLiteral, token);
if (p.eatToken(.Keyword_unreachable)) |token| return p.createLiteral(Node.Unreachable, token);
if (p.eatToken(.Keyword_true)) |token| return p.createLiteral(.BoolLiteral, token);
if (p.eatToken(.Keyword_undefined)) |token| return p.createLiteral(.UndefinedLiteral, token);
if (p.eatToken(.Keyword_unreachable)) |token| return p.createLiteral(.Unreachable, token);
if (try p.parseStringLiteral()) |node| return node;
if (try p.parseSwitchExpr()) |node| return node;
@ -1865,7 +1871,7 @@ const Parser = struct {
const variable = try p.expectNode(parseIdentifier, .{
.ExpectedIdentifier = .{ .token = p.tok_i },
});
break :blk .{ .Variable = variable.cast(Node.Identifier).? };
break :blk .{ .Variable = variable.castTag(.Identifier).? };
};
const rparen = try p.expectToken(.RParen);
@ -1906,11 +1912,10 @@ const Parser = struct {
}
/// BreakLabel <- COLON IDENTIFIER
fn parseBreakLabel(p: *Parser) !?*Node {
fn parseBreakLabel(p: *Parser) !?TokenIndex {
_ = p.eatToken(.Colon) orelse return null;
return try p.expectNode(parseIdentifier, .{
.ExpectedIdentifier = .{ .token = p.tok_i },
});
const ident = try p.expectToken(.Identifier);
return ident;
}
/// BlockLabel <- IDENTIFIER COLON
@ -3022,8 +3027,9 @@ const Parser = struct {
});
// lets pretend this was an identifier so we can continue parsing
const node = try p.arena.allocator.create(Node.Identifier);
const node = try p.arena.allocator.create(Node.OneToken);
node.* = .{
.base = .{ .tag = .Identifier },
.token = token,
};
return &node.base;
@ -3054,8 +3060,9 @@ const Parser = struct {
fn parseIdentifier(p: *Parser) !?*Node {
const token = p.eatToken(.Identifier) orelse return null;
const node = try p.arena.allocator.create(Node.Identifier);
const node = try p.arena.allocator.create(Node.OneToken);
node.* = .{
.base = .{ .tag = .Identifier },
.token = token,
};
return &node.base;
@ -3064,16 +3071,18 @@ const Parser = struct {
fn parseAnyType(p: *Parser) !?*Node {
const token = p.eatToken(.Keyword_anytype) orelse
p.eatToken(.Keyword_var) orelse return null; // TODO remove in next release cycle
const node = try p.arena.allocator.create(Node.AnyType);
const node = try p.arena.allocator.create(Node.OneToken);
node.* = .{
.base = .{ .tag = .AnyType },
.token = token,
};
return &node.base;
}
fn createLiteral(p: *Parser, comptime T: type, token: TokenIndex) !*Node {
const result = try p.arena.allocator.create(T);
result.* = T{
fn createLiteral(p: *Parser, tag: ast.Node.Tag, token: TokenIndex) !*Node {
const result = try p.arena.allocator.create(Node.OneToken);
result.* = .{
.base = .{ .tag = tag },
.token = token,
};
return &result.base;
@ -3081,8 +3090,9 @@ const Parser = struct {
fn parseStringLiteralSingle(p: *Parser) !?*Node {
if (p.eatToken(.StringLiteral)) |token| {
const node = try p.arena.allocator.create(Node.StringLiteral);
const node = try p.arena.allocator.create(Node.OneToken);
node.* = .{
.base = .{ .tag = .StringLiteral },
.token = token,
};
return &node.base;
@ -3131,8 +3141,9 @@ const Parser = struct {
fn parseIntegerLiteral(p: *Parser) !?*Node {
const token = p.eatToken(.IntegerLiteral) orelse return null;
const node = try p.arena.allocator.create(Node.IntegerLiteral);
const node = try p.arena.allocator.create(Node.OneToken);
node.* = .{
.base = .{ .tag = .IntegerLiteral },
.token = token,
};
return &node.base;
@ -3140,8 +3151,9 @@ const Parser = struct {
fn parseFloatLiteral(p: *Parser) !?*Node {
const token = p.eatToken(.FloatLiteral) orelse return null;
const node = try p.arena.allocator.create(Node.FloatLiteral);
const node = try p.arena.allocator.create(Node.OneToken);
node.* = .{
.base = .{ .tag = .FloatLiteral },
.token = token,
};
return &node.base;


@ -366,10 +366,32 @@ fn renderExpression(
space: Space,
) (@TypeOf(stream).Error || Error)!void {
switch (base.tag) {
.Identifier => {
const identifier = @fieldParentPtr(ast.Node.Identifier, "base", base);
return renderToken(tree, stream, identifier.token, indent, start_col, space);
.Identifier,
.IntegerLiteral,
.FloatLiteral,
.StringLiteral,
.CharLiteral,
.BoolLiteral,
.NullLiteral,
.Unreachable,
.ErrorType,
.UndefinedLiteral,
=> {
const casted_node = base.cast(ast.Node.OneToken).?;
return renderToken(tree, stream, casted_node.token, indent, start_col, space);
},
.AnyType => {
const any_type = base.castTag(.AnyType).?;
if (mem.eql(u8, tree.tokenSlice(any_type.token), "var")) {
// TODO remove in next release cycle
try stream.writeAll("anytype");
if (space == .Comma) try stream.writeAll(",\n");
return;
}
return renderToken(tree, stream, any_type.token, indent, start_col, space);
},
.Block => {
const block = @fieldParentPtr(ast.Node.Block, "base", base);
@ -399,6 +421,7 @@ fn renderExpression(
return renderToken(tree, stream, block.rbrace, indent, start_col, space);
}
},
.Defer => {
const defer_node = @fieldParentPtr(ast.Node.Defer, "base", base);
@ -1107,50 +1130,48 @@ fn renderExpression(
return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space); // ?
},
.ControlFlowExpression => {
const flow_expr = @fieldParentPtr(ast.Node.ControlFlowExpression, "base", base);
.Break => {
const flow_expr = base.castTag(.Break).?;
const maybe_rhs = flow_expr.getRHS();
const maybe_label = flow_expr.getLabel();
switch (flow_expr.kind) {
.Break => |maybe_label| {
if (maybe_label == null and flow_expr.rhs == null) {
return renderToken(tree, stream, flow_expr.ltoken, indent, start_col, space); // break
}
try renderToken(tree, stream, flow_expr.ltoken, indent, start_col, Space.Space); // break
if (maybe_label) |label| {
const colon = tree.nextToken(flow_expr.ltoken);
try renderToken(tree, stream, colon, indent, start_col, Space.None); // :
if (flow_expr.rhs == null) {
return renderExpression(allocator, stream, tree, indent, start_col, label, space); // label
}
try renderExpression(allocator, stream, tree, indent, start_col, label, Space.Space); // label
}
},
.Continue => |maybe_label| {
assert(flow_expr.rhs == null);
if (maybe_label == null and flow_expr.rhs == null) {
return renderToken(tree, stream, flow_expr.ltoken, indent, start_col, space); // continue
}
try renderToken(tree, stream, flow_expr.ltoken, indent, start_col, Space.Space); // continue
if (maybe_label) |label| {
const colon = tree.nextToken(flow_expr.ltoken);
try renderToken(tree, stream, colon, indent, start_col, Space.None); // :
return renderExpression(allocator, stream, tree, indent, start_col, label, space);
}
},
.Return => {
if (flow_expr.rhs == null) {
return renderToken(tree, stream, flow_expr.ltoken, indent, start_col, space);
}
try renderToken(tree, stream, flow_expr.ltoken, indent, start_col, Space.Space);
},
if (maybe_label == null and maybe_rhs == null) {
return renderToken(tree, stream, flow_expr.ltoken, indent, start_col, space); // break
}
return renderExpression(allocator, stream, tree, indent, start_col, flow_expr.rhs.?, space);
try renderToken(tree, stream, flow_expr.ltoken, indent, start_col, Space.Space); // break
if (maybe_label) |label| {
const colon = tree.nextToken(flow_expr.ltoken);
try renderToken(tree, stream, colon, indent, start_col, Space.None); // :
if (maybe_rhs == null) {
return renderToken(tree, stream, label, indent, start_col, space); // label
}
try renderToken(tree, stream, label, indent, start_col, Space.Space); // label
}
return renderExpression(allocator, stream, tree, indent, start_col, maybe_rhs.?, space);
},
.Continue => {
const flow_expr = base.castTag(.Continue).?;
if (flow_expr.getLabel()) |label| {
try renderToken(tree, stream, flow_expr.ltoken, indent, start_col, Space.Space); // continue
const colon = tree.nextToken(flow_expr.ltoken);
try renderToken(tree, stream, colon, indent, start_col, Space.None); // :
return renderToken(tree, stream, label, indent, start_col, space); // label
} else {
return renderToken(tree, stream, flow_expr.ltoken, indent, start_col, space); // continue
}
},
.Return => {
const flow_expr = base.castTag(.Return).?;
if (flow_expr.getRHS()) |rhs| {
try renderToken(tree, stream, flow_expr.ltoken, indent, start_col, Space.Space);
return renderExpression(allocator, stream, tree, indent, start_col, rhs, space);
} else {
return renderToken(tree, stream, flow_expr.ltoken, indent, start_col, space);
}
},
.Payload => {
@ -1208,48 +1229,6 @@ fn renderExpression(
return renderExpression(allocator, stream, tree, indent, start_col, field_init.expr, space);
},
.IntegerLiteral => {
const integer_literal = @fieldParentPtr(ast.Node.IntegerLiteral, "base", base);
return renderToken(tree, stream, integer_literal.token, indent, start_col, space);
},
.FloatLiteral => {
const float_literal = @fieldParentPtr(ast.Node.FloatLiteral, "base", base);
return renderToken(tree, stream, float_literal.token, indent, start_col, space);
},
.StringLiteral => {
const string_literal = @fieldParentPtr(ast.Node.StringLiteral, "base", base);
return renderToken(tree, stream, string_literal.token, indent, start_col, space);
},
.CharLiteral => {
const char_literal = @fieldParentPtr(ast.Node.CharLiteral, "base", base);
return renderToken(tree, stream, char_literal.token, indent, start_col, space);
},
.BoolLiteral => {
const bool_literal = @fieldParentPtr(ast.Node.CharLiteral, "base", base);
return renderToken(tree, stream, bool_literal.token, indent, start_col, space);
},
.NullLiteral => {
const null_literal = @fieldParentPtr(ast.Node.NullLiteral, "base", base);
return renderToken(tree, stream, null_literal.token, indent, start_col, space);
},
.Unreachable => {
const unreachable_node = @fieldParentPtr(ast.Node.Unreachable, "base", base);
return renderToken(tree, stream, unreachable_node.token, indent, start_col, space);
},
.ErrorType => {
const error_type = @fieldParentPtr(ast.Node.ErrorType, "base", base);
return renderToken(tree, stream, error_type.token, indent, start_col, space);
},
.AnyType => {
const any_type = @fieldParentPtr(ast.Node.AnyType, "base", base);
if (mem.eql(u8, tree.tokenSlice(any_type.token), "var")) {
// TODO remove in next release cycle
try stream.writeAll("anytype");
if (space == .Comma) try stream.writeAll(",\n");
return;
}
return renderToken(tree, stream, any_type.token, indent, start_col, space);
},
.ContainerDecl => {
const container_decl = @fieldParentPtr(ast.Node.ContainerDecl, "base", base);
@ -1468,10 +1447,6 @@ fn renderExpression(
}
try stream.writeByteNTimes(' ', indent);
},
.UndefinedLiteral => {
const undefined_literal = @fieldParentPtr(ast.Node.UndefinedLiteral, "base", base);
return renderToken(tree, stream, undefined_literal.token, indent, start_col, space);
},
.BuiltinCall => {
const builtin_call = @fieldParentPtr(ast.Node.BuiltinCall, "base", base);


@ -212,7 +212,8 @@ pub const Decl = struct {
},
.block => unreachable,
.gen_zir => unreachable,
.local_var => unreachable,
.local_val => unreachable,
.local_ptr => unreachable,
.decl => unreachable,
}
}
@ -308,7 +309,8 @@ pub const Scope = struct {
.block => return self.cast(Block).?.arena,
.decl => return &self.cast(DeclAnalysis).?.arena.allocator,
.gen_zir => return self.cast(GenZIR).?.arena,
.local_var => return self.cast(LocalVar).?.gen_zir.arena,
.local_val => return self.cast(LocalVal).?.gen_zir.arena,
.local_ptr => return self.cast(LocalPtr).?.gen_zir.arena,
.zir_module => return &self.cast(ZIRModule).?.contents.module.arena.allocator,
.file => unreachable,
}
@ -320,7 +322,8 @@ pub const Scope = struct {
return switch (self.tag) {
.block => self.cast(Block).?.decl,
.gen_zir => self.cast(GenZIR).?.decl,
.local_var => return self.cast(LocalVar).?.gen_zir.decl,
.local_val => return self.cast(LocalVal).?.gen_zir.decl,
.local_ptr => return self.cast(LocalPtr).?.gen_zir.decl,
.decl => self.cast(DeclAnalysis).?.decl,
.zir_module => null,
.file => null,
@ -333,7 +336,8 @@ pub const Scope = struct {
switch (self.tag) {
.block => return self.cast(Block).?.decl.scope,
.gen_zir => return self.cast(GenZIR).?.decl.scope,
.local_var => return self.cast(LocalVar).?.gen_zir.decl.scope,
.local_val => return self.cast(LocalVal).?.gen_zir.decl.scope,
.local_ptr => return self.cast(LocalPtr).?.gen_zir.decl.scope,
.decl => return self.cast(DeclAnalysis).?.decl.scope,
.zir_module, .file => return self,
}
@ -346,7 +350,8 @@ pub const Scope = struct {
switch (self.tag) {
.block => unreachable,
.gen_zir => unreachable,
.local_var => unreachable,
.local_val => unreachable,
.local_ptr => unreachable,
.decl => unreachable,
.zir_module => return self.cast(ZIRModule).?.fullyQualifiedNameHash(name),
.file => return self.cast(File).?.fullyQualifiedNameHash(name),
@ -361,7 +366,8 @@ pub const Scope = struct {
.decl => return self.cast(DeclAnalysis).?.decl.scope.cast(File).?.contents.tree,
.block => return self.cast(Block).?.decl.scope.cast(File).?.contents.tree,
.gen_zir => return self.cast(GenZIR).?.decl.scope.cast(File).?.contents.tree,
.local_var => return self.cast(LocalVar).?.gen_zir.decl.scope.cast(File).?.contents.tree,
.local_val => return self.cast(LocalVal).?.gen_zir.decl.scope.cast(File).?.contents.tree,
.local_ptr => return self.cast(LocalPtr).?.gen_zir.decl.scope.cast(File).?.contents.tree,
}
}
@ -370,7 +376,8 @@ pub const Scope = struct {
return switch (self.tag) {
.block => unreachable,
.gen_zir => self.cast(GenZIR).?,
.local_var => return self.cast(LocalVar).?.gen_zir,
.local_val => return self.cast(LocalVal).?.gen_zir,
.local_ptr => return self.cast(LocalPtr).?.gen_zir,
.decl => unreachable,
.zir_module => unreachable,
.file => unreachable,
@ -397,7 +404,8 @@ pub const Scope = struct {
.zir_module => return @fieldParentPtr(ZIRModule, "base", base).sub_file_path,
.block => unreachable,
.gen_zir => unreachable,
.local_var => unreachable,
.local_val => unreachable,
.local_ptr => unreachable,
.decl => unreachable,
}
}
@ -408,7 +416,8 @@ pub const Scope = struct {
.zir_module => return @fieldParentPtr(ZIRModule, "base", base).unload(gpa),
.block => unreachable,
.gen_zir => unreachable,
.local_var => unreachable,
.local_val => unreachable,
.local_ptr => unreachable,
.decl => unreachable,
}
}
@ -418,7 +427,8 @@ pub const Scope = struct {
.file => return @fieldParentPtr(File, "base", base).getSource(module),
.zir_module => return @fieldParentPtr(ZIRModule, "base", base).getSource(module),
.gen_zir => unreachable,
.local_var => unreachable,
.local_val => unreachable,
.local_ptr => unreachable,
.block => unreachable,
.decl => unreachable,
}
@ -431,7 +441,8 @@ pub const Scope = struct {
.zir_module => return @fieldParentPtr(ZIRModule, "base", base).removeDecl(child),
.block => unreachable,
.gen_zir => unreachable,
.local_var => unreachable,
.local_val => unreachable,
.local_ptr => unreachable,
.decl => unreachable,
}
}
@ -451,7 +462,8 @@ pub const Scope = struct {
},
.block => unreachable,
.gen_zir => unreachable,
.local_var => unreachable,
.local_val => unreachable,
.local_ptr => unreachable,
.decl => unreachable,
}
}
@ -472,7 +484,8 @@ pub const Scope = struct {
block,
decl,
gen_zir,
local_var,
local_val,
local_ptr,
};
pub const File = struct {
@ -708,17 +721,31 @@ pub const Scope = struct {
instructions: std.ArrayListUnmanaged(*zir.Inst) = .{},
};
/// This is always a `const` local and importantly the `inst` is a value type, not a pointer.
/// This structure lives as long as the AST generation of the Block
/// node that contains the variable.
pub const LocalVar = struct {
pub const base_tag: Tag = .local_var;
pub const LocalVal = struct {
pub const base_tag: Tag = .local_val;
base: Scope = Scope{ .tag = base_tag },
/// Parents can be: `LocalVar`, `GenZIR`.
/// Parents can be: `LocalVal`, `LocalPtr`, `GenZIR`.
parent: *Scope,
gen_zir: *GenZIR,
name: []const u8,
inst: *zir.Inst,
};
/// This could be a `const` or `var` local. It has a pointer instead of a value.
/// This structure lives as long as the AST generation of the Block
/// node that contains the variable.
pub const LocalPtr = struct {
pub const base_tag: Tag = .local_ptr;
base: Scope = Scope{ .tag = base_tag },
/// Parents can be: `LocalVal`, `LocalPtr`, `GenZIR`.
parent: *Scope,
gen_zir: *GenZIR,
name: []const u8,
ptr: *zir.Inst,
};
};
pub const AllErrors = struct {
@ -1176,12 +1203,19 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
const param_decls = fn_proto.params();
const param_types = try fn_type_scope.arena.alloc(*zir.Inst, param_decls.len);
const fn_src = tree.token_locs[fn_proto.fn_token].start;
const type_type = try self.addZIRInstConst(&fn_type_scope.base, fn_src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.type_type),
});
const type_type_rl: astgen.ResultLoc = .{ .ty = type_type };
for (param_decls) |param_decl, i| {
const param_type_node = switch (param_decl.param_type) {
.any_type => |node| return self.failNode(&fn_type_scope.base, node, "TODO implement anytype parameter", .{}),
.type_expr => |node| node,
};
param_types[i] = try astgen.expr(self, &fn_type_scope.base, param_type_node);
param_types[i] = try astgen.expr(self, &fn_type_scope.base, type_type_rl, param_type_node);
}
if (fn_proto.getTrailer("var_args_token")) |var_args_token| {
return self.failTok(&fn_type_scope.base, var_args_token, "TODO implement var args", .{});
@ -1209,8 +1243,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.Invalid => |tok| return self.failTok(&fn_type_scope.base, tok, "unable to parse return type", .{}),
};
const return_type_inst = try astgen.expr(self, &fn_type_scope.base, return_type_expr);
const fn_src = tree.token_locs[fn_proto.fn_token].start;
const return_type_inst = try astgen.expr(self, &fn_type_scope.base, type_type_rl, return_type_expr);
const fn_type_inst = try self.addZIRInst(&fn_type_scope.base, fn_src, zir.Inst.FnType, .{
.return_type = return_type_inst,
.param_types = param_types,
@ -1266,7 +1299,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.kw_args = .{},
};
gen_scope.instructions.items[i] = &arg.base;
const sub_scope = try gen_scope_arena.allocator.create(Scope.LocalVar);
const sub_scope = try gen_scope_arena.allocator.create(Scope.LocalVal);
sub_scope.* = .{
.parent = params_scope,
.gen_zir = &gen_scope,
@ -1829,6 +1862,7 @@ fn resolveInst(self: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!*In
return self.analyzeDeref(scope, old_inst.src, decl_ref, old_inst.src);
}
/// TODO split this into `requireRuntimeBlock` and `requireFunctionBlock` and audit callsites.
fn requireRuntimeBlock(self: *Module, scope: *Scope, src: usize) !*Scope.Block {
return scope.cast(Scope.Block) orelse
return self.fail(scope, src, "instruction illegal outside function body", .{});
@ -2098,12 +2132,7 @@ pub fn addZIRInstSpecial(
return inst;
}
pub fn addZIRNoOp(
self: *Module,
scope: *Scope,
src: usize,
tag: zir.Inst.Tag,
) !*zir.Inst {
pub fn addZIRNoOpT(self: *Module, scope: *Scope, src: usize, tag: zir.Inst.Tag) !*zir.Inst.NoOp {
const gen_zir = scope.getGenZIR();
try gen_zir.instructions.ensureCapacity(self.gpa, gen_zir.instructions.items.len + 1);
const inst = try gen_zir.arena.create(zir.Inst.NoOp);
@ -2116,6 +2145,11 @@ pub fn addZIRNoOp(
.kw_args = .{},
};
gen_zir.instructions.appendAssumeCapacity(&inst.base);
return inst;
}
pub fn addZIRNoOp(self: *Module, scope: *Scope, src: usize, tag: zir.Inst.Tag) !*zir.Inst {
const inst = try self.addZIRNoOpT(scope, src, tag);
return &inst.base;
}
@ -2320,24 +2354,36 @@ fn analyzeInstConst(self: *Module, scope: *Scope, const_inst: *zir.Inst.Const) I
fn analyzeInst(self: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!*Inst {
switch (old_inst.tag) {
.alloc => return self.analyzeInstAlloc(scope, old_inst.castTag(.alloc).?),
.alloc_inferred => return self.analyzeInstAllocInferred(scope, old_inst.castTag(.alloc_inferred).?),
.arg => return self.analyzeInstArg(scope, old_inst.castTag(.arg).?),
.bitcast_result_ptr => return self.analyzeInstBitCastResultPtr(scope, old_inst.castTag(.bitcast_result_ptr).?),
.block => return self.analyzeInstBlock(scope, old_inst.castTag(.block).?),
.@"break" => return self.analyzeInstBreak(scope, old_inst.castTag(.@"break").?),
.breakpoint => return self.analyzeInstBreakpoint(scope, old_inst.castTag(.breakpoint).?),
.breakvoid => return self.analyzeInstBreakVoid(scope, old_inst.castTag(.breakvoid).?),
.call => return self.analyzeInstCall(scope, old_inst.castTag(.call).?),
.coerce_result_block_ptr => return self.analyzeInstCoerceResultBlockPtr(scope, old_inst.castTag(.coerce_result_block_ptr).?),
.coerce_result_ptr => return self.analyzeInstCoerceResultPtr(scope, old_inst.castTag(.coerce_result_ptr).?),
.coerce_to_ptr_elem => return self.analyzeInstCoerceToPtrElem(scope, old_inst.castTag(.coerce_to_ptr_elem).?),
.compileerror => return self.analyzeInstCompileError(scope, old_inst.castTag(.compileerror).?),
.@"const" => return self.analyzeInstConst(scope, old_inst.castTag(.@"const").?),
.declref => return self.analyzeInstDeclRef(scope, old_inst.castTag(.declref).?),
.declref_str => return self.analyzeInstDeclRefStr(scope, old_inst.castTag(.declref_str).?),
.declval => return self.analyzeInstDeclVal(scope, old_inst.castTag(.declval).?),
.declval_in_module => return self.analyzeInstDeclValInModule(scope, old_inst.castTag(.declval_in_module).?),
.ensure_result_used => return self.analyzeInstEnsureResultUsed(scope, old_inst.castTag(.ensure_result_used).?),
.ensure_result_non_error => return self.analyzeInstEnsureResultNonError(scope, old_inst.castTag(.ensure_result_non_error).?),
.ret_ptr => return self.analyzeInstRetPtr(scope, old_inst.castTag(.ret_ptr).?),
.ret_type => return self.analyzeInstRetType(scope, old_inst.castTag(.ret_type).?),
.store => return self.analyzeInstStore(scope, old_inst.castTag(.store).?),
.str => return self.analyzeInstStr(scope, old_inst.castTag(.str).?),
.int => {
const big_int = old_inst.castTag(.int).?.positionals.int;
return self.constIntBig(scope, old_inst.src, Type.initTag(.comptime_int), big_int);
},
.inttype => return self.analyzeInstIntType(scope, old_inst.castTag(.inttype).?),
.param_type => return self.analyzeInstParamType(scope, old_inst.castTag(.param_type).?),
.ptrtoint => return self.analyzeInstPtrToInt(scope, old_inst.castTag(.ptrtoint).?),
.fieldptr => return self.analyzeInstFieldPtr(scope, old_inst.castTag(.fieldptr).?),
.deref => return self.analyzeInstDeref(scope, old_inst.castTag(.deref).?),
@ -2369,6 +2415,94 @@ fn analyzeInst(self: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!*In
}
}
fn analyzeInstCoerceResultBlockPtr(
self: *Module,
scope: *Scope,
inst: *zir.Inst.CoerceResultBlockPtr,
) InnerError!*Inst {
return self.fail(scope, inst.base.src, "TODO implement analyzeInstCoerceResultBlockPtr", .{});
}
fn analyzeInstBitCastResultPtr(self: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
return self.fail(scope, inst.base.src, "TODO implement analyzeInstBitCastResultPtr", .{});
}
fn analyzeInstCoerceResultPtr(self: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
return self.fail(scope, inst.base.src, "TODO implement analyzeInstCoerceResultPtr", .{});
}
fn analyzeInstCoerceToPtrElem(self: *Module, scope: *Scope, inst: *zir.Inst.CoerceToPtrElem) InnerError!*Inst {
return self.fail(scope, inst.base.src, "TODO implement analyzeInstCoerceToPtrElem", .{});
}
fn analyzeInstRetPtr(self: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
return self.fail(scope, inst.base.src, "TODO implement analyzeInstRetPtr", .{});
}
fn analyzeInstRetType(self: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
const b = try self.requireRuntimeBlock(scope, inst.base.src);
const fn_ty = b.func.?.owner_decl.typed_value.most_recent.typed_value.ty;
const ret_type = fn_ty.fnReturnType();
return self.constType(scope, inst.base.src, ret_type);
}
fn analyzeInstEnsureResultUsed(self: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const operand = try self.resolveInst(scope, inst.positionals.operand);
switch (operand.ty.zigTypeTag()) {
.Void, .NoReturn => return self.constVoid(scope, operand.src),
else => return self.fail(scope, operand.src, "expression value is ignored", .{}),
}
}
fn analyzeInstEnsureResultNonError(self: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const operand = try self.resolveInst(scope, inst.positionals.operand);
switch (operand.ty.zigTypeTag()) {
.ErrorSet, .ErrorUnion => return self.fail(scope, operand.src, "error is discarded", .{}),
else => return self.constVoid(scope, operand.src),
}
}
fn analyzeInstAlloc(self: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
return self.fail(scope, inst.base.src, "TODO implement analyzeInstAlloc", .{});
}
fn analyzeInstAllocInferred(self: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
return self.fail(scope, inst.base.src, "TODO implement analyzeInstAllocInferred", .{});
}
fn analyzeInstStore(self: *Module, scope: *Scope, inst: *zir.Inst.Store) InnerError!*Inst {
return self.fail(scope, inst.base.src, "TODO implement analyzeInstStore", .{});
}
fn analyzeInstParamType(self: *Module, scope: *Scope, inst: *zir.Inst.ParamType) InnerError!*Inst {
const fn_inst = try self.resolveInst(scope, inst.positionals.func);
const arg_index = inst.positionals.arg_index;
const fn_ty: Type = switch (fn_inst.ty.zigTypeTag()) {
.Fn => fn_inst.ty,
.BoundFn => {
return self.fail(scope, fn_inst.src, "TODO implement analyzeInstParamType for method call syntax", .{});
},
else => {
return self.fail(scope, fn_inst.src, "expected function, found '{}'", .{fn_inst.ty});
},
};
// TODO support C-style var args
const param_count = fn_ty.fnParamLen();
if (arg_index >= param_count) {
return self.fail(scope, inst.base.src, "arg index {} out of bounds; '{}' has {} arguments", .{
arg_index,
fn_ty,
param_count,
});
}
// TODO support generic functions
const param_type = fn_ty.fnParamType(arg_index);
return self.constType(scope, inst.base.src, param_type);
}
fn analyzeInstStr(self: *Module, scope: *Scope, str_inst: *zir.Inst.Str) InnerError!*Inst {
// The bytes references memory inside the ZIR module, which can get deallocated
// after semantic analysis is complete. We need the memory to be in the new anonymous Decl's arena.
@ -2746,13 +2880,13 @@ fn analyzeInstPrimitive(self: *Module, scope: *Scope, primitive: *zir.Inst.Primi
return self.constInst(scope, primitive.base.src, primitive.positionals.tag.toTypedValue());
}
fn analyzeInstAs(self: *Module, scope: *Scope, as: *zir.Inst.As) InnerError!*Inst {
const dest_type = try self.resolveType(scope, as.positionals.dest_type);
const new_inst = try self.resolveInst(scope, as.positionals.value);
fn analyzeInstAs(self: *Module, scope: *Scope, as: *zir.Inst.BinOp) InnerError!*Inst {
const dest_type = try self.resolveType(scope, as.positionals.lhs);
const new_inst = try self.resolveInst(scope, as.positionals.rhs);
return self.coerce(scope, dest_type, new_inst);
}
fn analyzeInstPtrToInt(self: *Module, scope: *Scope, ptrtoint: *zir.Inst.PtrToInt) InnerError!*Inst {
fn analyzeInstPtrToInt(self: *Module, scope: *Scope, ptrtoint: *zir.Inst.UnOp) InnerError!*Inst {
const ptr = try self.resolveInst(scope, ptrtoint.positionals.operand);
if (ptr.ty.zigTypeTag() != .Pointer) {
return self.fail(scope, ptrtoint.positionals.operand.src, "expected pointer, found '{}'", .{ptr.ty});
@ -2797,16 +2931,16 @@ fn analyzeInstFieldPtr(self: *Module, scope: *Scope, fieldptr: *zir.Inst.FieldPt
}
}
fn analyzeInstIntCast(self: *Module, scope: *Scope, inst: *zir.Inst.IntCast) InnerError!*Inst {
const dest_type = try self.resolveType(scope, inst.positionals.dest_type);
const operand = try self.resolveInst(scope, inst.positionals.operand);
fn analyzeInstIntCast(self: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const dest_type = try self.resolveType(scope, inst.positionals.lhs);
const operand = try self.resolveInst(scope, inst.positionals.rhs);
const dest_is_comptime_int = switch (dest_type.zigTypeTag()) {
.ComptimeInt => true,
.Int => false,
else => return self.fail(
scope,
inst.positionals.dest_type.src,
inst.positionals.lhs.src,
"expected integer type, found '{}'",
.{
dest_type,
@ -2818,7 +2952,7 @@ fn analyzeInstIntCast(self: *Module, scope: *Scope, inst: *zir.Inst.IntCast) Inn
.ComptimeInt, .Int => {},
else => return self.fail(
scope,
inst.positionals.operand.src,
inst.positionals.rhs.src,
"expected integer type, found '{}'",
.{operand.ty},
),
@ -2833,22 +2967,22 @@ fn analyzeInstIntCast(self: *Module, scope: *Scope, inst: *zir.Inst.IntCast) Inn
return self.fail(scope, inst.base.src, "TODO implement analyze widen or shorten int", .{});
}
fn analyzeInstBitCast(self: *Module, scope: *Scope, inst: *zir.Inst.BitCast) InnerError!*Inst {
const dest_type = try self.resolveType(scope, inst.positionals.dest_type);
const operand = try self.resolveInst(scope, inst.positionals.operand);
fn analyzeInstBitCast(self: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const dest_type = try self.resolveType(scope, inst.positionals.lhs);
const operand = try self.resolveInst(scope, inst.positionals.rhs);
return self.bitcast(scope, dest_type, operand);
}
fn analyzeInstFloatCast(self: *Module, scope: *Scope, inst: *zir.Inst.FloatCast) InnerError!*Inst {
const dest_type = try self.resolveType(scope, inst.positionals.dest_type);
const operand = try self.resolveInst(scope, inst.positionals.operand);
fn analyzeInstFloatCast(self: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const dest_type = try self.resolveType(scope, inst.positionals.lhs);
const operand = try self.resolveInst(scope, inst.positionals.rhs);
const dest_is_comptime_float = switch (dest_type.zigTypeTag()) {
.ComptimeFloat => true,
.Float => false,
else => return self.fail(
scope,
inst.positionals.dest_type.src,
inst.positionals.lhs.src,
"expected float type, found '{}'",
.{
dest_type,
@ -2860,7 +2994,7 @@ fn analyzeInstFloatCast(self: *Module, scope: *Scope, inst: *zir.Inst.FloatCast)
.ComptimeFloat, .Float, .ComptimeInt => {},
else => return self.fail(
scope,
inst.positionals.operand.src,
inst.positionals.rhs.src,
"expected float type, found '{}'",
.{operand.ty},
),
@ -3560,8 +3694,14 @@ fn failWithOwnedErrorMsg(self: *Module, scope: *Scope, src: usize, err_msg: *Err
gen_zir.decl.generation = self.generation;
self.failed_decls.putAssumeCapacityNoClobber(gen_zir.decl, err_msg);
},
.local_var => {
const gen_zir = scope.cast(Scope.LocalVar).?.gen_zir;
.local_val => {
const gen_zir = scope.cast(Scope.LocalVal).?.gen_zir;
gen_zir.decl.analysis = .sema_failure;
gen_zir.decl.generation = self.generation;
self.failed_decls.putAssumeCapacityNoClobber(gen_zir.decl, err_msg);
},
.local_ptr => {
const gen_zir = scope.cast(Scope.LocalPtr).?.gen_zir;
gen_zir.decl.analysis = .sema_failure;
gen_zir.decl.generation = self.generation;
self.failed_decls.putAssumeCapacityNoClobber(gen_zir.decl, err_msg);


@ -1,5 +1,6 @@
const std = @import("std");
const mem = std.mem;
const Allocator = std.mem.Allocator;
const Value = @import("value.zig").Value;
const Type = @import("type.zig").Type;
const TypedValue = @import("TypedValue.zig");
@ -11,35 +12,67 @@ const trace = @import("tracy.zig").trace;
const Scope = Module.Scope;
const InnerError = Module.InnerError;
pub const ResultLoc = union(enum) {
/// The expression is the right-hand side of assignment to `_`.
discard,
/// The expression has an inferred type, and it will be evaluated as an rvalue.
none,
/// The expression will be type coerced into this type, but it will be evaluated as an rvalue.
ty: *zir.Inst,
/// The expression must store its result into this typed pointer.
ptr: *zir.Inst,
/// The expression must store its result into this allocation, which has an inferred type.
inferred_ptr: *zir.Inst.Tag.alloc_inferred.Type(),
/// The expression must store its result into this pointer, which is a typed pointer that
/// has been bitcasted to whatever the expression's type is.
bitcasted_ptr: *zir.Inst.UnOp,
/// There is a pointer for the expression to store its result into, however, its type
/// is inferred based on peer type resolution for a `zir.Inst.Block`.
block_ptr: *zir.Inst.Block,
};
pub fn typeExpr(mod: *Module, scope: *Scope, type_node: *ast.Node) InnerError!*zir.Inst {
const type_src = scope.tree().token_locs[type_node.firstToken()].start;
const type_type = try mod.addZIRInstConst(scope, type_src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.type_type),
});
const type_rl: ResultLoc = .{ .ty = type_type };
return expr(mod, scope, type_rl, type_node);
}
/// Turn Zig AST into untyped ZIR instructions.
pub fn expr(mod: *Module, scope: *Scope, node: *ast.Node) InnerError!*zir.Inst {
pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerError!*zir.Inst {
switch (node.tag) {
.VarDecl => unreachable, // Handled in `blockExpr`.
.Assign => unreachable, // Handled in `blockExpr`.
.Add => return simpleInfixOp(mod, scope, node.castTag(.Add).?, .add),
.Sub => return simpleInfixOp(mod, scope, node.castTag(.Sub).?, .sub),
.BangEqual => return simpleInfixOp(mod, scope, node.castTag(.BangEqual).?, .cmp_neq),
.EqualEqual => return simpleInfixOp(mod, scope, node.castTag(.EqualEqual).?, .cmp_eq),
.GreaterThan => return simpleInfixOp(mod, scope, node.castTag(.GreaterThan).?, .cmp_gt),
.GreaterOrEqual => return simpleInfixOp(mod, scope, node.castTag(.GreaterOrEqual).?, .cmp_gte),
.LessThan => return simpleInfixOp(mod, scope, node.castTag(.LessThan).?, .cmp_lt),
.LessOrEqual => return simpleInfixOp(mod, scope, node.castTag(.LessOrEqual).?, .cmp_lte),
.Add => return arithmetic(mod, scope, rl, node.castTag(.Add).?, .add),
.Sub => return arithmetic(mod, scope, rl, node.castTag(.Sub).?, .sub),
.Identifier => return identifier(mod, scope, node.castTag(.Identifier).?),
.Asm => return assembly(mod, scope, node.castTag(.Asm).?),
.StringLiteral => return stringLiteral(mod, scope, node.castTag(.StringLiteral).?),
.IntegerLiteral => return integerLiteral(mod, scope, node.castTag(.IntegerLiteral).?),
.BuiltinCall => return builtinCall(mod, scope, node.castTag(.BuiltinCall).?),
.Call => return callExpr(mod, scope, node.castTag(.Call).?),
.BangEqual => return cmp(mod, scope, rl, node.castTag(.BangEqual).?, .cmp_neq),
.EqualEqual => return cmp(mod, scope, rl, node.castTag(.EqualEqual).?, .cmp_eq),
.GreaterThan => return cmp(mod, scope, rl, node.castTag(.GreaterThan).?, .cmp_gt),
.GreaterOrEqual => return cmp(mod, scope, rl, node.castTag(.GreaterOrEqual).?, .cmp_gte),
.LessThan => return cmp(mod, scope, rl, node.castTag(.LessThan).?, .cmp_lt),
.LessOrEqual => return cmp(mod, scope, rl, node.castTag(.LessOrEqual).?, .cmp_lte),
.Identifier => return rlWrap(mod, scope, rl, try identifier(mod, scope, node.castTag(.Identifier).?)),
.Asm => return rlWrap(mod, scope, rl, try assembly(mod, scope, node.castTag(.Asm).?)),
.StringLiteral => return rlWrap(mod, scope, rl, try stringLiteral(mod, scope, node.castTag(.StringLiteral).?)),
.IntegerLiteral => return rlWrap(mod, scope, rl, try integerLiteral(mod, scope, node.castTag(.IntegerLiteral).?)),
.BuiltinCall => return builtinCall(mod, scope, rl, node.castTag(.BuiltinCall).?),
.Call => return callExpr(mod, scope, rl, node.castTag(.Call).?),
.Unreachable => return unreach(mod, scope, node.castTag(.Unreachable).?),
.ControlFlowExpression => return controlFlowExpr(mod, scope, node.castTag(.ControlFlowExpression).?),
.If => return ifExpr(mod, scope, node.castTag(.If).?),
.Assign => return assign(mod, scope, node.castTag(.Assign).?),
.Period => return field(mod, scope, node.castTag(.Period).?),
.Deref => return deref(mod, scope, node.castTag(.Deref).?),
.BoolNot => return boolNot(mod, scope, node.castTag(.BoolNot).?),
.FloatLiteral => return floatLiteral(mod, scope, node.castTag(.FloatLiteral).?),
.UndefinedLiteral, .BoolLiteral, .NullLiteral => return primitiveLiteral(mod, scope, node),
.Return => return ret(mod, scope, node.castTag(.Return).?),
.If => return ifExpr(mod, scope, rl, node.castTag(.If).?),
.Period => return rlWrap(mod, scope, rl, try field(mod, scope, node.castTag(.Period).?)),
.Deref => return rlWrap(mod, scope, rl, try deref(mod, scope, node.castTag(.Deref).?)),
.BoolNot => return rlWrap(mod, scope, rl, try boolNot(mod, scope, node.castTag(.BoolNot).?)),
.FloatLiteral => return rlWrap(mod, scope, rl, try floatLiteral(mod, scope, node.castTag(.FloatLiteral).?)),
.UndefinedLiteral => return rlWrap(mod, scope, rl, try undefLiteral(mod, scope, node.castTag(.UndefinedLiteral).?)),
.BoolLiteral => return rlWrap(mod, scope, rl, try boolLiteral(mod, scope, node.castTag(.BoolLiteral).?)),
.NullLiteral => return rlWrap(mod, scope, rl, try nullLiteral(mod, scope, node.castTag(.NullLiteral).?)),
else => return mod.failNode(scope, node, "TODO implement astgen.Expr for {}", .{@tagName(node.tag)}),
}
}
@ -59,17 +92,28 @@ pub fn blockExpr(mod: *Module, parent_scope: *Scope, block_node: *ast.Node.Block
for (block_node.statements()) |statement| {
switch (statement.tag) {
.VarDecl => {
const sub_scope = try block_arena.allocator.create(Scope.LocalVar);
const var_decl_node = @fieldParentPtr(ast.Node.VarDecl, "base", statement);
sub_scope.* = try varDecl(mod, scope, var_decl_node);
scope = &sub_scope.base;
const var_decl_node = statement.castTag(.VarDecl).?;
scope = try varDecl(mod, scope, var_decl_node, &block_arena.allocator);
},
.Assign => {
const ass = statement.castTag(.Assign).?;
try assign(mod, scope, ass);
},
else => {
const possibly_unused_result = try expr(mod, scope, .none, statement);
const src = scope.tree().token_locs[statement.firstToken()].start;
_ = try mod.addZIRUnOp(scope, src, .ensure_result_used, possibly_unused_result);
},
else => _ = try expr(mod, scope, statement),
}
}
}
fn varDecl(mod: *Module, scope: *Scope, node: *ast.Node.VarDecl) InnerError!Scope.LocalVar {
fn varDecl(
mod: *Module,
scope: *Scope,
node: *ast.Node.VarDecl,
block_arena: *Allocator,
) InnerError!*Scope {
// TODO implement detection of shadowing
if (node.getTrailer("comptime_token")) |comptime_token| {
return mod.failTok(scope, comptime_token, "TODO implement comptime locals", .{});
@ -78,48 +122,96 @@ fn varDecl(mod: *Module, scope: *Scope, node: *ast.Node.VarDecl) InnerError!Scop
return mod.failNode(scope, align_node, "TODO implement alignment on locals", .{});
}
const tree = scope.tree();
const name_src = tree.token_locs[node.name_token].start;
const ident_name = try identifierTokenString(mod, scope, node.name_token);
const init_node = node.getTrailer("init_node").?;
switch (tree.token_ids[node.mut_token]) {
.Keyword_const => {
if (node.getTrailer("type_node")) |type_node| {
return mod.failNode(scope, type_node, "TODO implement typed const locals", .{});
}
// Depending on the type of AST the initialization expression is, we may need an lvalue
// or an rvalue as a result location. If it is an rvalue, we can use the instruction as
// the variable, no memory location needed.
const init_node = node.getTrailer("init_node").?;
if (nodeMayNeedMemoryLocation(init_node)) {
return mod.failNode(scope, init_node, "TODO implement result locations", .{});
if (node.getTrailer("type_node")) |type_node| {
const type_inst = try typeExpr(mod, scope, type_node);
const alloc = try mod.addZIRUnOp(scope, name_src, .alloc, type_inst);
const result_loc: ResultLoc = .{ .ptr = alloc };
const init_inst = try expr(mod, scope, result_loc, init_node);
const sub_scope = try block_arena.create(Scope.LocalVal);
sub_scope.* = .{
.parent = scope,
.gen_zir = scope.getGenZIR(),
.name = ident_name,
.inst = init_inst,
};
return &sub_scope.base;
} else {
const alloc = try mod.addZIRNoOpT(scope, name_src, .alloc_inferred);
const result_loc: ResultLoc = .{ .inferred_ptr = alloc };
const init_inst = try expr(mod, scope, result_loc, init_node);
const sub_scope = try block_arena.create(Scope.LocalVal);
sub_scope.* = .{
.parent = scope,
.gen_zir = scope.getGenZIR(),
.name = ident_name,
.inst = init_inst,
};
return &sub_scope.base;
}
} else {
const result_loc: ResultLoc = if (node.getTrailer("type_node")) |type_node|
.{ .ty = try typeExpr(mod, scope, type_node) }
else
.none;
const init_inst = try expr(mod, scope, result_loc, init_node);
const sub_scope = try block_arena.create(Scope.LocalVal);
sub_scope.* = .{
.parent = scope,
.gen_zir = scope.getGenZIR(),
.name = ident_name,
.inst = init_inst,
};
return &sub_scope.base;
}
const init_inst = try expr(mod, scope, init_node);
const ident_name = try identifierTokenString(mod, scope, node.name_token);
return Scope.LocalVar{
.parent = scope,
.gen_zir = scope.getGenZIR(),
.name = ident_name,
.inst = init_inst,
};
},
.Keyword_var => {
return mod.failNode(scope, &node.base, "TODO implement local vars", .{});
if (node.getTrailer("type_node")) |type_node| {
const type_inst = try typeExpr(mod, scope, type_node);
const alloc = try mod.addZIRUnOp(scope, name_src, .alloc, type_inst);
const result_loc: ResultLoc = .{ .ptr = alloc };
const init_inst = try expr(mod, scope, result_loc, init_node);
const sub_scope = try block_arena.create(Scope.LocalPtr);
sub_scope.* = .{
.parent = scope,
.gen_zir = scope.getGenZIR(),
.name = ident_name,
.ptr = alloc,
};
return &sub_scope.base;
} else {
const alloc = try mod.addZIRNoOp(scope, name_src, .alloc_inferred);
const result_loc = .{ .inferred_ptr = alloc.castTag(.alloc_inferred).? };
const init_inst = try expr(mod, scope, result_loc, init_node);
const sub_scope = try block_arena.create(Scope.LocalPtr);
sub_scope.* = .{
.parent = scope,
.gen_zir = scope.getGenZIR(),
.name = ident_name,
.ptr = alloc,
};
return &sub_scope.base;
}
},
else => unreachable,
}
}
fn boolNot(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerError!*zir.Inst {
const operand = try expr(mod, scope, node.rhs);
const tree = scope.tree();
const src = tree.token_locs[node.op_token].start;
return mod.addZIRUnOp(scope, src, .boolnot, operand);
}
fn assign(mod: *Module, scope: *Scope, infix_node: *ast.Node.SimpleInfixOp) InnerError!*zir.Inst {
if (infix_node.lhs.tag == .Identifier) {
const ident = @fieldParentPtr(ast.Node.Identifier, "base", infix_node.lhs);
fn assign(mod: *Module, scope: *Scope, infix_node: *ast.Node.SimpleInfixOp) InnerError!void {
if (infix_node.lhs.castTag(.Identifier)) |ident| {
const tree = scope.tree();
const ident_name = try identifierTokenString(mod, scope, ident.token);
if (std.mem.eql(u8, ident_name, "_")) {
return expr(mod, scope, infix_node.rhs);
_ = try expr(mod, scope, .discard, infix_node.rhs);
return;
} else {
return mod.failNode(scope, &infix_node.base, "TODO implement infix operator assign", .{});
}
@ -128,6 +220,17 @@ fn assign(mod: *Module, scope: *Scope, infix_node: *ast.Node.SimpleInfixOp) Inne
}
}
fn boolNot(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerError!*zir.Inst {
const tree = scope.tree();
const src = tree.token_locs[node.op_token].start;
const bool_type = try mod.addZIRInstConst(scope, src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.bool_type),
});
const operand = try expr(mod, scope, .{ .ty = bool_type }, node.rhs);
return mod.addZIRUnOp(scope, src, .boolnot, operand);
}
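// Minimal sketch (illustrative): the operand of `!` is now generated with a
// `bool` result type, so anything that cannot coerce to bool is rejected at
// the operand rather than at the `boolnot` instruction itself.
fn flip(x: bool) bool {
    return !x;
}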
/// Identifier token -> String (allocated in scope.arena())
pub fn identifierTokenString(mod: *Module, scope: *Scope, token: ast.TokenIndex) InnerError![]const u8 {
const tree = scope.tree();
@ -148,7 +251,7 @@ pub fn identifierTokenString(mod: *Module, scope: *Scope, token: ast.TokenIndex)
return ident_name;
}
pub fn identifierStringInst(mod: *Module, scope: *Scope, node: *ast.Node.Identifier) InnerError!*zir.Inst {
pub fn identifierStringInst(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError!*zir.Inst {
const tree = scope.tree();
const src = tree.token_locs[node.token].start;
@ -158,10 +261,11 @@ pub fn identifierStringInst(mod: *Module, scope: *Scope, node: *ast.Node.Identif
}
fn field(mod: *Module, scope: *Scope, node: *ast.Node.SimpleInfixOp) InnerError!*zir.Inst {
// TODO introduce lvalues
const tree = scope.tree();
const src = tree.token_locs[node.op_token].start;
const lhs = try expr(mod, scope, node.lhs);
const lhs = try expr(mod, scope, .none, node.lhs);
const field_name = try identifierStringInst(mod, scope, node.rhs.castTag(.Identifier).?);
const pointer = try mod.addZIRInst(scope, src, zir.Inst.FieldPtr, .{ .object_ptr = lhs, .field_name = field_name }, .{});
@ -171,26 +275,44 @@ fn field(mod: *Module, scope: *Scope, node: *ast.Node.SimpleInfixOp) InnerError!
fn deref(mod: *Module, scope: *Scope, node: *ast.Node.SimpleSuffixOp) InnerError!*zir.Inst {
const tree = scope.tree();
const src = tree.token_locs[node.rtoken].start;
const lhs = try expr(mod, scope, node.lhs);
const lhs = try expr(mod, scope, .none, node.lhs);
return mod.addZIRUnOp(scope, src, .deref, lhs);
}
fn simpleInfixOp(
fn cmp(
mod: *Module,
scope: *Scope,
rl: ResultLoc,
infix_node: *ast.Node.SimpleInfixOp,
cmp_inst_tag: zir.Inst.Tag,
) InnerError!*zir.Inst {
const tree = scope.tree();
const src = tree.token_locs[infix_node.op_token].start;
const lhs = try expr(mod, scope, .none, infix_node.lhs);
const rhs = try expr(mod, scope, .none, infix_node.rhs);
const result = try mod.addZIRBinOp(scope, src, cmp_inst_tag, lhs, rhs);
return rlWrap(mod, scope, rl, result);
}
fn arithmetic(
mod: *Module,
scope: *Scope,
rl: ResultLoc,
infix_node: *ast.Node.SimpleInfixOp,
op_inst_tag: zir.Inst.Tag,
) InnerError!*zir.Inst {
const lhs = try expr(mod, scope, infix_node.lhs);
const rhs = try expr(mod, scope, infix_node.rhs);
const lhs = try expr(mod, scope, .none, infix_node.lhs);
const rhs = try expr(mod, scope, .none, infix_node.rhs);
const tree = scope.tree();
const src = tree.token_locs[infix_node.op_token].start;
return mod.addZIRBinOp(scope, src, op_inst_tag, lhs, rhs);
const result = try mod.addZIRBinOp(scope, src, op_inst_tag, lhs, rhs);
return rlWrap(mod, scope, rl, result);
}
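// Minimal sketch (illustrative): for comparisons and arithmetic, both operands
// are generated with a `.none` result location and only the final result is
// passed through rlWrap to satisfy the caller's location.
fn bigEnough(x: u32) bool {
    return x + 1 > 10;
}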
fn ifExpr(mod: *Module, scope: *Scope, if_node: *ast.Node.If) InnerError!*zir.Inst {
fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) InnerError!*zir.Inst {
if (if_node.payload) |payload| {
return mod.failNode(scope, payload, "TODO implement astgen.IfExpr for optionals", .{});
}
@ -207,10 +329,14 @@ fn ifExpr(mod: *Module, scope: *Scope, if_node: *ast.Node.If) InnerError!*zir.In
};
defer block_scope.instructions.deinit(mod.gpa);
const cond = try expr(mod, &block_scope.base, if_node.condition);
const tree = scope.tree();
const if_src = tree.token_locs[if_node.if_token].start;
const bool_type = try mod.addZIRInstConst(scope, if_src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.bool_type),
});
const cond = try expr(mod, &block_scope.base, .{ .ty = bool_type }, if_node.condition);
const condbr = try mod.addZIRInstSpecial(&block_scope.base, if_src, zir.Inst.CondBr, .{
.condition = cond,
.then_body = undefined, // populated below
@ -228,7 +354,16 @@ fn ifExpr(mod: *Module, scope: *Scope, if_node: *ast.Node.If) InnerError!*zir.In
};
defer then_scope.instructions.deinit(mod.gpa);
const then_result = try expr(mod, &then_scope.base, if_node.body);
// Most result location types can be forwarded directly; however
// if we need to write to a pointer which has an inferred type,
// proper type inference requires peer type resolution on the if's
// branches.
const branch_rl: ResultLoc = switch (rl) {
.discard, .none, .ty, .ptr => rl,
.inferred_ptr, .bitcasted_ptr, .block_ptr => .{ .block_ptr = block },
};
const then_result = try expr(mod, &then_scope.base, branch_rl, if_node.body);
if (!then_result.tag.isNoReturn()) {
const then_src = tree.token_locs[if_node.body.lastToken()].start;
_ = try mod.addZIRInst(&then_scope.base, then_src, zir.Inst.Break, .{
@ -249,7 +384,7 @@ fn ifExpr(mod: *Module, scope: *Scope, if_node: *ast.Node.If) InnerError!*zir.In
defer else_scope.instructions.deinit(mod.gpa);
if (if_node.@"else") |else_node| {
const else_result = try expr(mod, &else_scope.base, else_node.body);
const else_result = try expr(mod, &else_scope.base, branch_rl, else_node.body);
if (!else_result.tag.isNoReturn()) {
const else_src = tree.token_locs[else_node.body.lastToken()].start;
_ = try mod.addZIRInst(&else_scope.base, else_src, zir.Inst.Break, .{
@ -272,27 +407,25 @@ fn ifExpr(mod: *Module, scope: *Scope, if_node: *ast.Node.If) InnerError!*zir.In
return &block.base;
}
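// Minimal sketch (illustrative) of why branches may need peer type resolution:
// with an inferred result location, neither branch alone fixes the type, so
// both branches feed the block's result and the type is resolved across them.
fn pick(cond: bool) u8 {
    const x = if (cond) @as(u8, 1) else 200; // peers: u8 and comptime_int
    return x;
}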
fn controlFlowExpr(
mod: *Module,
scope: *Scope,
cfe: *ast.Node.ControlFlowExpression,
) InnerError!*zir.Inst {
switch (cfe.kind) {
.Break => return mod.failNode(scope, &cfe.base, "TODO implement astgen.Expr for Break", .{}),
.Continue => return mod.failNode(scope, &cfe.base, "TODO implement astgen.Expr for Continue", .{}),
.Return => {},
}
fn ret(mod: *Module, scope: *Scope, cfe: *ast.Node.ControlFlowExpression) InnerError!*zir.Inst {
const tree = scope.tree();
const src = tree.token_locs[cfe.ltoken].start;
if (cfe.rhs) |rhs_node| {
const operand = try expr(mod, scope, rhs_node);
return mod.addZIRUnOp(scope, src, .@"return", operand);
if (cfe.getRHS()) |rhs_node| {
if (nodeMayNeedMemoryLocation(rhs_node)) {
const ret_ptr = try mod.addZIRNoOp(scope, src, .ret_ptr);
const operand = try expr(mod, scope, .{ .ptr = ret_ptr }, rhs_node);
return mod.addZIRUnOp(scope, src, .@"return", operand);
} else {
const fn_ret_ty = try mod.addZIRNoOp(scope, src, .ret_type);
const operand = try expr(mod, scope, .{ .ty = fn_ret_ty }, rhs_node);
return mod.addZIRUnOp(scope, src, .@"return", operand);
}
} else {
return mod.addZIRNoOp(scope, src, .returnvoid);
}
}
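// Minimal sketch (illustrative): an aggregate return operand is the kind of
// node `nodeMayNeedMemoryLocation` flags, so it would be built directly in the
// `ret_ptr` location; a scalar operand is coerced to `ret_type` instead.
const PairSketch = struct { a: i32, b: i32 };

fn makePair() PairSketch {
    return .{ .a = 1, .b = 2 }; // struct literal: routed through ret_ptr
}

fn makeInt() i32 {
    return 42; // scalar: coerced to the function's return type
}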
fn identifier(mod: *Module, scope: *Scope, ident: *ast.Node.Identifier) InnerError!*zir.Inst {
fn identifier(mod: *Module, scope: *Scope, ident: *ast.Node.OneToken) InnerError!*zir.Inst {
const tracy = trace(@src());
defer tracy.end();
@ -345,12 +478,19 @@ fn identifier(mod: *Module, scope: *Scope, ident: *ast.Node.Identifier) InnerErr
{
var s = scope;
while (true) switch (s.tag) {
.local_var => {
const local_var = s.cast(Scope.LocalVar).?;
if (mem.eql(u8, local_var.name, ident_name)) {
return local_var.inst;
.local_val => {
const local_val = s.cast(Scope.LocalVal).?;
if (mem.eql(u8, local_val.name, ident_name)) {
return local_val.inst;
}
s = local_var.parent;
s = local_val.parent;
},
.local_ptr => {
const local_ptr = s.cast(Scope.LocalPtr).?;
if (mem.eql(u8, local_ptr.name, ident_name)) {
return try mod.addZIRUnOp(scope, src, .deref, local_ptr.ptr);
}
s = local_ptr.parent;
},
.gen_zir => s = s.cast(Scope.GenZIR).?.parent,
else => break,
@ -364,7 +504,7 @@ fn identifier(mod: *Module, scope: *Scope, ident: *ast.Node.Identifier) InnerErr
return mod.failNode(scope, &ident.base, "use of undeclared identifier '{}'", .{ident_name});
}
fn stringLiteral(mod: *Module, scope: *Scope, str_lit: *ast.Node.StringLiteral) InnerError!*zir.Inst {
fn stringLiteral(mod: *Module, scope: *Scope, str_lit: *ast.Node.OneToken) InnerError!*zir.Inst {
const tree = scope.tree();
const unparsed_bytes = tree.tokenSlice(str_lit.token);
const arena = scope.arena();
@ -383,7 +523,7 @@ fn stringLiteral(mod: *Module, scope: *Scope, str_lit: *ast.Node.StringLiteral)
return mod.addZIRInst(scope, src, zir.Inst.Str, .{ .bytes = bytes }, .{});
}
fn integerLiteral(mod: *Module, scope: *Scope, int_lit: *ast.Node.IntegerLiteral) InnerError!*zir.Inst {
fn integerLiteral(mod: *Module, scope: *Scope, int_lit: *ast.Node.OneToken) InnerError!*zir.Inst {
const arena = scope.arena();
const tree = scope.tree();
const prefixed_bytes = tree.tokenSlice(int_lit.token);
@ -414,7 +554,7 @@ fn integerLiteral(mod: *Module, scope: *Scope, int_lit: *ast.Node.IntegerLiteral
}
}
fn floatLiteral(mod: *Module, scope: *Scope, float_lit: *ast.Node.FloatLiteral) InnerError!*zir.Inst {
fn floatLiteral(mod: *Module, scope: *Scope, float_lit: *ast.Node.OneToken) InnerError!*zir.Inst {
const arena = scope.arena();
const tree = scope.tree();
const bytes = tree.tokenSlice(float_lit.token);
@ -434,30 +574,38 @@ fn floatLiteral(mod: *Module, scope: *Scope, float_lit: *ast.Node.FloatLiteral)
});
}
fn primitiveLiteral(mod: *Module, scope: *Scope, node: *ast.Node) InnerError!*zir.Inst {
fn undefLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError!*zir.Inst {
const arena = scope.arena();
const tree = scope.tree();
const src = tree.token_locs[node.firstToken()].start;
const src = tree.token_locs[node.token].start;
return mod.addZIRInstConst(scope, src, .{
.ty = Type.initTag(.@"undefined"),
.val = Value.initTag(.undef),
});
}
if (node.cast(ast.Node.BoolLiteral)) |bool_node| {
return mod.addZIRInstConst(scope, src, .{
.ty = Type.initTag(.bool),
.val = if (tree.token_ids[bool_node.token] == .Keyword_true)
Value.initTag(.bool_true)
else
Value.initTag(.bool_false),
});
} else if (node.tag == .UndefinedLiteral) {
return mod.addZIRInstConst(scope, src, .{
.ty = Type.initTag(.@"undefined"),
.val = Value.initTag(.undef),
});
} else if (node.tag == .NullLiteral) {
return mod.addZIRInstConst(scope, src, .{
.ty = Type.initTag(.@"null"),
.val = Value.initTag(.null_value),
});
} else unreachable;
fn boolLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError!*zir.Inst {
const arena = scope.arena();
const tree = scope.tree();
const src = tree.token_locs[node.token].start;
return mod.addZIRInstConst(scope, src, .{
.ty = Type.initTag(.bool),
.val = switch (tree.token_ids[node.token]) {
.Keyword_true => Value.initTag(.bool_true),
.Keyword_false => Value.initTag(.bool_false),
else => unreachable,
},
});
}
fn nullLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError!*zir.Inst {
const arena = scope.arena();
const tree = scope.tree();
const src = tree.token_locs[node.token].start;
return mod.addZIRInstConst(scope, src, .{
.ty = Type.initTag(.@"null"),
.val = Value.initTag(.null_value),
});
}
fn assembly(mod: *Module, scope: *Scope, asm_node: *ast.Node.Asm) InnerError!*zir.Inst {
@ -470,19 +618,26 @@ fn assembly(mod: *Module, scope: *Scope, asm_node: *ast.Node.Asm) InnerError!*zi
const inputs = try arena.alloc(*zir.Inst, asm_node.inputs.len);
const args = try arena.alloc(*zir.Inst, asm_node.inputs.len);
const src = tree.token_locs[asm_node.asm_token].start;
const str_type = try mod.addZIRInstConst(scope, src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.const_slice_u8_type),
});
const str_type_rl: ResultLoc = .{ .ty = str_type };
for (asm_node.inputs) |input, i| {
// TODO semantically analyze constraints
inputs[i] = try expr(mod, scope, input.constraint);
args[i] = try expr(mod, scope, input.expr);
inputs[i] = try expr(mod, scope, str_type_rl, input.constraint);
args[i] = try expr(mod, scope, .none, input.expr);
}
const src = tree.token_locs[asm_node.asm_token].start;
const return_type = try mod.addZIRInstConst(scope, src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.void_type),
});
const asm_inst = try mod.addZIRInst(scope, src, zir.Inst.Asm, .{
.asm_source = try expr(mod, scope, asm_node.template),
.asm_source = try expr(mod, scope, str_type_rl, asm_node.template),
.return_type = return_type,
}, .{
.@"volatile" = asm_node.volatile_token != null,
@ -493,63 +648,174 @@ fn assembly(mod: *Module, scope: *Scope, asm_node: *ast.Node.Asm) InnerError!*zi
return asm_inst;
}
fn builtinCall(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
const tree = scope.tree();
const builtin_name = tree.tokenSlice(call.builtin_token);
const src = tree.token_locs[call.builtin_token].start;
fn ensureBuiltinParamCount(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall, count: u32) !void {
if (call.params_len == count)
return;
inline for (std.meta.declarations(zir.Inst)) |inst| {
if (inst.data != .Type) continue;
const T = inst.data.Type;
if (!@hasDecl(T, "builtin_name")) continue;
if (std.mem.eql(u8, builtin_name, T.builtin_name)) {
var value: T = undefined;
const positionals = @typeInfo(std.meta.fieldInfo(T, "positionals").field_type).Struct;
if (positionals.fields.len == 0) {
return mod.addZIRInst(scope, src, T, value.positionals, value.kw_args);
}
const arg_count: ?usize = if (positionals.fields[0].field_type == []*zir.Inst) null else positionals.fields.len;
if (arg_count) |some| {
if (call.params_len != some) {
return mod.failTok(
scope,
call.builtin_token,
"expected {} parameter{}, found {}",
.{ some, if (some == 1) "" else "s", call.params_len },
);
}
const params = call.params();
inline for (positionals.fields) |p, i| {
@field(value.positionals, p.name) = try expr(mod, scope, params[i]);
}
} else {
return mod.failTok(scope, call.builtin_token, "TODO var args builtin '{}'", .{builtin_name});
}
return mod.addZIRInst(scope, src, T, value.positionals, .{});
}
}
return mod.failTok(scope, call.builtin_token, "TODO implement builtin call for '{}'", .{builtin_name});
const s = if (count == 1) "" else "s";
return mod.failTok(scope, call.builtin_token, "expected {} parameter{}, found {}", .{ count, s, call.params_len });
}
fn callExpr(mod: *Module, scope: *Scope, node: *ast.Node.Call) InnerError!*zir.Inst {
fn simpleCast(
mod: *Module,
scope: *Scope,
rl: ResultLoc,
call: *ast.Node.BuiltinCall,
inst_tag: zir.Inst.Tag,
) InnerError!*zir.Inst {
try ensureBuiltinParamCount(mod, scope, call, 2);
const tree = scope.tree();
const lhs = try expr(mod, scope, node.lhs);
const src = tree.token_locs[call.builtin_token].start;
const type_type = try mod.addZIRInstConst(scope, src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.type_type),
});
const params = call.params();
const dest_type = try expr(mod, scope, .{ .ty = type_type }, params[0]);
const rhs = try expr(mod, scope, .none, params[1]);
const result = try mod.addZIRBinOp(scope, src, inst_tag, dest_type, rhs);
return rlWrap(mod, scope, rl, result);
}
fn ptrToInt(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
try ensureBuiltinParamCount(mod, scope, call, 1);
const operand = try expr(mod, scope, .none, call.params()[0]);
const tree = scope.tree();
const src = tree.token_locs[call.builtin_token].start;
return mod.addZIRUnOp(scope, src, .ptrtoint, operand);
}
fn as(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
try ensureBuiltinParamCount(mod, scope, call, 2);
const tree = scope.tree();
const src = tree.token_locs[call.builtin_token].start;
const params = call.params();
const dest_type = try typeExpr(mod, scope, params[0]);
switch (rl) {
.none => return try expr(mod, scope, .{ .ty = dest_type }, params[1]),
.discard => {
const result = try expr(mod, scope, .{ .ty = dest_type }, params[1]);
_ = try mod.addZIRUnOp(scope, result.src, .ensure_result_non_error, result);
return result;
},
.ty => |result_ty| {
const result = try expr(mod, scope, .{ .ty = dest_type }, params[1]);
return mod.addZIRBinOp(scope, src, .as, result_ty, result);
},
.ptr => |result_ptr| {
const casted_result_ptr = try mod.addZIRBinOp(scope, src, .coerce_result_ptr, dest_type, result_ptr);
return expr(mod, scope, .{ .ptr = casted_result_ptr }, params[1]);
},
.bitcasted_ptr => |bitcasted_ptr| {
// TODO here we should be able to resolve the inference; we now have a type for the result.
return mod.failTok(scope, call.builtin_token, "TODO implement @as with result location @bitCast", .{});
},
.inferred_ptr => |result_alloc| {
// TODO here we should be able to resolve the inference; we now have a type for the result.
return mod.failTok(scope, call.builtin_token, "TODO implement @as with inferred-type result location pointer", .{});
},
.block_ptr => |block_ptr| {
const casted_block_ptr = try mod.addZIRInst(scope, src, zir.Inst.CoerceResultBlockPtr, .{
.dest_type = dest_type,
.block = block_ptr,
}, .{});
return expr(mod, scope, .{ .ptr = casted_block_ptr }, params[1]);
},
}
}
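// Minimal sketch (illustrative): with a `.none` result location, `@as` simply
// forwards `.{ .ty = dest_type }` to its operand, so the value below is
// produced as a `u64` directly rather than converted after the fact.
fn widen(x: u32) u64 {
    return @as(u64, x);
}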
fn bitCast(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
try ensureBuiltinParamCount(mod, scope, call, 2);
const tree = scope.tree();
const src = tree.token_locs[call.builtin_token].start;
const type_type = try mod.addZIRInstConst(scope, src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.type_type),
});
const params = call.params();
const dest_type = try expr(mod, scope, .{ .ty = type_type }, params[0]);
switch (rl) {
.none => {
const operand = try expr(mod, scope, .none, params[1]);
return mod.addZIRBinOp(scope, src, .bitcast, dest_type, operand);
},
.discard => {
const operand = try expr(mod, scope, .none, params[1]);
const result = try mod.addZIRBinOp(scope, src, .bitcast, dest_type, operand);
_ = try mod.addZIRUnOp(scope, result.src, .ensure_result_non_error, result);
return result;
},
.ty => |result_ty| {
const result = try expr(mod, scope, .none, params[1]);
const bitcasted = try mod.addZIRBinOp(scope, src, .bitcast, dest_type, result);
return mod.addZIRBinOp(scope, src, .as, result_ty, bitcasted);
},
.ptr => |result_ptr| {
const casted_result_ptr = try mod.addZIRUnOp(scope, src, .bitcast_result_ptr, result_ptr);
return expr(mod, scope, .{ .bitcasted_ptr = casted_result_ptr.castTag(.bitcast_result_ptr).? }, params[1]);
},
.bitcasted_ptr => |bitcasted_ptr| {
return mod.failTok(scope, call.builtin_token, "TODO implement @bitCast with result location another @bitCast", .{});
},
.block_ptr => |block_ptr| {
return mod.failTok(scope, call.builtin_token, "TODO implement @bitCast with result location inferred peer types", .{});
},
.inferred_ptr => |result_alloc| {
// TODO here we should be able to resolve the inference; we now have a type for the result.
return mod.failTok(scope, call.builtin_token, "TODO implement @bitCast with inferred-type result location pointer", .{});
},
}
}
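// Minimal sketch (illustrative, using the two-parameter builtin form from this
// era): in a plain value context the operand is generated with no result type
// and the bitcast is applied to the finished value.
fn bits(x: f32) u32 {
    return @bitCast(u32, x);
}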
fn builtinCall(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
const tree = scope.tree();
const builtin_name = tree.tokenSlice(call.builtin_token);
// We handle the different builtins manually because they have different semantics depending
// on the function. For example, `@as` and others participate in result location semantics,
// and `@cImport` creates a special scope that collects a .c source code text buffer.
// Also, some builtins have a variable number of parameters.
if (mem.eql(u8, builtin_name, "@ptrToInt")) {
return rlWrap(mod, scope, rl, try ptrToInt(mod, scope, call));
} else if (mem.eql(u8, builtin_name, "@as")) {
return as(mod, scope, rl, call);
} else if (mem.eql(u8, builtin_name, "@floatCast")) {
return simpleCast(mod, scope, rl, call, .floatcast);
} else if (mem.eql(u8, builtin_name, "@intCast")) {
return simpleCast(mod, scope, rl, call, .intcast);
} else if (mem.eql(u8, builtin_name, "@bitCast")) {
return bitCast(mod, scope, rl, call);
} else {
return mod.failTok(scope, call.builtin_token, "invalid builtin function: '{}'", .{builtin_name});
}
}
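// Minimal sketch (illustrative) of two of the hand-dispatched builtins above:
// `@ptrToInt` takes no result location and is wrapped afterwards, while
// `@intCast` goes through the shared simpleCast path.
fn lowByte(p: *const u32) u8 {
    const addr = @ptrToInt(p);
    return @intCast(u8, addr & 0xff);
}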
fn callExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Call) InnerError!*zir.Inst {
const tree = scope.tree();
const lhs = try expr(mod, scope, .none, node.lhs);
const param_nodes = node.params();
const args = try scope.getGenZIR().arena.alloc(*zir.Inst, param_nodes.len);
for (param_nodes) |param_node, i| {
args[i] = try expr(mod, scope, param_node);
const param_src = tree.token_locs[param_node.firstToken()].start;
const param_type = try mod.addZIRInst(scope, param_src, zir.Inst.ParamType, .{
.func = lhs,
.arg_index = i,
}, .{});
args[i] = try expr(mod, scope, .{ .ty = param_type }, param_node);
}
const src = tree.token_locs[node.lhs.firstToken()].start;
return mod.addZIRInst(scope, src, zir.Inst.Call, .{
const result = try mod.addZIRInst(scope, src, zir.Inst.Call, .{
.func = lhs,
.args = args,
}, .{});
// TODO function call with result location
return rlWrap(mod, scope, rl, result);
}
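// Minimal sketch (illustrative): each argument is now generated with the
// callee's parameter type (obtained via `param_type`) as its result type, so
// the literal below is checked as a `u16` right at the call site.
fn addOne(x: u16) u16 {
    return x + 1;
}

fn callerSketch() u16 {
    return addOne(255);
}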
fn unreach(mod: *Module, scope: *Scope, unreach_node: *ast.Node.Unreachable) InnerError!*zir.Inst {
fn unreach(mod: *Module, scope: *Scope, unreach_node: *ast.Node.OneToken) InnerError!*zir.Inst {
const tree = scope.tree();
const src = tree.token_locs[unreach_node.token].start;
return mod.addZIRNoOp(scope, src, .@"unreachable");
@ -611,7 +877,9 @@ fn nodeMayNeedMemoryLocation(start_node: *ast.Node) bool {
.FieldInitializer,
=> unreachable,
.ControlFlowExpression,
.Return,
.Break,
.Continue,
.BitNot,
.BoolNot,
.VarDecl,
@ -722,3 +990,39 @@ fn nodeMayNeedMemoryLocation(start_node: *ast.Node) bool {
}
}
}
/// Applies `rl` semantics to `inst`. Expressions which do not do their own handling of
/// result locations must call this function on their result.
/// As an example, if the `ResultLoc` is `ptr`, it will write the result to the pointer.
/// If the `ResultLoc` is `ty`, it will coerce the result to the type.
fn rlWrap(mod: *Module, scope: *Scope, rl: ResultLoc, result: *zir.Inst) InnerError!*zir.Inst {
switch (rl) {
.none => return result,
.discard => {
// Emit a compile error for discarding error values.
_ = try mod.addZIRUnOp(scope, result.src, .ensure_result_non_error, result);
return result;
},
.ty => |ty_inst| return mod.addZIRBinOp(scope, result.src, .as, ty_inst, result),
.ptr => |ptr_inst| {
const casted_result = try mod.addZIRInst(scope, result.src, zir.Inst.CoerceToPtrElem, .{
.ptr = ptr_inst,
.value = result,
}, .{});
_ = try mod.addZIRInst(scope, result.src, zir.Inst.Store, .{
.ptr = ptr_inst,
.value = casted_result,
}, .{});
return casted_result;
},
.bitcasted_ptr => |bitcasted_ptr| {
return mod.fail(scope, result.src, "TODO implement rlWrap .bitcasted_ptr", .{});
},
.inferred_ptr => |alloc| {
return mod.fail(scope, result.src, "TODO implement rlWrap .inferred_ptr", .{});
},
.block_ptr => |block_ptr| {
return mod.fail(scope, result.src, "TODO implement rlWrap .block_ptr", .{});
},
}
}
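// Minimal sketch (illustrative) of the user-facing effect of the `.discard`
// case and of `ensure_result_used`, per the doc comments on those ZIR
// instructions:
fn mayFail() !u32 {
    return 7;
}

fn useIt() void {
    _ = mayFail() catch 0; // fine: error handled, value discarded on purpose
    // _ = mayFail();      // ensure_result_non_error: discards a possible error
    // mayFail();          // ensure_result_used: non-void result is ignored
}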


@ -90,7 +90,7 @@ fn genFn(file: *C, decl: *Decl) !void {
const instructions = func.analysis.success.instructions;
if (instructions.len > 0) {
for (instructions) |inst| {
try writer.writeAll("\n\t");
try writer.writeAll("\n ");
switch (inst.tag) {
.assembly => try genAsm(file, inst.castTag(.assembly).?, decl),
.call => try genCall(file, inst.castTag(.call).?, decl),
@ -106,21 +106,7 @@ fn genFn(file: *C, decl: *Decl) !void {
}
fn genRet(file: *C, inst: *Inst.UnOp, decl: *Decl, expected_return_type: Type) !void {
const writer = file.main.writer();
const ret_value = inst.operand;
const value = ret_value.value().?;
if (expected_return_type.eql(ret_value.ty))
return file.fail(decl.src(), "TODO return {}", .{expected_return_type})
else if (expected_return_type.isInt() and ret_value.ty.tag() == .comptime_int)
if (value.intFitsInType(expected_return_type, file.options.target))
if (expected_return_type.intInfo(file.options.target).bits <= 64)
try writer.print("return {};", .{value.toUnsignedInt()})
else
return file.fail(decl.src(), "TODO return ints > 64 bits", .{})
else
return file.fail(decl.src(), "comptime int {} does not fit in {}", .{ value.toUnsignedInt(), expected_return_type })
else
return file.fail(decl.src(), "return type mismatch: expected {}, found {}", .{ expected_return_type, ret_value.ty });
return file.fail(decl.src(), "TODO return {}", .{expected_return_type});
}
fn genCall(file: *C, inst: *Inst.Call, decl: *Decl) !void {
@ -162,7 +148,7 @@ fn genAsm(file: *C, as: *Inst.Assembly, decl: *Decl) !void {
if (c.val.tag() == .int_u64) {
try writer.writeAll("register ");
try renderType(file, writer, arg.ty, decl.src());
try writer.print(" {}_constant __asm__(\"{}\") = {};\n\t", .{ reg, reg, c.val.toUnsignedInt() });
try writer.print(" {}_constant __asm__(\"{}\") = {};\n ", .{ reg, reg, c.val.toUnsignedInt() });
} else {
return file.fail(decl.src(), "TODO inline asm {} args", .{c.val.tag()});
}


@ -1579,6 +1579,8 @@ pub fn createElfFile(allocator: *Allocator, file: fs.File, options: Options) !Fi
.elf => {},
.macho => return error.TODOImplementWritingMachO,
.wasm => return error.TODOImplementWritingWasmObjects,
.hex => return error.TODOImplementWritingHex,
.raw => return error.TODOImplementWritingRaw,
}
var self: File.Elf = .{
@ -1638,6 +1640,8 @@ fn openBinFileInner(allocator: *Allocator, file: fs.File, options: Options) !Fil
.elf => {},
.macho => return error.IncrFailed,
.wasm => return error.IncrFailed,
.hex => return error.IncrFailed,
.raw => return error.IncrFailed,
}
var self: File.Elf = .{
.allocator = allocator,


@ -141,11 +141,19 @@ const usage_build_generic =
\\ --name [name] Override output name
\\ --mode [mode] Set the build mode
\\ Debug (default) optimizations off, safety on
\\ ReleaseFast optimizations on, safety off
\\ ReleaseSafe optimizations on, safety on
\\ ReleaseSmall optimize for small binary, safety off
\\ ReleaseFast Optimizations on, safety off
\\ ReleaseSafe Optimizations on, safety on
\\ ReleaseSmall Optimize for small binary, safety off
\\ --dynamic Force output to be dynamically linked
\\ --strip Exclude debug symbols
\\ -ofmt=[mode] Override target object format
\\ elf Executable and Linking Format
\\ c Compile to C source code
\\ coff (planned) Common Object File Format (Windows)
\\ pe (planned) Portable Executable (Windows)
\\ macho (planned) macOS relocatables
\\ hex (planned) Intel IHEX
\\ raw (planned) Dump machine code directly
\\
\\Link Options:
\\ -l[lib], --library [lib] Link against system library
@ -195,7 +203,7 @@ fn buildOutputType(
var target_arch_os_abi: []const u8 = "native";
var target_mcpu: ?[]const u8 = null;
var target_dynamic_linker: ?[]const u8 = null;
var object_format: ?std.builtin.ObjectFormat = null;
var target_ofmt: ?[]const u8 = null;
var system_libs = std.ArrayList([]const u8).init(gpa);
defer system_libs.deinit();
@ -282,12 +290,8 @@ fn buildOutputType(
}
i += 1;
target_mcpu = args[i];
} else if (mem.eql(u8, arg, "--c")) {
if (object_format) |old| {
std.debug.print("attempted to override object format {} with C\n", .{old});
process.exit(1);
}
object_format = .c;
} else if (mem.startsWith(u8, arg, "-ofmt=")) {
target_ofmt = arg["-ofmt=".len..];
} else if (mem.startsWith(u8, arg, "-mcpu=")) {
target_mcpu = arg["-mcpu=".len..];
} else if (mem.eql(u8, arg, "--dynamic-linker")) {
@ -434,6 +438,30 @@ fn buildOutputType(
process.exit(1);
};
const object_format: ?std.Target.ObjectFormat = blk: {
const ofmt = target_ofmt orelse break :blk null;
if (mem.eql(u8, ofmt, "elf")) {
break :blk .elf;
} else if (mem.eql(u8, ofmt, "c")) {
break :blk .c;
} else if (mem.eql(u8, ofmt, "coff")) {
break :blk .coff;
} else if (mem.eql(u8, ofmt, "pe")) {
break :blk .coff;
} else if (mem.eql(u8, ofmt, "macho")) {
break :blk .macho;
} else if (mem.eql(u8, ofmt, "wasm")) {
break :blk .wasm;
} else if (mem.eql(u8, ofmt, "hex")) {
break :blk .hex;
} else if (mem.eql(u8, ofmt, "raw")) {
break :blk .raw;
} else {
std.debug.print("unsupported object format: {}", .{ofmt});
process.exit(1);
}
};
const bin_path = switch (emit_bin) {
.no => {
std.debug.print("-fno-emit-bin not supported yet", .{});


@ -1308,8 +1308,7 @@ fn transBinaryOperator(
const rhs = try transExpr(rp, &block_scope.base, ZigClangBinaryOperator_getRHS(stmt), .used, .r_value);
if (expr) {
_ = try appendToken(rp.c, .Semicolon, ";");
const break_node = try transCreateNodeBreakToken(rp.c, block_scope.label);
break_node.rhs = rhs;
const break_node = try transCreateNodeBreakToken(rp.c, block_scope.label, rhs);
try block_scope.statements.append(&break_node.base);
const block_node = try block_scope.complete(rp.c);
const rparen = try appendToken(rp.c, .RParen, ")");
@ -1881,12 +1880,19 @@ fn transReturnStmt(
scope: *Scope,
expr: *const ZigClangReturnStmt,
) TransError!*ast.Node {
const node = try transCreateNodeReturnExpr(rp.c);
if (ZigClangReturnStmt_getRetValue(expr)) |val_expr| {
node.rhs = try transExprCoercing(rp, scope, val_expr, .used, .r_value);
}
const return_kw = try appendToken(rp.c, .Keyword_return, "return");
const rhs: ?*ast.Node = if (ZigClangReturnStmt_getRetValue(expr)) |val_expr|
try transExprCoercing(rp, scope, val_expr, .used, .r_value)
else
null;
const return_expr = try ast.Node.ControlFlowExpression.create(rp.c.arena, .{
.ltoken = return_kw,
.tag = .Return,
}, .{
.rhs = rhs,
});
_ = try appendToken(rp.c, .Semicolon, ";");
return &node.base;
return &return_expr.base;
}
fn transStringLiteral(
@ -1912,8 +1918,9 @@ fn transStringLiteral(
buf[buf.len - 1] = '"';
const token = try appendToken(rp.c, .StringLiteral, buf);
const node = try rp.c.arena.create(ast.Node.StringLiteral);
const node = try rp.c.arena.create(ast.Node.OneToken);
node.* = .{
.base = .{ .tag = .StringLiteral },
.token = token,
};
return maybeSuppressResult(rp, scope, result_used, &node.base);
@ -2518,7 +2525,7 @@ fn transDoWhileLoop(
prefix_op.rhs = try transBoolExpr(rp, &cond_scope.base, @ptrCast(*const ZigClangExpr, ZigClangDoStmt_getCond(stmt)), .used, .r_value, true);
_ = try appendToken(rp.c, .RParen, ")");
if_node.condition = &prefix_op.base;
if_node.body = &(try transCreateNodeBreak(rp.c, null)).base;
if_node.body = &(try transCreateNodeBreak(rp.c, null, null)).base;
_ = try appendToken(rp.c, .Semicolon, ";");
const body_node = if (ZigClangStmt_getStmtClass(ZigClangDoStmt_getBody(stmt)) == .CompoundStmtClass) blk: {
@ -2688,7 +2695,7 @@ fn transSwitch(
_ = try appendToken(rp.c, .Colon, ":");
if (!switch_scope.has_default) {
const else_prong = try transCreateNodeSwitchCase(rp.c, try transCreateNodeSwitchElse(rp.c));
else_prong.expr = &(try transCreateNodeBreak(rp.c, "__switch")).base;
else_prong.expr = &(try transCreateNodeBreak(rp.c, "__switch", null)).base;
_ = try appendToken(rp.c, .Comma, ",");
if (switch_scope.case_index >= switch_scope.cases.len)
@ -2732,7 +2739,7 @@ fn transCase(
try transExpr(rp, scope, ZigClangCaseStmt_getLHS(stmt), .used, .r_value);
const switch_prong = try transCreateNodeSwitchCase(rp.c, expr);
switch_prong.expr = &(try transCreateNodeBreak(rp.c, label)).base;
switch_prong.expr = &(try transCreateNodeBreak(rp.c, label, null)).base;
_ = try appendToken(rp.c, .Comma, ",");
if (switch_scope.case_index >= switch_scope.cases.len)
@ -2768,7 +2775,7 @@ fn transDefault(
_ = try appendToken(rp.c, .Semicolon, ";");
const else_prong = try transCreateNodeSwitchCase(rp.c, try transCreateNodeSwitchElse(rp.c));
else_prong.expr = &(try transCreateNodeBreak(rp.c, label)).base;
else_prong.expr = &(try transCreateNodeBreak(rp.c, label, null)).base;
_ = try appendToken(rp.c, .Comma, ",");
if (switch_scope.case_index >= switch_scope.cases.len)
@ -2843,8 +2850,9 @@ fn transCharLiteral(
}
var char_buf: [4]u8 = undefined;
const token = try appendTokenFmt(rp.c, .CharLiteral, "'{}'", .{escapeChar(@intCast(u8, val), &char_buf)});
const node = try rp.c.arena.create(ast.Node.CharLiteral);
const node = try rp.c.arena.create(ast.Node.OneToken);
node.* = .{
.base = .{ .tag = .CharLiteral },
.token = token,
};
break :blk &node.base;
@ -2889,8 +2897,11 @@ fn transStmtExpr(rp: RestorePoint, scope: *Scope, stmt: *const ZigClangStmtExpr,
const result = try transStmt(rp, &block_scope.base, it[0], .unused, .r_value);
try block_scope.statements.append(result);
}
const break_node = try transCreateNodeBreak(rp.c, "blk");
break_node.rhs = try transStmt(rp, &block_scope.base, it[0], .used, .r_value);
const break_node = blk: {
var tmp = try CtrlFlow.init(rp.c, .Break, "blk");
const rhs = try transStmt(rp, &block_scope.base, it[0], .used, .r_value);
break :blk try tmp.finish(rhs);
};
_ = try appendToken(rp.c, .Semicolon, ";");
try block_scope.statements.append(&break_node.base);
const block_node = try block_scope.complete(rp.c);
@ -3205,8 +3216,7 @@ fn transCreatePreCrement(
const assign = try transCreateNodeInfixOp(rp, scope, ref_node, op, token, one, .used, false);
try block_scope.statements.append(assign);
const break_node = try transCreateNodeBreakToken(rp.c, block_scope.label);
break_node.rhs = ref_node;
const break_node = try transCreateNodeBreakToken(rp.c, block_scope.label, ref_node);
try block_scope.statements.append(&break_node.base);
const block_node = try block_scope.complete(rp.c);
// semicolon must immediately follow rbrace because it is the last token in a block
@ -3297,8 +3307,11 @@ fn transCreatePostCrement(
const assign = try transCreateNodeInfixOp(rp, scope, ref_node, op, token, one, .used, false);
try block_scope.statements.append(assign);
const break_node = try transCreateNodeBreakToken(rp.c, block_scope.label);
break_node.rhs = try transCreateNodeIdentifier(rp.c, tmp);
const break_node = blk: {
var tmp_ctrl_flow = try CtrlFlow.initToken(rp.c, .Break, block_scope.label);
const rhs = try transCreateNodeIdentifier(rp.c, tmp);
break :blk try tmp_ctrl_flow.finish(rhs);
};
try block_scope.statements.append(&break_node.base);
_ = try appendToken(rp.c, .Semicolon, ";");
const block_node = try block_scope.complete(rp.c);
@ -3490,8 +3503,7 @@ fn transCreateCompoundAssign(
try block_scope.statements.append(assign);
}
const break_node = try transCreateNodeBreakToken(rp.c, block_scope.label);
break_node.rhs = ref_node;
const break_node = try transCreateNodeBreakToken(rp.c, block_scope.label, ref_node);
try block_scope.statements.append(&break_node.base);
const block_node = try block_scope.complete(rp.c);
const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression);
@ -3567,10 +3579,8 @@ fn transCPtrCast(
fn transBreak(rp: RestorePoint, scope: *Scope) TransError!*ast.Node {
const break_scope = scope.getBreakableScope();
const br = try transCreateNodeBreak(rp.c, if (break_scope.id == .Switch)
"__switch"
else
null);
const label_text: ?[]const u8 = if (break_scope.id == .Switch) "__switch" else null;
const br = try transCreateNodeBreak(rp.c, label_text, null);
_ = try appendToken(rp.c, .Semicolon, ";");
return &br.base;
}
@ -3578,8 +3588,9 @@ fn transBreak(rp: RestorePoint, scope: *Scope) TransError!*ast.Node {
fn transFloatingLiteral(rp: RestorePoint, scope: *Scope, stmt: *const ZigClangFloatingLiteral, used: ResultUsed) TransError!*ast.Node {
// TODO use something more accurate
const dbl = ZigClangAPFloat_getValueAsApproximateDouble(stmt);
const node = try rp.c.arena.create(ast.Node.FloatLiteral);
const node = try rp.c.arena.create(ast.Node.OneToken);
node.* = .{
.base = .{ .tag = .FloatLiteral },
.token = try appendTokenFmt(rp.c, .FloatLiteral, "{d}", .{dbl}),
};
return maybeSuppressResult(rp, scope, used, &node.base);
@ -3619,7 +3630,7 @@ fn transBinaryConditionalOperator(rp: RestorePoint, scope: *Scope, stmt: *const
});
try block_scope.statements.append(&tmp_var.base);
const break_node = try transCreateNodeBreakToken(rp.c, block_scope.label);
var break_node_tmp = try CtrlFlow.initToken(rp.c, .Break, block_scope.label);
const if_node = try transCreateNodeIf(rp.c);
var cond_scope = Scope.Condition{
@ -3641,7 +3652,7 @@ fn transBinaryConditionalOperator(rp: RestorePoint, scope: *Scope, stmt: *const
if_node.@"else".?.body = try transExpr(rp, &block_scope.base, false_expr, .used, .r_value);
_ = try appendToken(rp.c, .Semicolon, ";");
break_node.rhs = &if_node.base;
const break_node = try break_node_tmp.finish(&if_node.base);
_ = try appendToken(rp.c, .Semicolon, ";");
try block_scope.statements.append(&break_node.base);
const block_node = try block_scope.complete(rp.c);
@ -3822,8 +3833,9 @@ fn qualTypeToLog2IntRef(rp: RestorePoint, qt: ZigClangQualType, source_loc: ZigC
if (int_bit_width != 0) {
// we can perform the log2 now.
const cast_bit_width = math.log2_int(u64, int_bit_width);
const node = try rp.c.arena.create(ast.Node.IntegerLiteral);
const node = try rp.c.arena.create(ast.Node.OneToken);
node.* = .{
.base = .{ .tag = .IntegerLiteral },
.token = try appendTokenFmt(rp.c, .Identifier, "u{}", .{cast_bit_width}),
};
return &node.base;
@ -3845,8 +3857,9 @@ fn qualTypeToLog2IntRef(rp: RestorePoint, qt: ZigClangQualType, source_loc: ZigC
const import_fn_call = try rp.c.createBuiltinCall("@import", 1);
const std_token = try appendToken(rp.c, .StringLiteral, "\"std\"");
const std_node = try rp.c.arena.create(ast.Node.StringLiteral);
const std_node = try rp.c.arena.create(ast.Node.OneToken);
std_node.* = .{
.base = .{ .tag = .StringLiteral },
.token = std_token,
};
import_fn_call.params()[0] = &std_node.base;
@ -4081,8 +4094,11 @@ fn transCreateNodeAssign(
const assign = try transCreateNodeInfixOp(rp, &block_scope.base, lhs_node, .Assign, lhs_eq_token, ident, .used, false);
try block_scope.statements.append(assign);
const break_node = try transCreateNodeBreak(rp.c, label_name);
break_node.rhs = try transCreateNodeIdentifier(rp.c, tmp);
const break_node = blk: {
var tmp_ctrl_flow = try CtrlFlow.init(rp.c, .Break, label_name);
const rhs_expr = try transCreateNodeIdentifier(rp.c, tmp);
break :blk try tmp_ctrl_flow.finish(rhs_expr);
};
_ = try appendToken(rp.c, .Semicolon, ";");
try block_scope.statements.append(&break_node.base);
const block_node = try block_scope.complete(rp.c);
@ -4255,28 +4271,19 @@ fn transCreateNodeAPInt(c: *Context, int: *const ZigClangAPSInt) !*ast.Node {
};
defer c.arena.free(str);
const token = try appendToken(c, .IntegerLiteral, str);
const node = try c.arena.create(ast.Node.IntegerLiteral);
const node = try c.arena.create(ast.Node.OneToken);
node.* = .{
.base = .{ .tag = .IntegerLiteral },
.token = token,
};
return &node.base;
}
fn transCreateNodeReturnExpr(c: *Context) !*ast.Node.ControlFlowExpression {
const ltoken = try appendToken(c, .Keyword_return, "return");
const node = try c.arena.create(ast.Node.ControlFlowExpression);
node.* = .{
.ltoken = ltoken,
.kind = .Return,
.rhs = null,
};
return node;
}
fn transCreateNodeUndefinedLiteral(c: *Context) !*ast.Node {
const token = try appendToken(c, .Keyword_undefined, "undefined");
const node = try c.arena.create(ast.Node.UndefinedLiteral);
const node = try c.arena.create(ast.Node.OneToken);
node.* = .{
.base = .{ .tag = .UndefinedLiteral },
.token = token,
};
return &node.base;
@ -4284,8 +4291,9 @@ fn transCreateNodeUndefinedLiteral(c: *Context) !*ast.Node {
fn transCreateNodeNullLiteral(c: *Context) !*ast.Node {
const token = try appendToken(c, .Keyword_null, "null");
const node = try c.arena.create(ast.Node.NullLiteral);
const node = try c.arena.create(ast.Node.OneToken);
node.* = .{
.base = .{ .tag = .NullLiteral },
.token = token,
};
return &node.base;
@ -4296,8 +4304,9 @@ fn transCreateNodeBoolLiteral(c: *Context, value: bool) !*ast.Node {
try appendToken(c, .Keyword_true, "true")
else
try appendToken(c, .Keyword_false, "false");
const node = try c.arena.create(ast.Node.BoolLiteral);
const node = try c.arena.create(ast.Node.OneToken);
node.* = .{
.base = .{ .tag = .BoolLiteral },
.token = token,
};
return &node.base;
@ -4305,8 +4314,9 @@ fn transCreateNodeBoolLiteral(c: *Context, value: bool) !*ast.Node {
fn transCreateNodeInt(c: *Context, int: anytype) !*ast.Node {
const token = try appendTokenFmt(c, .IntegerLiteral, "{}", .{int});
const node = try c.arena.create(ast.Node.IntegerLiteral);
const node = try c.arena.create(ast.Node.OneToken);
node.* = .{
.base = .{ .tag = .IntegerLiteral },
.token = token,
};
return &node.base;
@ -4314,8 +4324,9 @@ fn transCreateNodeInt(c: *Context, int: anytype) !*ast.Node {
fn transCreateNodeFloat(c: *Context, int: anytype) !*ast.Node {
const token = try appendTokenFmt(c, .FloatLiteral, "{}", .{int});
const node = try c.arena.create(ast.Node.FloatLiteral);
const node = try c.arena.create(ast.Node.OneToken);
node.* = .{
.base = .{ .tag = .FloatLiteral },
.token = token,
};
return &node.base;
@ -4362,7 +4373,7 @@ fn transCreateNodeMacroFn(c: *Context, name: []const u8, ref: *ast.Node, proto_a
const block_lbrace = try appendToken(c, .LBrace, "{");
const return_expr = try transCreateNodeReturnExpr(c);
const return_kw = try appendToken(c, .Keyword_return, "return");
const unwrap_expr = try transCreateNodeUnwrapNull(c, ref.cast(ast.Node.VarDecl).?.getTrailer("init_node").?);
const call_expr = try c.createCall(unwrap_expr, fn_params.items.len);
@ -4376,7 +4387,12 @@ fn transCreateNodeMacroFn(c: *Context, name: []const u8, ref: *ast.Node, proto_a
}
call_expr.rtoken = try appendToken(c, .RParen, ")");
return_expr.rhs = &call_expr.base;
const return_expr = try ast.Node.ControlFlowExpression.create(c.arena, .{
.ltoken = return_kw,
.tag = .Return,
}, .{
.rhs = &call_expr.base,
});
_ = try appendToken(c, .Semicolon, ";");
const block = try ast.Node.Block.alloc(c.arena, 1);
@ -4424,8 +4440,9 @@ fn transCreateNodeEnumLiteral(c: *Context, name: []const u8) !*ast.Node {
}
fn transCreateNodeStringLiteral(c: *Context, str: []const u8) !*ast.Node {
const node = try c.arena.create(ast.Node.StringLiteral);
const node = try c.arena.create(ast.Node.OneToken);
node.* = .{
.base = .{ .tag = .StringLiteral },
.token = try appendToken(c, .StringLiteral, str),
};
return &node.base;
@ -4455,28 +4472,77 @@ fn transCreateNodeElse(c: *Context) !*ast.Node.Else {
return node;
}
fn transCreateNodeBreakToken(c: *Context, label: ?ast.TokenIndex) !*ast.Node.ControlFlowExpression {
const other_token = label orelse return transCreateNodeBreak(c, null);
fn transCreateNodeBreakToken(
c: *Context,
label: ?ast.TokenIndex,
rhs: ?*ast.Node,
) !*ast.Node.ControlFlowExpression {
const other_token = label orelse return transCreateNodeBreak(c, null, rhs);
const loc = c.token_locs.items[other_token];
const label_name = c.source_buffer.items[loc.start..loc.end];
return transCreateNodeBreak(c, label_name);
return transCreateNodeBreak(c, label_name, rhs);
}
fn transCreateNodeBreak(c: *Context, label: ?[]const u8) !*ast.Node.ControlFlowExpression {
const ltoken = try appendToken(c, .Keyword_break, "break");
const label_node = if (label) |l| blk: {
_ = try appendToken(c, .Colon, ":");
break :blk try transCreateNodeIdentifier(c, l);
} else null;
const node = try c.arena.create(ast.Node.ControlFlowExpression);
node.* = .{
.ltoken = ltoken,
.kind = .{ .Break = label_node },
.rhs = null,
};
return node;
fn transCreateNodeBreak(
c: *Context,
label: ?[]const u8,
rhs: ?*ast.Node,
) !*ast.Node.ControlFlowExpression {
var ctrl_flow = try CtrlFlow.init(c, .Break, label);
return ctrl_flow.finish(rhs);
}
const CtrlFlow = struct {
c: *Context,
ltoken: ast.TokenIndex,
label_token: ?ast.TokenIndex,
tag: ast.Node.Tag,
/// Does everything except the RHS.
fn init(c: *Context, tag: ast.Node.Tag, label: ?[]const u8) !CtrlFlow {
const kw: Token.Id = switch (tag) {
.Break => .Keyword_break,
.Continue => .Keyword_continue,
.Return => .Keyword_return,
else => unreachable,
};
const kw_text = switch (tag) {
.Break => "break",
.Continue => "continue",
.Return => "return",
else => unreachable,
};
const ltoken = try appendToken(c, kw, kw_text);
const label_token = if (label) |l| blk: {
_ = try appendToken(c, .Colon, ":");
break :blk try appendToken(c, .Identifier, l);
} else null;
return CtrlFlow{
.c = c,
.ltoken = ltoken,
.label_token = label_token,
.tag = tag,
};
}
fn initToken(c: *Context, tag: ast.Node.Tag, label: ?ast.TokenIndex) !CtrlFlow {
const other_token = label orelse return init(c, tag, null);
const loc = c.token_locs.items[other_token];
const label_name = c.source_buffer.items[loc.start..loc.end];
return init(c, tag, label_name);
}
fn finish(self: *CtrlFlow, rhs: ?*ast.Node) !*ast.Node.ControlFlowExpression {
return ast.Node.ControlFlowExpression.create(self.c.arena, .{
.ltoken = self.ltoken,
.tag = self.tag,
}, .{
.label = self.label_token,
.rhs = rhs,
});
}
};
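// Minimal sketch (illustrative) of the Zig shape CtrlFlow produces: a labeled
// block whose `break :label value` carries the result, as emitted when
// translating C statement expressions and pre/post-increment.
fn statementExprSketch() i32 {
    const v = blk: {
        const tmp: i32 = 40;
        break :blk tmp + 2;
    };
    return v;
}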
fn transCreateNodeWhile(c: *Context) !*ast.Node.While {
const while_tok = try appendToken(c, .Keyword_while, "while");
_ = try appendToken(c, .LParen, "(");
@ -4497,12 +4563,10 @@ fn transCreateNodeWhile(c: *Context) !*ast.Node.While {
fn transCreateNodeContinue(c: *Context) !*ast.Node {
const ltoken = try appendToken(c, .Keyword_continue, "continue");
const node = try c.arena.create(ast.Node.ControlFlowExpression);
node.* = .{
const node = try ast.Node.ControlFlowExpression.create(c.arena, .{
.ltoken = ltoken,
.kind = .{ .Continue = null },
.rhs = null,
};
.tag = .Continue,
}, .{});
_ = try appendToken(c, .Semicolon, ";");
return &node.base;
}
@ -5006,8 +5070,9 @@ pub fn failDecl(c: *Context, loc: ZigClangSourceLocation, name: []const u8, comp
const semi_tok = try appendToken(c, .Semicolon, ";");
_ = try appendTokenFmt(c, .LineComment, "// {}", .{c.locStr(loc)});
const msg_node = try c.arena.create(ast.Node.StringLiteral);
const msg_node = try c.arena.create(ast.Node.OneToken);
msg_node.* = .{
.base = .{ .tag = .StringLiteral },
.token = msg_tok,
};
@ -5110,8 +5175,9 @@ fn appendIdentifier(c: *Context, name: []const u8) !ast.TokenIndex {
fn transCreateNodeIdentifier(c: *Context, name: []const u8) !*ast.Node {
const token_index = try appendIdentifier(c, name);
const identifier = try c.arena.create(ast.Node.Identifier);
const identifier = try c.arena.create(ast.Node.OneToken);
identifier.* = .{
.base = .{ .tag = .Identifier },
.token = token_index,
};
return &identifier.base;
@ -5119,8 +5185,9 @@ fn transCreateNodeIdentifier(c: *Context, name: []const u8) !*ast.Node {
fn transCreateNodeIdentifierUnchecked(c: *Context, name: []const u8) !*ast.Node {
const token_index = try appendTokenFmt(c, .Identifier, "{}", .{name});
const identifier = try c.arena.create(ast.Node.Identifier);
const identifier = try c.arena.create(ast.Node.OneToken);
identifier.* = .{
.base = .{ .tag = .Identifier },
.token = token_index,
};
return &identifier.base;
@ -5289,8 +5356,9 @@ fn transMacroFnDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8,
const param_name_tok = try appendIdentifier(c, mangled_name);
_ = try appendToken(c, .Colon, ":");
const any_type = try c.arena.create(ast.Node.AnyType);
const any_type = try c.arena.create(ast.Node.OneToken);
any_type.* = .{
.base = .{ .tag = .AnyType },
.token = try appendToken(c, .Keyword_anytype, "anytype"),
};
@ -5322,7 +5390,7 @@ fn transMacroFnDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8,
const type_of = try c.createBuiltinCall("@TypeOf", 1);
const return_expr = try transCreateNodeReturnExpr(c);
const return_kw = try appendToken(c, .Keyword_return, "return");
const expr = try parseCExpr(c, it, source, source_loc, scope);
const last = it.next().?;
if (last.id != .Eof and last.id != .Nl)
@ -5337,13 +5405,17 @@ fn transMacroFnDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8,
const type_of_arg = if (expr.tag != .Block) expr else blk: {
const blk = @fieldParentPtr(ast.Node.Block, "base", expr);
const blk_last = blk.statements()[blk.statements_len - 1];
std.debug.assert(blk_last.tag == .ControlFlowExpression);
const br = @fieldParentPtr(ast.Node.ControlFlowExpression, "base", blk_last);
break :blk br.rhs.?;
const br = blk_last.cast(ast.Node.ControlFlowExpression).?;
break :blk br.getRHS().?;
};
type_of.params()[0] = type_of_arg;
type_of.rparen_token = try appendToken(c, .RParen, ")");
return_expr.rhs = expr;
const return_expr = try ast.Node.ControlFlowExpression.create(c.arena, .{
.ltoken = return_kw,
.tag = .Return,
}, .{
.rhs = expr,
});
try block_scope.statements.append(&return_expr.base);
const block_node = try block_scope.complete(c);
@ -5416,8 +5488,7 @@ fn parseCExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8, source_
}
}
const break_node = try transCreateNodeBreak(c, label_name);
break_node.rhs = last;
const break_node = try transCreateNodeBreak(c, label_name, last);
try block_scope.statements.append(&break_node.base);
const block_node = try block_scope.complete(c);
return &block_node.base;
@ -5656,15 +5727,17 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8,
const first_tok = it.list.at(0);
if (source[tok.start] != '\'' or source[tok.start + 1] == '\\' or tok.end - tok.start == 3) {
const token = try appendToken(c, .CharLiteral, try zigifyEscapeSequences(c, source[tok.start..tok.end], source[first_tok.start..first_tok.end], source_loc));
const node = try c.arena.create(ast.Node.CharLiteral);
const node = try c.arena.create(ast.Node.OneToken);
node.* = .{
.base = .{ .tag = .CharLiteral },
.token = token,
};
return &node.base;
} else {
const token = try appendTokenFmt(c, .IntegerLiteral, "0x{x}", .{source[tok.start + 1 .. tok.end - 1]});
const node = try c.arena.create(ast.Node.IntegerLiteral);
const node = try c.arena.create(ast.Node.OneToken);
node.* = .{
.base = .{ .tag = .IntegerLiteral },
.token = token,
};
return &node.base;
@ -5673,8 +5746,9 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8,
.StringLiteral => {
const first_tok = it.list.at(0);
const token = try appendToken(c, .StringLiteral, try zigifyEscapeSequences(c, source[tok.start..tok.end], source[first_tok.start..first_tok.end], source_loc));
const node = try c.arena.create(ast.Node.StringLiteral);
const node = try c.arena.create(ast.Node.OneToken);
node.* = .{
.base = .{ .tag = .StringLiteral },
.token = token,
};
return &node.base;
@ -6224,7 +6298,7 @@ fn getContainer(c: *Context, node: *ast.Node) ?*ast.Node {
=> return node,
.Identifier => {
const ident = node.cast(ast.Node.Identifier).?;
const ident = node.castTag(.Identifier).?;
if (c.global_scope.sym_table.get(tokenSlice(c, ident.token))) |value| {
if (value.cast(ast.Node.VarDecl)) |var_decl|
return getContainer(c, var_decl.getTrailer("init_node").?);
@ -6238,7 +6312,7 @@ fn getContainer(c: *Context, node: *ast.Node) ?*ast.Node {
if (ty_node.cast(ast.Node.ContainerDecl)) |container| {
for (container.fieldsAndDecls()) |field_ref| {
const field = field_ref.cast(ast.Node.ContainerField).?;
const ident = infix.rhs.cast(ast.Node.Identifier).?;
const ident = infix.rhs.castTag(.Identifier).?;
if (mem.eql(u8, tokenSlice(c, field.name_token), tokenSlice(c, ident.token))) {
return getContainer(c, field.type_expr.?);
}
@ -6253,7 +6327,7 @@ fn getContainer(c: *Context, node: *ast.Node) ?*ast.Node {
}
fn getContainerTypeOf(c: *Context, ref: *ast.Node) ?*ast.Node {
if (ref.cast(ast.Node.Identifier)) |ident| {
if (ref.castTag(.Identifier)) |ident| {
if (c.global_scope.sym_table.get(tokenSlice(c, ident.token))) |value| {
if (value.cast(ast.Node.VarDecl)) |var_decl| {
if (var_decl.getTrailer("type_node")) |ty|
@ -6265,7 +6339,7 @@ fn getContainerTypeOf(c: *Context, ref: *ast.Node) ?*ast.Node {
if (ty_node.cast(ast.Node.ContainerDecl)) |container| {
for (container.fieldsAndDecls()) |field_ref| {
const field = field_ref.cast(ast.Node.ContainerField).?;
const ident = infix.rhs.cast(ast.Node.Identifier).?;
const ident = infix.rhs.castTag(.Identifier).?;
if (mem.eql(u8, tokenSlice(c, field.name_token), tokenSlice(c, ident.token))) {
return getContainer(c, field.type_expr.?);
}


@ -34,9 +34,17 @@ pub const Inst = struct {
/// These names are used directly as the instruction names in the text format.
pub const Tag = enum {
/// Allocates stack local memory. Its lifetime ends when the block ends that contains
/// this instruction.
alloc,
/// Same as `alloc` except the type is inferred.
alloc_inferred,
/// Function parameter value. These must be first in a function's main block,
/// in respective order with the parameters.
arg,
/// A typed result location pointer is bitcasted to a new result location pointer.
/// The new result location pointer has an inferred type.
bitcast_result_ptr,
/// A labeled block of code, which can return a value.
block,
/// Return a value from a `Block`.
@ -45,6 +53,17 @@ pub const Inst = struct {
/// Same as `break` but without an operand; the operand is assumed to be the void value.
breakvoid,
call,
/// Coerces a result location pointer to a new element type. It is evaluated "backwards"-
/// as type coercion from the new element type to the old element type.
/// LHS is destination element type, RHS is result pointer.
coerce_result_ptr,
/// This instruction does a `coerce_result_ptr` operation on a `Block`'s
/// result location pointer, whose type is inferred by peer type resolution on the
/// `Block`'s corresponding `break` instructions.
coerce_result_block_ptr,
/// Equivalent to `as(ptr_child_type(typeof(ptr)), value)`.
coerce_to_ptr_elem,
/// Emit an error message and fail compilation.
compileerror,
/// Special case, has no textual representation.
@"const",
@ -57,7 +76,17 @@ pub const Inst = struct {
declval,
/// Same as declval but the parameter is a `*Module.Decl` rather than a name.
declval_in_module,
/// Emits a compile error if the operand is not `void`.
ensure_result_used,
/// Emits a compile error if an error is ignored.
ensure_result_non_error,
boolnot,
/// Obtains a pointer to the return value.
ret_ptr,
/// Obtains the return type of the in-scope function.
ret_type,
/// Write a value to a pointer.
store,
/// String Literal. Makes an anonymous Decl and then takes a pointer to it.
str,
int,
@ -73,6 +102,9 @@ pub const Inst = struct {
@"fn",
fntype,
@"export",
/// Given a reference to a function and a parameter index, returns the
/// type of the parameter. TODO what happens when the parameter is `anytype`?
param_type,
primitive,
intcast,
bitcast,
@ -96,6 +128,9 @@ pub const Inst = struct {
.breakpoint,
.@"unreachable",
.returnvoid,
.alloc_inferred,
.ret_ptr,
.ret_type,
=> NoOp,
.boolnot,
@ -103,6 +138,11 @@ pub const Inst = struct {
.@"return",
.isnull,
.isnonnull,
.ptrtoint,
.alloc,
.ensure_result_used,
.ensure_result_non_error,
.bitcast_result_ptr,
=> UnOp,
.add,
@ -113,32 +153,36 @@ pub const Inst = struct {
.cmp_gte,
.cmp_gt,
.cmp_neq,
.as,
.floatcast,
.intcast,
.bitcast,
.coerce_result_ptr,
=> BinOp,
.block => Block,
.@"break" => Break,
.breakvoid => BreakVoid,
.call => Call,
.coerce_to_ptr_elem => CoerceToPtrElem,
.declref => DeclRef,
.declref_str => DeclRefStr,
.declval => DeclVal,
.declval_in_module => DeclValInModule,
.coerce_result_block_ptr => CoerceResultBlockPtr,
.compileerror => CompileError,
.@"const" => Const,
.store => Store,
.str => Str,
.int => Int,
.inttype => IntType,
.ptrtoint => PtrToInt,
.fieldptr => FieldPtr,
.as => As,
.@"asm" => Asm,
.@"fn" => Fn,
.@"export" => Export,
.param_type => ParamType,
.primitive => Primitive,
.fntype => FnType,
.intcast => IntCast,
.bitcast => BitCast,
.floatcast => FloatCast,
.elemptr => ElemPtr,
.condbr => CondBr,
};
@ -148,15 +192,26 @@ pub const Inst = struct {
/// Function calls do not count.
pub fn isNoReturn(tag: Tag) bool {
return switch (tag) {
.alloc,
.alloc_inferred,
.arg,
.bitcast_result_ptr,
.block,
.breakpoint,
.call,
.coerce_result_ptr,
.coerce_result_block_ptr,
.coerce_to_ptr_elem,
.@"const",
.declref,
.declref_str,
.declval,
.declval_in_module,
.ensure_result_used,
.ensure_result_non_error,
.ret_ptr,
.ret_type,
.store,
.str,
.int,
.inttype,
@ -168,6 +223,7 @@ pub const Inst = struct {
.@"fn",
.fntype,
.@"export",
.param_type,
.primitive,
.intcast,
.bitcast,
@ -292,6 +348,17 @@ pub const Inst = struct {
},
};
pub const CoerceToPtrElem = struct {
pub const base_tag = Tag.coerce_to_ptr_elem;
base: Inst,
positionals: struct {
ptr: *Inst,
value: *Inst,
},
kw_args: struct {},
};
pub const DeclRef = struct {
pub const base_tag = Tag.declref;
base: Inst,
@ -332,6 +399,17 @@ pub const Inst = struct {
kw_args: struct {},
};
pub const CoerceResultBlockPtr = struct {
pub const base_tag = Tag.coerce_result_block_ptr;
base: Inst,
positionals: struct {
dest_type: *Inst,
block: *Block,
},
kw_args: struct {},
};
pub const CompileError = struct {
pub const base_tag = Tag.compileerror;
base: Inst,
@ -352,6 +430,17 @@ pub const Inst = struct {
kw_args: struct {},
};
pub const Store = struct {
pub const base_tag = Tag.store;
base: Inst,
positionals: struct {
ptr: *Inst,
value: *Inst,
},
kw_args: struct {},
};
pub const Str = struct {
pub const base_tag = Tag.str;
base: Inst,
@ -372,17 +461,6 @@ pub const Inst = struct {
kw_args: struct {},
};
pub const PtrToInt = struct {
pub const builtin_name = "@ptrToInt";
pub const base_tag = Tag.ptrtoint;
base: Inst,
positionals: struct {
operand: *Inst,
},
kw_args: struct {},
};
pub const FieldPtr = struct {
pub const base_tag = Tag.fieldptr;
base: Inst,
@ -394,18 +472,6 @@ pub const Inst = struct {
kw_args: struct {},
};
pub const As = struct {
pub const base_tag = Tag.as;
pub const builtin_name = "@as";
base: Inst,
positionals: struct {
dest_type: *Inst,
value: *Inst,
},
kw_args: struct {},
};
pub const Asm = struct {
pub const base_tag = Tag.@"asm";
base: Inst,
@ -469,6 +535,17 @@ pub const Inst = struct {
kw_args: struct {},
};
pub const ParamType = struct {
pub const base_tag = Tag.param_type;
base: Inst,
positionals: struct {
func: *Inst,
arg_index: usize,
},
kw_args: struct {},
};
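    // Hedged example: per the `param_type` doc comment, this asks for the type
    // of parameter `arg_index` of `func`, which astgen could then use as the
    // result type for the corresponding call argument. The helper name and
    // `allocator` parameter are illustrative assumptions.
    fn exampleParamType(allocator: *std.mem.Allocator, src: usize, func: *Inst, arg_index: usize) !*Inst {
        const inst = try allocator.create(ParamType);
        inst.* = .{
            .base = .{ .tag = .param_type, .src = src },
            .positionals = .{ .func = func, .arg_index = arg_index },
            .kw_args = .{},
        };
        return &inst.base;
    }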
pub const Primitive = struct {
pub const base_tag = Tag.primitive;
base: Inst,
@ -559,42 +636,6 @@ pub const Inst = struct {
};
};
pub const FloatCast = struct {
pub const base_tag = Tag.floatcast;
pub const builtin_name = "@floatCast";
base: Inst,
positionals: struct {
dest_type: *Inst,
operand: *Inst,
},
kw_args: struct {},
};
pub const IntCast = struct {
pub const base_tag = Tag.intcast;
pub const builtin_name = "@intCast";
base: Inst,
positionals: struct {
dest_type: *Inst,
operand: *Inst,
},
kw_args: struct {},
};
pub const BitCast = struct {
pub const base_tag = Tag.bitcast;
pub const builtin_name = "@bitCast";
base: Inst,
positionals: struct {
dest_type: *Inst,
operand: *Inst,
},
kw_args: struct {},
};
pub const ElemPtr = struct {
pub const base_tag = Tag.elemptr;
base: Inst,
@ -1467,15 +1508,15 @@ const EmitZIR = struct {
},
.ComptimeInt => return self.emitComptimeIntVal(src, typed_value.val),
.Int => {
const as_inst = try self.arena.allocator.create(Inst.As);
const as_inst = try self.arena.allocator.create(Inst.BinOp);
as_inst.* = .{
.base = .{
.tag = .as,
.src = src,
.tag = Inst.As.base_tag,
},
.positionals = .{
.dest_type = (try self.emitType(src, typed_value.ty)).inst,
.value = (try self.emitComptimeIntVal(src, typed_value.val)).inst,
.lhs = (try self.emitType(src, typed_value.ty)).inst,
.rhs = (try self.emitComptimeIntVal(src, typed_value.val)).inst,
},
.kw_args = .{},
};
@ -1640,17 +1681,17 @@ const EmitZIR = struct {
src: usize,
new_body: ZirBody,
old_inst: *ir.Inst.UnOp,
comptime I: type,
tag: Inst.Tag,
) Allocator.Error!*Inst {
const new_inst = try self.arena.allocator.create(I);
const new_inst = try self.arena.allocator.create(Inst.BinOp);
new_inst.* = .{
.base = .{
.src = src,
.tag = I.base_tag,
.tag = tag,
},
.positionals = .{
.dest_type = (try self.emitType(src, old_inst.base.ty)).inst,
.operand = try self.resolveInst(new_body, old_inst.operand),
.lhs = (try self.emitType(old_inst.base.src, old_inst.base.ty)).inst,
.rhs = try self.resolveInst(new_body, old_inst.operand),
},
.kw_args = .{},
};
@ -1691,9 +1732,9 @@ const EmitZIR = struct {
.cmp_gt => try self.emitBinOp(inst.src, new_body, inst.castTag(.cmp_gt).?, .cmp_gt),
.cmp_neq => try self.emitBinOp(inst.src, new_body, inst.castTag(.cmp_neq).?, .cmp_neq),
.bitcast => try self.emitCast(inst.src, new_body, inst.castTag(.bitcast).?, Inst.BitCast),
.intcast => try self.emitCast(inst.src, new_body, inst.castTag(.intcast).?, Inst.IntCast),
.floatcast => try self.emitCast(inst.src, new_body, inst.castTag(.floatcast).?, Inst.FloatCast),
.bitcast => try self.emitCast(inst.src, new_body, inst.castTag(.bitcast).?, .bitcast),
.intcast => try self.emitCast(inst.src, new_body, inst.castTag(.intcast).?, .intcast),
.floatcast => try self.emitCast(inst.src, new_body, inst.castTag(.floatcast).?, .floatcast),
.block => blk: {
const old_inst = inst.castTag(.block).?;


@ -19,13 +19,13 @@ pub fn addCases(ctx: *TestContext) !void {
\\fn main() noreturn {}
\\
\\export fn _start() noreturn {
\\ main();
\\ main();
\\}
,
\\noreturn void main(void);
\\
\\noreturn void _start(void) {
\\ main();
\\ main();
\\}
\\
\\noreturn void main(void) {}
@ -35,15 +35,15 @@ pub fn addCases(ctx: *TestContext) !void {
// TODO: figure out a way to prevent asm constants from being generated
ctx.c("inline asm", linux_x64,
\\fn exitGood() void {
\\ asm volatile ("syscall"
\\ :
\\ : [number] "{rax}" (231),
\\ [arg1] "{rdi}" (0)
\\ );
\\ asm volatile ("syscall"
\\ :
\\ : [number] "{rax}" (231),
\\ [arg1] "{rdi}" (0)
\\ );
\\}
\\
\\export fn _start() noreturn {
\\ exitGood();
\\ exitGood();
\\}
,
\\#include <stddef.h>
@ -55,36 +55,14 @@ pub fn addCases(ctx: *TestContext) !void {
\\const char *const exitGood__anon_2 = "syscall";
\\
\\noreturn void _start(void) {
\\ exitGood();
\\ exitGood();
\\}
\\
\\void exitGood(void) {
\\ register size_t rax_constant __asm__("rax") = 231;
\\ register size_t rdi_constant __asm__("rdi") = 0;
\\ __asm volatile ("syscall" :: ""(rax_constant), ""(rdi_constant));
\\ return;
\\}
\\
);
ctx.c("basic return", linux_x64,
\\fn main() u8 {
\\ return 103;
\\}
\\
\\export fn _start() noreturn {
\\ _ = main();
\\}
,
\\#include <stdint.h>
\\
\\uint8_t main(void);
\\
\\noreturn void _start(void) {
\\ (void)main();
\\}
\\
\\uint8_t main(void) {
\\ return 103;
\\ register size_t rax_constant __asm__("rax") = 231;
\\ register size_t rdi_constant __asm__("rdi") = 0;
\\ __asm volatile ("syscall" :: ""(rax_constant), ""(rdi_constant));
\\ return;
\\}
\\
);