ir: improve ZIR emission enough to emit hello world

branch: master
author: Andrew Kelley, 2020-04-22 03:08:50 -04:00
parent b1a86040dd
commit d58233b361
4 changed files with 169 additions and 14 deletions
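The hello world in question is not included in the diff, but given the asm-related changes below it is presumably the freestanding kind that issues write and exit syscalls through inline assembly on x86_64 Linux. A hypothetical sketch of such a program (the constants and names here are illustrative, not taken from the commit):

// Hypothetical input program (not part of this commit): a freestanding
// x86_64 Linux hello world using inline assembly, the kind of code whose
// `asm` instructions now survive the round trip through ZIR emission.
export fn _start() noreturn {
    const msg = "Hello, world!\n";
    // write(STDOUT_FILENO, msg, msg.len)
    _ = asm volatile ("syscall"
        : [ret] "={rax}" (-> usize)
        : [number] "{rax}" (@as(usize, 1)),
          [arg1] "{rdi}" (@as(usize, 1)),
          [arg2] "{rsi}" (@ptrToInt(msg)),
          [arg3] "{rdx}" (@as(usize, msg.len))
        : "rcx", "r11"
    );
    // exit(0)
    asm volatile ("syscall"
        :
        : [number] "{rax}" (@as(usize, 60)),
          [arg1] "{rdi}" (@as(usize, 0))
        : "rcx", "r11"
    );
    unreachable;
}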

@ -666,8 +666,7 @@ const Analyze = struct {
};
fn coerceInMemoryAllowed(dest_type: Type, src_type: Type) InMemoryCoercionResult {
// As a shortcut, if the small tags / addresses match, we're done.
if (dest_type.tag_if_small_enough == src_type.tag_if_small_enough)
if (dest_type.eql(src_type))
return .ok;
// TODO: implement more of this function

@ -356,16 +356,15 @@ pub const Module = struct {
comptime var need_comma = pos_fields.len != 0;
const KW_Args = @TypeOf(inst.kw_args);
inline for (@typeInfo(KW_Args).Struct.fields) |arg_field, i| {
if (need_comma) {
try stream.writeAll(", ");
}
if (@typeInfo(arg_field.field_type) == .Optional) {
if (@field(inst.kw_args, arg_field.name)) |non_optional| {
if (need_comma) try stream.writeAll(", ");
try stream.print("{}=", .{arg_field.name});
try self.writeParamToStream(stream, non_optional, inst_table);
need_comma = true;
}
} else {
if (need_comma) try stream.writeAll(", ");
try stream.print("{}=", .{arg_field.name});
try self.writeParamToStream(stream, @field(inst.kw_args, arg_field.name), inst_table);
need_comma = true;
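The point of the reshuffle above is that the ", " separator is now written only immediately before a keyword argument that actually gets printed, so an optional kw_arg that is null no longer leaves a dangling separator in the rendered instruction. Illustratively (the rendered form is made up, not actual ZIR output):

    someinst(arg1, arg2, )   // before: comma emitted before the null check
    someinst(arg1, arg2)     // after: comma deferred until a field is printed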
@ -806,7 +805,7 @@ const EmitZIR = struct {
decls: std.ArrayList(*Inst),
decl_table: std.AutoHashMap(*ir.Inst, *Inst),
pub fn emit(self: *EmitZIR) !void {
fn emit(self: *EmitZIR) !void {
for (self.old_module.exports) |module_export| {
const export_value = try self.emitTypedValue(module_export.src, module_export.typed_value);
const symbol_name = try self.emitStringLiteral(module_export.src, module_export.name);
@ -823,7 +822,7 @@ const EmitZIR = struct {
}
}
pub fn resolveInst(self: *EmitZIR, inst_table: *const std.AutoHashMap(*ir.Inst, *Inst), inst: *ir.Inst) !*Inst {
fn resolveInst(self: *EmitZIR, inst_table: *const std.AutoHashMap(*ir.Inst, *Inst), inst: *ir.Inst) !*Inst {
if (inst.cast(ir.Inst.Constant)) |const_inst| {
if (self.decl_table.getValue(inst)) |decl| {
return decl;
@ -836,7 +835,20 @@ const EmitZIR = struct {
}
}
pub fn emitTypedValue(self: *EmitZIR, src: usize, typed_value: ir.TypedValue) Allocator.Error!*Inst {
fn emitComptimeIntVal(self: *EmitZIR, src: usize, val: Value) !*Inst {
const int_inst = try self.arena.allocator.create(Inst.Int);
int_inst.* = .{
.base = .{ .src = src, .tag = Inst.Int.base_tag },
.positionals = .{
.int = try val.toBigInt(&self.arena.allocator),
},
.kw_args = .{},
};
try self.decls.append(&int_inst.base);
return &int_inst.base;
}
fn emitTypedValue(self: *EmitZIR, src: usize, typed_value: ir.TypedValue) Allocator.Error!*Inst {
switch (typed_value.ty.zigTypeTag()) {
.Pointer => {
const ptr_elem_type = typed_value.ty.elemType();
@ -854,6 +866,21 @@ const EmitZIR = struct {
else => |t| std.debug.panic("TODO implement emitTypedValue for pointer to {}", .{@tagName(t)}),
}
},
.ComptimeInt => return self.emitComptimeIntVal(src, typed_value.val),
.Int => {
const as_inst = try self.arena.allocator.create(Inst.As);
as_inst.* = .{
.base = .{ .src = src, .tag = Inst.As.base_tag },
.positionals = .{
.dest_type = try self.emitType(src, typed_value.ty),
.value = try self.emitComptimeIntVal(src, typed_value.val),
},
.kw_args = .{},
};
try self.decls.append(&as_inst.base);
return &as_inst.base;
},
.Type => {
const ty = typed_value.val.toType();
return self.emitType(src, ty);
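The new .Int case gives fixed-width integer constants a two-instruction encoding: the value itself as an untyped comptime integer (via emitComptimeIntVal) wrapped in an `as` coercion to the destination type. In source-level terms the round trip is roughly this (hypothetical illustration):

// Source-level analogue of the .Int branch: a typed integer constant is
// re-emitted as a coercion applied to an untyped comptime int.
const fd: usize = 1;          // analyzed constant with a fixed-width type
const same = @as(usize, 1);   // what the emitted as(...) + int(...) pair means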
@ -883,14 +910,38 @@ const EmitZIR = struct {
.assembly => blk: {
const old_inst = inst.cast(ir.Inst.Assembly).?;
const new_inst = try self.arena.allocator.create(Inst.Asm);
const inputs = try self.arena.allocator.alloc(*Inst, old_inst.args.inputs.len);
for (inputs) |*elem, i| {
elem.* = try self.emitStringLiteral(inst.src, old_inst.args.inputs[i]);
}
const clobbers = try self.arena.allocator.alloc(*Inst, old_inst.args.clobbers.len);
for (clobbers) |*elem, i| {
elem.* = try self.emitStringLiteral(inst.src, old_inst.args.clobbers[i]);
}
const args = try self.arena.allocator.alloc(*Inst, old_inst.args.args.len);
for (args) |*elem, i| {
elem.* = try self.resolveInst(&inst_table, old_inst.args.args[i]);
}
new_inst.* = .{
.base = .{ .src = inst.src, .tag = Inst.Asm.base_tag },
.positionals = .{
.asm_source = try self.emitStringLiteral(inst.src, old_inst.args.asm_source),
.return_type = try self.emitType(inst.src, inst.ty),
},
// TODO emit more kw_args
.kw_args = .{},
.kw_args = .{
.@"volatile" = old_inst.args.is_volatile,
.output = if (old_inst.args.output) |o|
try self.emitStringLiteral(inst.src, o)
else
null,
.inputs = inputs,
.clobbers = clobbers,
.args = args,
},
};
break :blk &new_inst.base;
},
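Putting the new kw_args together, an inline asm expression now round-trips with all of its operands rather than just the source string and return type. Roughly, the fields populated above map like this (a hypothetical mapping for a write-syscall asm; the rendered ZIR text itself is not shown in this commit):

// For `asm volatile ("syscall" : [ret] "={rax}" (-> usize)
//                    : [number] "{rax}" (nr), [arg1] "{rdi}" (fd), ...
//                    : "rcx", "r11")`:
//   positionals.asm_source  <- str("syscall")
//   positionals.return_type <- usize
//   kw_args.@"volatile"     <- true
//   kw_args.output          <- str("={rax}")          (null when there is no output)
//   kw_args.inputs          <- str("{rax}"), str("{rdi}"), ... (input constraints)
//   kw_args.args            <- the operand instructions, resolved through inst_table
//   kw_args.clobbers        <- str("rcx"), str("r11")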
@ -931,7 +982,7 @@ const EmitZIR = struct {
}
}
pub fn emitType(self: *EmitZIR, src: usize, ty: Type) Allocator.Error!*Inst {
fn emitType(self: *EmitZIR, src: usize, ty: Type) Allocator.Error!*Inst {
switch (ty.tag()) {
.isize => return self.emitPrimitiveType(src, .isize),
.usize => return self.emitPrimitiveType(src, .usize),
@ -986,7 +1037,7 @@ const EmitZIR = struct {
}
}
pub fn emitPrimitiveType(self: *EmitZIR, src: usize, tag: Inst.Primitive.BuiltinType) !*Inst {
fn emitPrimitiveType(self: *EmitZIR, src: usize, tag: Inst.Primitive.BuiltinType) !*Inst {
const primitive_inst = try self.arena.allocator.create(Inst.Primitive);
primitive_inst.* = .{
.base = .{ .src = src, .tag = Inst.Primitive.base_tag },
@ -999,7 +1050,7 @@ const EmitZIR = struct {
return &primitive_inst.base;
}
pub fn emitStringLiteral(self: *EmitZIR, src: usize, str: []const u8) !*Inst {
fn emitStringLiteral(self: *EmitZIR, src: usize, str: []const u8) !*Inst {
const str_inst = try self.arena.allocator.create(Inst.Str);
str_inst.* = .{
.base = .{ .src = src, .tag = Inst.Str.base_tag },

@ -88,6 +88,60 @@ pub const Type = extern union {
return @fieldParentPtr(T, "base", self.ptr_otherwise);
}
pub fn eql(self: Type, other: Type) bool {
//std.debug.warn("test {} == {}\n", .{ self, other });
// As a shortcut, if the small tags / addresses match, we're done.
if (self.tag_if_small_enough == other.tag_if_small_enough)
return true;
const zig_tag_a = self.zigTypeTag();
const zig_tag_b = other.zigTypeTag();
if (zig_tag_a != zig_tag_b)
return false;
switch (zig_tag_a) {
.Type => return true,
.Void => return true,
.Bool => return true,
.NoReturn => return true,
.ComptimeFloat => return true,
.ComptimeInt => return true,
.Undefined => return true,
.Null => return true,
.Pointer => {
const is_slice_a = isSlice(self);
const is_slice_b = isSlice(other);
if (is_slice_a != is_slice_b)
return false;
@panic("TODO implement more pointer Type equality comparison");
},
.Int => {
if (self.tag() != other.tag()) {
// Detect that e.g. u64 != usize, even if the bits match on a particular target.
return false;
}
// The target will not be branched upon, because we handled target-dependent cases above.
const info_a = self.intInfo(@as(Target, undefined));
const info_b = other.intInfo(@as(Target, undefined));
return info_a.signed == info_b.signed and info_a.bits == info_b.bits;
},
.Float,
.Array,
.Struct,
.Optional,
.ErrorUnion,
.ErrorSet,
.Enum,
.Union,
.Fn,
.BoundFn,
.Opaque,
.Frame,
.AnyFrame,
.Vector,
.EnumLiteral,
=> @panic("TODO implement more Type equality comparison"),
}
}
pub fn format(
self: Type,
comptime fmt: []const u8,
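A usage sketch for the eql implementation above, written as it might appear in a test inside type.zig (hypothetical; it assumes the Type.initTag helper that builds a Type from a small tag): identical tags take the fast path, while usize and u64 compare unequal even where their bit widths coincide on a target, because their tags differ.

// Hypothetical test (assumes Type.initTag from type.zig is available here).
test "Type.eql fast path and distinct int tags" {
    const a = Type.initTag(.usize);
    const b = Type.initTag(.u64);
    std.debug.assert(a.eql(a));  // same tag_if_small_enough: fast path
    std.debug.assert(!a.eql(b)); // both .Int, but the tags differ, so not equal
}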

@ -4,6 +4,7 @@ const log2 = std.math.log2;
const assert = std.debug.assert;
const BigInt = std.math.big.Int;
const Target = std.Target;
const Allocator = std.mem.Allocator;
/// This is the raw data, with no bookkeeping, no memory awareness,
/// no de-duplication, and no type system awareness.
@ -156,7 +157,7 @@ pub const Value = extern union {
/// Asserts that the value is representable as an array of bytes.
/// Copies the value into a freshly allocated slice of memory, which is owned by the caller.
pub fn toAllocatedBytes(self: Value, allocator: *std.mem.Allocator) error{OutOfMemory}![]u8 {
pub fn toAllocatedBytes(self: Value, allocator: *Allocator) Allocator.Error![]u8 {
if (self.cast(Payload.Bytes)) |bytes| {
return std.mem.dupe(allocator, u8, bytes.data);
}
@ -213,6 +214,56 @@ pub const Value = extern union {
};
}
/// Asserts the value is an integer.
pub fn toBigInt(self: Value, allocator: *Allocator) Allocator.Error!BigInt {
switch (self.tag()) {
.ty,
.u8_type,
.i8_type,
.isize_type,
.usize_type,
.c_short_type,
.c_ushort_type,
.c_int_type,
.c_uint_type,
.c_long_type,
.c_ulong_type,
.c_longlong_type,
.c_ulonglong_type,
.c_longdouble_type,
.f16_type,
.f32_type,
.f64_type,
.f128_type,
.c_void_type,
.bool_type,
.void_type,
.type_type,
.anyerror_type,
.comptime_int_type,
.comptime_float_type,
.noreturn_type,
.fn_naked_noreturn_no_args_type,
.single_const_pointer_to_comptime_int_type,
.const_slice_u8_type,
.void_value,
.noreturn_value,
.bool_true,
.bool_false,
.function,
.ref,
.ref_val,
.bytes,
=> unreachable,
.zero => return BigInt.initSet(allocator, 0),
.int_u64 => return BigInt.initSet(allocator, self.cast(Payload.Int_u64).?.int),
.int_i64 => return BigInt.initSet(allocator, self.cast(Payload.Int_i64).?.int),
.int_big => return self.cast(Payload.IntBig).?.big_int,
}
}
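A usage sketch for toBigInt, as it might look in a test inside value.zig (hypothetical; it assumes Value.initTag and this vintage of std.math.big.Int, which later std versions reorganized): the small .zero tag produces a freshly allocated big integer holding 0, owned by the caller.

// Hypothetical test (assumes Value.initTag and the era's std.math.big.Int API).
test "toBigInt on the zero value" {
    var big = try Value.initTag(.zero).toBigInt(std.testing.allocator);
    defer big.deinit(); // .zero allocates a fresh BigInt owned by the caller
    std.debug.assert((try big.to(u64)) == 0);
}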
/// Asserts the value is an integer, and the destination type is ComptimeInt or Int.
pub fn intFitsInType(self: Value, ty: Type, target: Target) bool {
switch (self.tag()) {