self-hosted: progress on IR for supporting libc hello world

* add c int types
* some more ir stubs
master
Andrew Kelley 2018-07-19 00:08:47 -04:00
parent 7f1a550760
commit 1d85b588ea
10 changed files with 880 additions and 44 deletions

View File

@ -489,6 +489,7 @@ set(ZIG_STD_FILES
"math/atan.zig"
"math/atan2.zig"
"math/atanh.zig"
"math/big/index.zig"
"math/big/int.zig"
"math/cbrt.zig"
"math/ceil.zig"

68
src-self-hosted/c_int.zig Normal file
View File

@ -0,0 +1,68 @@
/// Description of one C integer type: the compiler-internal id, the Zig-side
/// primitive type name, the C-side spelling, and signedness. The actual bit
/// width is target-dependent; see Target.cIntTypeSizeInBits.
pub const CInt = struct {
    id: Id,
    // Zig-side primitive type name, e.g. "c_int".
    zig_name: []const u8,
    // C-side spelling, e.g. "unsigned long long".
    c_name: []const u8,
    is_signed: bool,

    pub const Id = enum {
        Short,
        UShort,
        Int,
        UInt,
        Long,
        ULong,
        LongLong,
        ULongLong,
    };

    /// All C integer types known to the compiler. Consumers build parallel
    /// arrays indexed by position in this list (e.g. Compilation.c_int_types),
    /// so the order here is significant and must not change.
    pub const list = []CInt{
        CInt{
            .id = Id.Short,
            .zig_name = "c_short",
            .c_name = "short",
            .is_signed = true,
        },
        CInt{
            .id = Id.UShort,
            .zig_name = "c_ushort",
            .c_name = "unsigned short",
            .is_signed = false,
        },
        CInt{
            .id = Id.Int,
            .zig_name = "c_int",
            .c_name = "int",
            .is_signed = true,
        },
        CInt{
            .id = Id.UInt,
            .zig_name = "c_uint",
            .c_name = "unsigned int",
            .is_signed = false,
        },
        CInt{
            .id = Id.Long,
            .zig_name = "c_long",
            .c_name = "long",
            .is_signed = true,
        },
        CInt{
            .id = Id.ULong,
            .zig_name = "c_ulong",
            .c_name = "unsigned long",
            .is_signed = false,
        },
        CInt{
            .id = Id.LongLong,
            .zig_name = "c_longlong",
            .c_name = "long long",
            .is_signed = true,
        },
        CInt{
            .id = Id.ULongLong,
            .zig_name = "c_ulonglong",
            .c_name = "unsigned long long",
            .is_signed = false,
        },
    };
};

View File

@ -29,6 +29,7 @@ const codegen = @import("codegen.zig");
const Package = @import("package.zig").Package;
const link = @import("link.zig").link;
const LibCInstallation = @import("libc_installation.zig").LibCInstallation;
const CInt = @import("c_int.zig").CInt;
/// Data that is local to the event loop.
pub const EventLoopLocal = struct {
@ -59,6 +60,7 @@ pub const EventLoopLocal = struct {
};
}
/// Must be called only after EventLoop.run completes.
fn deinit(self: *EventLoopLocal) void {
while (self.llvm_handle_pool.pop()) |node| {
c.LLVMContextDispose(node.data);
@ -184,6 +186,7 @@ pub const Compilation = struct {
void_type: *Type.Void,
bool_type: *Type.Bool,
noreturn_type: *Type.NoReturn,
comptime_int_type: *Type.ComptimeInt,
void_value: *Value.Void,
true_value: *Value.Bool,
@ -209,6 +212,16 @@ pub const Compilation = struct {
have_err_ret_tracing: bool,
/// not locked because it is read-only
primitive_type_table: TypeTable,
int_type_table: event.Locked(IntTypeTable),
c_int_types: [CInt.list.len]*Type.Int,
const IntTypeTable = std.HashMap(*const Type.Int.Key, *Type.Int, Type.Int.Key.hash, Type.Int.Key.eql);
const TypeTable = std.HashMap([]const u8, *Type, mem.hash_slice_u8, mem.eql_slice_u8);
const CompileErrList = std.ArrayList(*errmsg.Msg);
// TODO handle some of these earlier and report them in a way other than error codes
@ -362,6 +375,8 @@ pub const Compilation = struct {
.prelink_group = event.Group(BuildError!void).init(loop),
.deinit_group = event.Group(void).init(loop),
.compile_errors = event.Locked(CompileErrList).init(loop, CompileErrList.init(loop.allocator)),
.int_type_table = event.Locked(IntTypeTable).init(loop, IntTypeTable.init(loop.allocator)),
.c_int_types = undefined,
.meta_type = undefined,
.void_type = undefined,
@ -371,6 +386,7 @@ pub const Compilation = struct {
.false_value = undefined,
.noreturn_type = undefined,
.noreturn_value = undefined,
.comptime_int_type = undefined,
.target_machine = undefined,
.target_data_ref = undefined,
@ -382,8 +398,10 @@ pub const Compilation = struct {
.override_libc = null,
.destroy_handle = undefined,
.have_err_ret_tracing = false,
.primitive_type_table = undefined,
});
errdefer {
comp.int_type_table.private_data.deinit();
comp.arena_allocator.deinit();
comp.loop.allocator.destroy(comp);
}
@ -393,6 +411,7 @@ pub const Compilation = struct {
comp.llvm_target = try Target.llvmTargetFromTriple(comp.llvm_triple);
comp.link_libs_list = ArrayList(*LinkLib).init(comp.arena());
comp.zig_std_dir = try std.os.path.join(comp.arena(), zig_lib_dir, "std");
comp.primitive_type_table = TypeTable.init(comp.arena());
const opt_level = switch (build_mode) {
builtin.Mode.Debug => llvm.CodeGenLevelNone,
@ -445,7 +464,6 @@ pub const Compilation = struct {
}
try comp.initTypes();
errdefer comp.derefTypes();
comp.destroy_handle = try async<loop.allocator> comp.internalDeinit();
@ -453,8 +471,9 @@ pub const Compilation = struct {
}
fn initTypes(comp: *Compilation) !void {
comp.meta_type = try comp.gpa().create(Type.MetaType{
comp.meta_type = try comp.arena().create(Type.MetaType{
.base = Type{
.name = "type",
.base = Value{
.id = Value.Id.Type,
.typeof = undefined,
@ -466,10 +485,11 @@ pub const Compilation = struct {
});
comp.meta_type.value = &comp.meta_type.base;
comp.meta_type.base.base.typeof = &comp.meta_type.base;
errdefer comp.gpa().destroy(comp.meta_type);
assert((try comp.primitive_type_table.put(comp.meta_type.base.name, &comp.meta_type.base)) == null);
comp.void_type = try comp.gpa().create(Type.Void{
comp.void_type = try comp.arena().create(Type.Void{
.base = Type{
.name = "void",
.base = Value{
.id = Value.Id.Type,
.typeof = &Type.MetaType.get(comp).base,
@ -478,10 +498,11 @@ pub const Compilation = struct {
.id = builtin.TypeId.Void,
},
});
errdefer comp.gpa().destroy(comp.void_type);
assert((try comp.primitive_type_table.put(comp.void_type.base.name, &comp.void_type.base)) == null);
comp.noreturn_type = try comp.gpa().create(Type.NoReturn{
comp.noreturn_type = try comp.arena().create(Type.NoReturn{
.base = Type{
.name = "noreturn",
.base = Value{
.id = Value.Id.Type,
.typeof = &Type.MetaType.get(comp).base,
@ -490,10 +511,24 @@ pub const Compilation = struct {
.id = builtin.TypeId.NoReturn,
},
});
errdefer comp.gpa().destroy(comp.noreturn_type);
assert((try comp.primitive_type_table.put(comp.noreturn_type.base.name, &comp.noreturn_type.base)) == null);
comp.bool_type = try comp.gpa().create(Type.Bool{
comp.comptime_int_type = try comp.arena().create(Type.ComptimeInt{
.base = Type{
.name = "comptime_int",
.base = Value{
.id = Value.Id.Type,
.typeof = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
.id = builtin.TypeId.ComptimeInt,
},
});
assert((try comp.primitive_type_table.put(comp.comptime_int_type.base.name, &comp.comptime_int_type.base)) == null);
comp.bool_type = try comp.arena().create(Type.Bool{
.base = Type{
.name = "bool",
.base = Value{
.id = Value.Id.Type,
.typeof = &Type.MetaType.get(comp).base,
@ -502,18 +537,17 @@ pub const Compilation = struct {
.id = builtin.TypeId.Bool,
},
});
errdefer comp.gpa().destroy(comp.bool_type);
assert((try comp.primitive_type_table.put(comp.bool_type.base.name, &comp.bool_type.base)) == null);
comp.void_value = try comp.gpa().create(Value.Void{
comp.void_value = try comp.arena().create(Value.Void{
.base = Value{
.id = Value.Id.Void,
.typeof = &Type.Void.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
});
errdefer comp.gpa().destroy(comp.void_value);
comp.true_value = try comp.gpa().create(Value.Bool{
comp.true_value = try comp.arena().create(Value.Bool{
.base = Value{
.id = Value.Id.Bool,
.typeof = &Type.Bool.get(comp).base,
@ -521,9 +555,8 @@ pub const Compilation = struct {
},
.x = true,
});
errdefer comp.gpa().destroy(comp.true_value);
comp.false_value = try comp.gpa().create(Value.Bool{
comp.false_value = try comp.arena().create(Value.Bool{
.base = Value{
.id = Value.Id.Bool,
.typeof = &Type.Bool.get(comp).base,
@ -531,44 +564,56 @@ pub const Compilation = struct {
},
.x = false,
});
errdefer comp.gpa().destroy(comp.false_value);
comp.noreturn_value = try comp.gpa().create(Value.NoReturn{
comp.noreturn_value = try comp.arena().create(Value.NoReturn{
.base = Value{
.id = Value.Id.NoReturn,
.typeof = &Type.NoReturn.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
});
errdefer comp.gpa().destroy(comp.noreturn_value);
}
fn derefTypes(self: *Compilation) void {
self.noreturn_value.base.deref(self);
self.void_value.base.deref(self);
self.false_value.base.deref(self);
self.true_value.base.deref(self);
self.noreturn_type.base.base.deref(self);
self.void_type.base.base.deref(self);
self.meta_type.base.base.deref(self);
for (CInt.list) |cint, i| {
const c_int_type = try comp.arena().create(Type.Int{
.base = Type{
.name = cint.zig_name,
.base = Value{
.id = Value.Id.Type,
.typeof = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
.id = builtin.TypeId.Int,
},
.key = Type.Int.Key{
.is_signed = cint.is_signed,
.bit_count = comp.target.cIntTypeSizeInBits(cint.id),
},
.garbage_node = undefined,
});
comp.c_int_types[i] = c_int_type;
assert((try comp.primitive_type_table.put(cint.zig_name, &c_int_type.base)) == null);
}
}
/// This function can safely use async/await, because it manages Compilation's lifetime,
/// and EventLoopLocal.deinit will not be called until the event.Loop.run() completes.
async fn internalDeinit(self: *Compilation) void {
suspend;
await (async self.deinit_group.wait() catch unreachable);
if (self.tmp_dir.getOrNull()) |tmp_dir_result| if (tmp_dir_result.*) |tmp_dir| {
// TODO evented I/O?
os.deleteTree(self.arena(), tmp_dir) catch {};
} else |_| {};
self.derefTypes();
self.events.destroy();
llvm.DisposeMessage(self.target_layout_str);
llvm.DisposeTargetData(self.target_data_ref);
llvm.DisposeTargetMachine(self.target_machine);
self.primitive_type_table.deinit();
self.arena_allocator.deinit();
self.gpa().destroy(self);
}
@ -939,6 +984,10 @@ pub const Compilation = struct {
b64_fs_encoder.encode(result[0..], rand_bytes);
return result;
}
fn registerGarbage(comp: *Compilation, comptime T: type, node: *std.atomic.Stack(*T).Node) void {
// TODO put the garbage somewhere
}
};
fn printError(comptime format: []const u8, args: ...) !void {
@ -1005,7 +1054,7 @@ async fn generateDeclFn(comp: *Compilation, fn_decl: *Decl.Fn) !void {
fn_decl.base.parsed_file,
&fndef_scope.base,
body_node,
null,
return_type,
) catch unreachable)) orelse return;
errdefer analyzed_code.destroy(comp.gpa());

View File

@ -705,7 +705,10 @@ pub const Builder = struct {
ast.Node.Id.ErrorType => return error.Unimplemented,
ast.Node.Id.FnProto => return error.Unimplemented,
ast.Node.Id.PromiseType => return error.Unimplemented,
ast.Node.Id.IntegerLiteral => return error.Unimplemented,
ast.Node.Id.IntegerLiteral => {
const int_lit = @fieldParentPtr(ast.Node.IntegerLiteral, "base", node);
return irb.lvalWrap(scope, try irb.genIntLit(int_lit, scope), lval);
},
ast.Node.Id.FloatLiteral => return error.Unimplemented,
ast.Node.Id.StringLiteral => return error.Unimplemented,
ast.Node.Id.MultilineStringLiteral => return error.Unimplemented,
@ -766,6 +769,45 @@ pub const Builder = struct {
}
}
/// Build a Const IR instruction holding the value of an integer literal
/// token. The resulting known value has type `comptime_int`.
pub fn genIntLit(irb: *Builder, int_lit: *ast.Node.IntegerLiteral, scope: *Scope) !*Inst {
    const int_token = irb.parsed_file.tree.tokenSlice(int_lit.token);

    // Determine the radix from an optional "0b"/"0o"/"0x" prefix; `rest`
    // is the digit portion that remains after stripping the prefix.
    var base: u8 = undefined;
    var rest: []const u8 = undefined;
    if (int_token.len >= 3 and int_token[0] == '0') {
        // NOTE(review): assumes the tokenizer never emits a literal with a
        // leading '0' followed by anything other than 'b'/'o'/'x' (e.g.
        // "012"); such a token would hit this `unreachable` -- confirm.
        base = switch (int_token[1]) {
            'b' => u8(2),
            'o' => u8(8),
            'x' => u8(16),
            else => unreachable,
        };
        rest = int_token[2..];
    } else {
        base = 10;
        rest = int_token;
    }

    // `get` takes a reference on the comptime_int type; drop our reference
    // on scope exit (the created value holds its own via `typeof`).
    const comptime_int_type = Type.ComptimeInt.get(irb.comp);
    defer comptime_int_type.base.base.deref(irb.comp);

    const int_val = Value.Int.createFromString(
        irb.comp,
        &comptime_int_type.base,
        base,
        rest,
    ) catch |err| switch (err) {
        error.OutOfMemory => return error.OutOfMemory,
        // The tokenizer only produces digits valid for the detected base,
        // and the base values above are always valid.
        error.InvalidBase => unreachable,
        error.InvalidCharForDigit => unreachable,
        error.DigitTooLargeForBase => unreachable,
    };
    // Release the value's reference if building the instruction fails.
    errdefer int_val.base.deref(irb.comp);

    const inst = try irb.build(Inst.Const, scope, Span.token(int_lit.token), Inst.Const.Params{});
    inst.val = IrVal{ .KnownValue = &int_val.base };
    return inst;
}
pub fn genBlock(irb: *Builder, block: *ast.Node.Block, parent_scope: *Scope) !*Inst {
const block_scope = try Scope.Block.create(irb.comp, parent_scope);
@ -1306,7 +1348,436 @@ const Analyze = struct {
fn implicitCast(self: *Analyze, target: *Inst, optional_dest_type: ?*Type) Analyze.Error!*Inst {
const dest_type = optional_dest_type orelse return target;
return error.Unimplemented;
const from_type = target.getKnownType();
if (from_type == dest_type or from_type.id == Type.Id.NoReturn) return target;
return self.analyzeCast(target, target, dest_type);
}
fn analyzeCast(ira: *Analyze, source_instr: *Inst, target: *Inst, dest_type: *Type) !*Inst {
const from_type = target.getKnownType();
//if (type_is_invalid(wanted_type) || type_is_invalid(actual_type)) {
// return ira->codegen->invalid_instruction;
//}
//// perfect match or non-const to const
//ConstCastOnly const_cast_result = types_match_const_cast_only(ira, wanted_type, actual_type,
// source_node, false);
//if (const_cast_result.id == ConstCastResultIdOk) {
// return ir_resolve_cast(ira, source_instr, value, wanted_type, CastOpNoop, false);
//}
//// widening conversion
//if (wanted_type->id == TypeTableEntryIdInt &&
// actual_type->id == TypeTableEntryIdInt &&
// wanted_type->data.integral.is_signed == actual_type->data.integral.is_signed &&
// wanted_type->data.integral.bit_count >= actual_type->data.integral.bit_count)
//{
// return ir_analyze_widen_or_shorten(ira, source_instr, value, wanted_type);
//}
//// small enough unsigned ints can get casted to large enough signed ints
//if (wanted_type->id == TypeTableEntryIdInt && wanted_type->data.integral.is_signed &&
// actual_type->id == TypeTableEntryIdInt && !actual_type->data.integral.is_signed &&
// wanted_type->data.integral.bit_count > actual_type->data.integral.bit_count)
//{
// return ir_analyze_widen_or_shorten(ira, source_instr, value, wanted_type);
//}
//// float widening conversion
//if (wanted_type->id == TypeTableEntryIdFloat &&
// actual_type->id == TypeTableEntryIdFloat &&
// wanted_type->data.floating.bit_count >= actual_type->data.floating.bit_count)
//{
// return ir_analyze_widen_or_shorten(ira, source_instr, value, wanted_type);
//}
//// cast from [N]T to []const T
//if (is_slice(wanted_type) && actual_type->id == TypeTableEntryIdArray) {
// TypeTableEntry *ptr_type = wanted_type->data.structure.fields[slice_ptr_index].type_entry;
// assert(ptr_type->id == TypeTableEntryIdPointer);
// if ((ptr_type->data.pointer.is_const || actual_type->data.array.len == 0) &&
// types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, actual_type->data.array.child_type,
// source_node, false).id == ConstCastResultIdOk)
// {
// return ir_analyze_array_to_slice(ira, source_instr, value, wanted_type);
// }
//}
//// cast from *const [N]T to []const T
//if (is_slice(wanted_type) &&
// actual_type->id == TypeTableEntryIdPointer &&
// actual_type->data.pointer.is_const &&
// actual_type->data.pointer.child_type->id == TypeTableEntryIdArray)
//{
// TypeTableEntry *ptr_type = wanted_type->data.structure.fields[slice_ptr_index].type_entry;
// assert(ptr_type->id == TypeTableEntryIdPointer);
// TypeTableEntry *array_type = actual_type->data.pointer.child_type;
// if ((ptr_type->data.pointer.is_const || array_type->data.array.len == 0) &&
// types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, array_type->data.array.child_type,
// source_node, false).id == ConstCastResultIdOk)
// {
// return ir_analyze_array_to_slice(ira, source_instr, value, wanted_type);
// }
//}
//// cast from [N]T to *const []const T
//if (wanted_type->id == TypeTableEntryIdPointer &&
// wanted_type->data.pointer.is_const &&
// is_slice(wanted_type->data.pointer.child_type) &&
// actual_type->id == TypeTableEntryIdArray)
//{
// TypeTableEntry *ptr_type =
// wanted_type->data.pointer.child_type->data.structure.fields[slice_ptr_index].type_entry;
// assert(ptr_type->id == TypeTableEntryIdPointer);
// if ((ptr_type->data.pointer.is_const || actual_type->data.array.len == 0) &&
// types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, actual_type->data.array.child_type,
// source_node, false).id == ConstCastResultIdOk)
// {
// IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_type->data.pointer.child_type, value);
// if (type_is_invalid(cast1->value.type))
// return ira->codegen->invalid_instruction;
// IrInstruction *cast2 = ir_analyze_cast(ira, source_instr, wanted_type, cast1);
// if (type_is_invalid(cast2->value.type))
// return ira->codegen->invalid_instruction;
// return cast2;
// }
//}
//// cast from [N]T to ?[]const T
//if (wanted_type->id == TypeTableEntryIdOptional &&
// is_slice(wanted_type->data.maybe.child_type) &&
// actual_type->id == TypeTableEntryIdArray)
//{
// TypeTableEntry *ptr_type =
// wanted_type->data.maybe.child_type->data.structure.fields[slice_ptr_index].type_entry;
// assert(ptr_type->id == TypeTableEntryIdPointer);
// if ((ptr_type->data.pointer.is_const || actual_type->data.array.len == 0) &&
// types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, actual_type->data.array.child_type,
// source_node, false).id == ConstCastResultIdOk)
// {
// IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_type->data.maybe.child_type, value);
// if (type_is_invalid(cast1->value.type))
// return ira->codegen->invalid_instruction;
// IrInstruction *cast2 = ir_analyze_cast(ira, source_instr, wanted_type, cast1);
// if (type_is_invalid(cast2->value.type))
// return ira->codegen->invalid_instruction;
// return cast2;
// }
//}
//// *[N]T to [*]T
//if (wanted_type->id == TypeTableEntryIdPointer &&
// wanted_type->data.pointer.ptr_len == PtrLenUnknown &&
// actual_type->id == TypeTableEntryIdPointer &&
// actual_type->data.pointer.ptr_len == PtrLenSingle &&
// actual_type->data.pointer.child_type->id == TypeTableEntryIdArray &&
// actual_type->data.pointer.alignment >= wanted_type->data.pointer.alignment &&
// types_match_const_cast_only(ira, wanted_type->data.pointer.child_type,
// actual_type->data.pointer.child_type->data.array.child_type, source_node,
// !wanted_type->data.pointer.is_const).id == ConstCastResultIdOk)
//{
// return ir_resolve_ptr_of_array_to_unknown_len_ptr(ira, source_instr, value, wanted_type);
//}
//// *[N]T to []T
//if (is_slice(wanted_type) &&
// actual_type->id == TypeTableEntryIdPointer &&
// actual_type->data.pointer.ptr_len == PtrLenSingle &&
// actual_type->data.pointer.child_type->id == TypeTableEntryIdArray)
//{
// TypeTableEntry *slice_ptr_type = wanted_type->data.structure.fields[slice_ptr_index].type_entry;
// assert(slice_ptr_type->id == TypeTableEntryIdPointer);
// if (types_match_const_cast_only(ira, slice_ptr_type->data.pointer.child_type,
// actual_type->data.pointer.child_type->data.array.child_type, source_node,
// !slice_ptr_type->data.pointer.is_const).id == ConstCastResultIdOk)
// {
// return ir_resolve_ptr_of_array_to_slice(ira, source_instr, value, wanted_type);
// }
//}
//// cast from T to ?T
//// note that the *T to ?*T case is handled via the "ConstCastOnly" mechanism
//if (wanted_type->id == TypeTableEntryIdOptional) {
// TypeTableEntry *wanted_child_type = wanted_type->data.maybe.child_type;
// if (types_match_const_cast_only(ira, wanted_child_type, actual_type, source_node,
// false).id == ConstCastResultIdOk)
// {
// return ir_analyze_maybe_wrap(ira, source_instr, value, wanted_type);
// } else if (actual_type->id == TypeTableEntryIdComptimeInt ||
// actual_type->id == TypeTableEntryIdComptimeFloat)
// {
// if (ir_num_lit_fits_in_other_type(ira, value, wanted_child_type, true)) {
// return ir_analyze_maybe_wrap(ira, source_instr, value, wanted_type);
// } else {
// return ira->codegen->invalid_instruction;
// }
// } else if (wanted_child_type->id == TypeTableEntryIdPointer &&
// wanted_child_type->data.pointer.is_const &&
// (actual_type->id == TypeTableEntryIdPointer || is_container(actual_type)))
// {
// IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_child_type, value);
// if (type_is_invalid(cast1->value.type))
// return ira->codegen->invalid_instruction;
// IrInstruction *cast2 = ir_analyze_cast(ira, source_instr, wanted_type, cast1);
// if (type_is_invalid(cast2->value.type))
// return ira->codegen->invalid_instruction;
// return cast2;
// }
//}
//// cast from null literal to maybe type
//if (wanted_type->id == TypeTableEntryIdOptional &&
// actual_type->id == TypeTableEntryIdNull)
//{
// return ir_analyze_null_to_maybe(ira, source_instr, value, wanted_type);
//}
//// cast from child type of error type to error type
//if (wanted_type->id == TypeTableEntryIdErrorUnion) {
// if (types_match_const_cast_only(ira, wanted_type->data.error_union.payload_type, actual_type,
// source_node, false).id == ConstCastResultIdOk)
// {
// return ir_analyze_err_wrap_payload(ira, source_instr, value, wanted_type);
// } else if (actual_type->id == TypeTableEntryIdComptimeInt ||
// actual_type->id == TypeTableEntryIdComptimeFloat)
// {
// if (ir_num_lit_fits_in_other_type(ira, value, wanted_type->data.error_union.payload_type, true)) {
// return ir_analyze_err_wrap_payload(ira, source_instr, value, wanted_type);
// } else {
// return ira->codegen->invalid_instruction;
// }
// }
//}
//// cast from [N]T to E![]const T
//if (wanted_type->id == TypeTableEntryIdErrorUnion &&
// is_slice(wanted_type->data.error_union.payload_type) &&
// actual_type->id == TypeTableEntryIdArray)
//{
// TypeTableEntry *ptr_type =
// wanted_type->data.error_union.payload_type->data.structure.fields[slice_ptr_index].type_entry;
// assert(ptr_type->id == TypeTableEntryIdPointer);
// if ((ptr_type->data.pointer.is_const || actual_type->data.array.len == 0) &&
// types_match_const_cast_only(ira, ptr_type->data.pointer.child_type, actual_type->data.array.child_type,
// source_node, false).id == ConstCastResultIdOk)
// {
// IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_type->data.error_union.payload_type, value);
// if (type_is_invalid(cast1->value.type))
// return ira->codegen->invalid_instruction;
// IrInstruction *cast2 = ir_analyze_cast(ira, source_instr, wanted_type, cast1);
// if (type_is_invalid(cast2->value.type))
// return ira->codegen->invalid_instruction;
// return cast2;
// }
//}
//// cast from error set to error union type
//if (wanted_type->id == TypeTableEntryIdErrorUnion &&
// actual_type->id == TypeTableEntryIdErrorSet)
//{
// return ir_analyze_err_wrap_code(ira, source_instr, value, wanted_type);
//}
//// cast from T to E!?T
//if (wanted_type->id == TypeTableEntryIdErrorUnion &&
// wanted_type->data.error_union.payload_type->id == TypeTableEntryIdOptional &&
// actual_type->id != TypeTableEntryIdOptional)
//{
// TypeTableEntry *wanted_child_type = wanted_type->data.error_union.payload_type->data.maybe.child_type;
// if (types_match_const_cast_only(ira, wanted_child_type, actual_type, source_node, false).id == ConstCastResultIdOk ||
// actual_type->id == TypeTableEntryIdNull ||
// actual_type->id == TypeTableEntryIdComptimeInt ||
// actual_type->id == TypeTableEntryIdComptimeFloat)
// {
// IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_type->data.error_union.payload_type, value);
// if (type_is_invalid(cast1->value.type))
// return ira->codegen->invalid_instruction;
// IrInstruction *cast2 = ir_analyze_cast(ira, source_instr, wanted_type, cast1);
// if (type_is_invalid(cast2->value.type))
// return ira->codegen->invalid_instruction;
// return cast2;
// }
//}
// cast from number literal to another type
//// cast from number literal to *const integer
//if (actual_type->id == TypeTableEntryIdComptimeFloat ||
// actual_type->id == TypeTableEntryIdComptimeInt)
//{
// ensure_complete_type(ira->codegen, wanted_type);
// if (type_is_invalid(wanted_type))
// return ira->codegen->invalid_instruction;
// if (wanted_type->id == TypeTableEntryIdEnum) {
// IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_type->data.enumeration.tag_int_type, value);
// if (type_is_invalid(cast1->value.type))
// return ira->codegen->invalid_instruction;
// IrInstruction *cast2 = ir_analyze_cast(ira, source_instr, wanted_type, cast1);
// if (type_is_invalid(cast2->value.type))
// return ira->codegen->invalid_instruction;
// return cast2;
// } else if (wanted_type->id == TypeTableEntryIdPointer &&
// wanted_type->data.pointer.is_const)
// {
// IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, wanted_type->data.pointer.child_type, value);
// if (type_is_invalid(cast1->value.type))
// return ira->codegen->invalid_instruction;
// IrInstruction *cast2 = ir_analyze_cast(ira, source_instr, wanted_type, cast1);
// if (type_is_invalid(cast2->value.type))
// return ira->codegen->invalid_instruction;
// return cast2;
// } else if (ir_num_lit_fits_in_other_type(ira, value, wanted_type, true)) {
// CastOp op;
// if ((actual_type->id == TypeTableEntryIdComptimeFloat &&
// wanted_type->id == TypeTableEntryIdFloat) ||
// (actual_type->id == TypeTableEntryIdComptimeInt &&
// wanted_type->id == TypeTableEntryIdInt))
// {
// op = CastOpNumLitToConcrete;
// } else if (wanted_type->id == TypeTableEntryIdInt) {
// op = CastOpFloatToInt;
// } else if (wanted_type->id == TypeTableEntryIdFloat) {
// op = CastOpIntToFloat;
// } else {
// zig_unreachable();
// }
// return ir_resolve_cast(ira, source_instr, value, wanted_type, op, false);
// } else {
// return ira->codegen->invalid_instruction;
// }
//}
//// cast from typed number to integer or float literal.
//// works when the number is known at compile time
//if (instr_is_comptime(value) &&
// ((actual_type->id == TypeTableEntryIdInt && wanted_type->id == TypeTableEntryIdComptimeInt) ||
// (actual_type->id == TypeTableEntryIdFloat && wanted_type->id == TypeTableEntryIdComptimeFloat)))
//{
// return ir_analyze_number_to_literal(ira, source_instr, value, wanted_type);
//}
//// cast from union to the enum type of the union
//if (actual_type->id == TypeTableEntryIdUnion && wanted_type->id == TypeTableEntryIdEnum) {
// type_ensure_zero_bits_known(ira->codegen, actual_type);
// if (type_is_invalid(actual_type))
// return ira->codegen->invalid_instruction;
// if (actual_type->data.unionation.tag_type == wanted_type) {
// return ir_analyze_union_to_tag(ira, source_instr, value, wanted_type);
// }
//}
//// enum to union which has the enum as the tag type
//if (wanted_type->id == TypeTableEntryIdUnion && actual_type->id == TypeTableEntryIdEnum &&
// (wanted_type->data.unionation.decl_node->data.container_decl.auto_enum ||
// wanted_type->data.unionation.decl_node->data.container_decl.init_arg_expr != nullptr))
//{
// type_ensure_zero_bits_known(ira->codegen, wanted_type);
// if (wanted_type->data.unionation.tag_type == actual_type) {
// return ir_analyze_enum_to_union(ira, source_instr, value, wanted_type);
// }
//}
//// enum to &const union which has the enum as the tag type
//if (actual_type->id == TypeTableEntryIdEnum && wanted_type->id == TypeTableEntryIdPointer) {
// TypeTableEntry *union_type = wanted_type->data.pointer.child_type;
// if (union_type->data.unionation.decl_node->data.container_decl.auto_enum ||
// union_type->data.unionation.decl_node->data.container_decl.init_arg_expr != nullptr)
// {
// type_ensure_zero_bits_known(ira->codegen, union_type);
// if (union_type->data.unionation.tag_type == actual_type) {
// IrInstruction *cast1 = ir_analyze_cast(ira, source_instr, union_type, value);
// if (type_is_invalid(cast1->value.type))
// return ira->codegen->invalid_instruction;
// IrInstruction *cast2 = ir_analyze_cast(ira, source_instr, wanted_type, cast1);
// if (type_is_invalid(cast2->value.type))
// return ira->codegen->invalid_instruction;
// return cast2;
// }
// }
//}
//// cast from *T to *[1]T
//if (wanted_type->id == TypeTableEntryIdPointer && wanted_type->data.pointer.ptr_len == PtrLenSingle &&
// actual_type->id == TypeTableEntryIdPointer && actual_type->data.pointer.ptr_len == PtrLenSingle)
//{
// TypeTableEntry *array_type = wanted_type->data.pointer.child_type;
// if (array_type->id == TypeTableEntryIdArray && array_type->data.array.len == 1 &&
// types_match_const_cast_only(ira, array_type->data.array.child_type,
// actual_type->data.pointer.child_type, source_node,
// !wanted_type->data.pointer.is_const).id == ConstCastResultIdOk)
// {
// if (wanted_type->data.pointer.alignment > actual_type->data.pointer.alignment) {
// ErrorMsg *msg = ir_add_error(ira, source_instr, buf_sprintf("cast increases pointer alignment"));
// add_error_note(ira->codegen, msg, value->source_node,
// buf_sprintf("'%s' has alignment %" PRIu32, buf_ptr(&actual_type->name),
// actual_type->data.pointer.alignment));
// add_error_note(ira->codegen, msg, source_instr->source_node,
// buf_sprintf("'%s' has alignment %" PRIu32, buf_ptr(&wanted_type->name),
// wanted_type->data.pointer.alignment));
// return ira->codegen->invalid_instruction;
// }
// return ir_analyze_ptr_to_array(ira, source_instr, value, wanted_type);
// }
//}
//// cast from T to *T where T is zero bits
//if (wanted_type->id == TypeTableEntryIdPointer && wanted_type->data.pointer.ptr_len == PtrLenSingle &&
// types_match_const_cast_only(ira, wanted_type->data.pointer.child_type,
// actual_type, source_node, !wanted_type->data.pointer.is_const).id == ConstCastResultIdOk)
//{
// type_ensure_zero_bits_known(ira->codegen, actual_type);
// if (type_is_invalid(actual_type)) {
// return ira->codegen->invalid_instruction;
// }
// if (!type_has_bits(actual_type)) {
// return ir_get_ref(ira, source_instr, value, false, false);
// }
//}
//// cast from undefined to anything
//if (actual_type->id == TypeTableEntryIdUndefined) {
// return ir_analyze_undefined_to_anything(ira, source_instr, value, wanted_type);
//}
//// cast from something to const pointer of it
//if (!type_requires_comptime(actual_type)) {
// TypeTableEntry *const_ptr_actual = get_pointer_to_type(ira->codegen, actual_type, true);
// if (types_match_const_cast_only(ira, wanted_type, const_ptr_actual, source_node, false).id == ConstCastResultIdOk) {
// return ir_analyze_cast_ref(ira, source_instr, value, wanted_type);
// }
//}
try ira.addCompileError(
source_instr.span,
"expected type '{}', found '{}'",
dest_type.name,
from_type.name,
);
//ErrorMsg *parent_msg = ir_add_error_node(ira, source_instr->source_node,
// buf_sprintf("expected type '%s', found '%s'",
// buf_ptr(&wanted_type->name),
// buf_ptr(&actual_type->name)));
//report_recursive_error(ira, source_instr->source_node, &const_cast_result, parent_msg);
return error.SemanticAnalysisFailed;
}
fn getCompTimeValOrNullUndefOk(self: *Analyze, target: *Inst) ?*Value {

View File

@ -30,6 +30,8 @@ pub const AddModuleDebugInfoFlag = c.ZigLLVMAddModuleDebugInfoFlag;
pub const ClearCurrentDebugLocation = c.ZigLLVMClearCurrentDebugLocation;
pub const ConstAllOnes = c.LLVMConstAllOnes;
pub const ConstInt = c.LLVMConstInt;
pub const ConstIntOfArbitraryPrecision = c.LLVMConstIntOfArbitraryPrecision;
pub const ConstNeg = c.LLVMConstNeg;
pub const ConstNull = c.LLVMConstNull;
pub const ConstStringInContext = c.LLVMConstStringInContext;
pub const ConstStructInContext = c.LLVMConstStructInContext;

View File

@ -1,6 +1,7 @@
const std = @import("std");
const builtin = @import("builtin");
const llvm = @import("llvm.zig");
const CInt = @import("c_int.zig").CInt;
pub const FloatAbi = enum {
Hard,
@ -173,7 +174,7 @@ pub const Target = union(enum) {
return self.getArchPtrBitWidth() == 64;
}
pub fn getArchPtrBitWidth(self: Target) u8 {
pub fn getArchPtrBitWidth(self: Target) u32 {
switch (self.getArch()) {
builtin.Arch.avr,
builtin.Arch.msp430,
@ -429,4 +430,100 @@ pub const Target = union(enum) {
}
return result;
}
/// Returns the size in bits of the C integer type `id` for this target,
/// per the OS/architecture ABI conventions: short is 16 bits and
/// long long is 64 bits everywhere listed; int is 32 bits except on
/// msp430 freestanding (16); long is pointer-sized on LP64-style OSes,
/// 64 bits on Windows's list below, and pointer-sized when freestanding.
pub fn cIntTypeSizeInBits(self: Target, id: CInt.Id) u32 {
    // Fix: this local was previously declared but never used, while the
    // freestanding branch called self.getArch() a second time.
    const arch = self.getArch();
    switch (self.getOs()) {
        builtin.Os.freestanding => switch (arch) {
            // msp430 has a 16-bit int ABI.
            builtin.Arch.msp430 => switch (id) {
                CInt.Id.Short,
                CInt.Id.UShort,
                CInt.Id.Int,
                CInt.Id.UInt,
                => return 16,
                CInt.Id.Long,
                CInt.Id.ULong,
                => return 32,
                CInt.Id.LongLong,
                CInt.Id.ULongLong,
                => return 64,
            },
            else => switch (id) {
                CInt.Id.Short,
                CInt.Id.UShort,
                => return 16,
                CInt.Id.Int,
                CInt.Id.UInt,
                => return 32,
                CInt.Id.Long,
                CInt.Id.ULong,
                => return self.getArchPtrBitWidth(),
                CInt.Id.LongLong,
                CInt.Id.ULongLong,
                => return 64,
            },
        },
        builtin.Os.linux,
        builtin.Os.macosx,
        builtin.Os.openbsd,
        builtin.Os.zen,
        => switch (id) {
            CInt.Id.Short,
            CInt.Id.UShort,
            => return 16,
            CInt.Id.Int,
            CInt.Id.UInt,
            => return 32,
            CInt.Id.Long,
            CInt.Id.ULong,
            => return self.getArchPtrBitWidth(),
            CInt.Id.LongLong,
            CInt.Id.ULongLong,
            => return 64,
        },
        // NOTE(review): this makes `long` 64-bit on Windows; the usual
        // Windows ABI is LLP64 (32-bit long) -- confirm intent.
        builtin.Os.windows => switch (id) {
            CInt.Id.Short,
            CInt.Id.UShort,
            => return 16,
            CInt.Id.Int,
            CInt.Id.UInt,
            => return 32,
            CInt.Id.Long,
            CInt.Id.ULong,
            CInt.Id.LongLong,
            CInt.Id.ULongLong,
            => return 64,
        },
        builtin.Os.ananas,
        builtin.Os.cloudabi,
        builtin.Os.dragonfly,
        builtin.Os.freebsd,
        builtin.Os.fuchsia,
        builtin.Os.ios,
        builtin.Os.kfreebsd,
        builtin.Os.lv2,
        builtin.Os.netbsd,
        builtin.Os.solaris,
        builtin.Os.haiku,
        builtin.Os.minix,
        builtin.Os.rtems,
        builtin.Os.nacl,
        builtin.Os.cnk,
        builtin.Os.aix,
        builtin.Os.cuda,
        builtin.Os.nvcl,
        builtin.Os.amdhsa,
        builtin.Os.ps4,
        builtin.Os.elfiamcu,
        builtin.Os.tvos,
        builtin.Os.watchos,
        builtin.Os.mesa3d,
        builtin.Os.contiki,
        builtin.Os.amdpal,
        => @panic("TODO specify the C integer type sizes for this OS"),
    }
}
};

View File

@ -9,6 +9,7 @@ const ObjectFile = @import("codegen.zig").ObjectFile;
pub const Type = struct {
base: Value,
id: Id,
name: []const u8,
pub const Id = builtin.TypeId;
@ -151,6 +152,18 @@ pub const Type = struct {
std.debug.warn("{}", @tagName(base.id));
}
/// Initializes the common Type header: the embedded Value starts with one
/// reference and is typed as the `type` metatype; the id and display name
/// identify the concrete type.
fn init(base: *Type, comp: *Compilation, id: Id, name: []const u8) void {
    const base_value = Value{
        .id = Value.Id.Type,
        .typeof = &MetaType.get(comp).base,
        .ref_count = std.atomic.Int(usize).init(1),
    };
    base.* = Type{
        .base = base_value,
        .id = id,
        .name = name,
    };
}
/// Returns the ABI alignment of this type. Not yet implemented.
pub fn getAbiAlignment(base: *Type, comp: *Compilation) u32 {
    @panic("TODO getAbiAlignment");
}
@ -181,20 +194,15 @@ pub const Type = struct {
pub fn create(comp: *Compilation, return_type: *Type, params: []Param, is_var_args: bool) !*Fn {
const result = try comp.gpa().create(Fn{
.base = Type{
.base = Value{
.id = Value.Id.Type,
.typeof = &MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
.id = builtin.TypeId.Fn,
},
.base = undefined,
.return_type = return_type,
.params = params,
.is_var_args = is_var_args,
});
errdefer comp.gpa().destroy(result);
result.base.init(comp, Id.Fn, "TODO fn type name");
result.return_type.base.ref();
for (result.params) |param| {
param.typeof.base.ref();
@ -293,13 +301,77 @@ pub const Type = struct {
pub const Int = struct {
base: Type,
key: Key,
garbage_node: std.atomic.Stack(*Int).Node,
/// Uniquely identifies an integer type (iN / uN) for interning purposes.
pub const Key = struct {
    bit_count: u32,
    is_signed: bool,

    /// Mixes signedness and bit count into a table hash; distinct seeds
    /// keep iN and uN of the same width from trivially colliding.
    pub fn hash(self: *const Key) u32 {
        const seed: u32 = if (self.is_signed) 0x75fc5af7 else 0xa4ba6498;
        return seed *% self.bit_count;
    }

    pub fn eql(self: *const Key, other: *const Key) bool {
        if (self.bit_count != other.bit_count) return false;
        return self.is_signed == other.is_signed;
    }
};
/// Looks up (or creates and interns) the integer type described by `key`.
/// The returned type is shared via `comp.int_type_table`.
pub async fn get(comp: *Compilation, key: Key) !*Int {
    // Fast path: the type may already be interned.
    {
        const held = await (async comp.int_type_table.acquire() catch unreachable);
        defer held.release();

        if (held.value.get(&key)) |entry| {
            return entry.value;
        }
    }

    const self = try comp.gpa().create(Int{
        .base = undefined,
        .key = key,
        .garbage_node = undefined,
    });
    errdefer comp.gpa().destroy(self);

    const u_or_i = "ui"[@boolToInt(key.is_signed)];
    // BUG FIX: allocPrint returns an error union; propagate allocation
    // failure with `try` instead of storing the unwrapped error union.
    const name = try std.fmt.allocPrint(comp.gpa(), "{c}{}", u_or_i, key.bit_count);
    errdefer comp.gpa().free(name);

    self.base.init(comp, Id.Int, name);

    {
        const held = await (async comp.int_type_table.acquire() catch unreachable);
        defer held.release();

        // BUG FIX: put can fail (table growth allocates); propagate the error
        // so the errdefers above clean up the partially constructed type.
        // NOTE(review): another task could have interned the same key between
        // the two acquires; this (like the original) overwrites — confirm
        // whether a re-check is needed here.
        _ = try held.value.put(&self.key, self);
    }
    return self;
}
/// Defers actual destruction: queues this Int on the compilation's garbage
/// list; the real teardown happens later in gcDestroy.
pub fn destroy(self: *Int, comp: *Compilation) void {
    // The node's memory lives inside the Int itself, so it remains valid
    // until gcDestroy frees the Int.
    self.garbage_node = std.atomic.Stack(*Int).Node{
        .data = self,
        .next = undefined,
    };
    comp.registerGarbage(Int, &self.garbage_node);
}
/// Final teardown, run by the garbage collector: removes this Int from the
/// interning table, then frees its heap-allocated name and its memory.
pub async fn gcDestroy(self: *Int, comp: *Compilation) void {
    {
        const held = await (async comp.int_type_table.acquire() catch unreachable);
        defer held.release();

        _ = held.value.remove(&self.key).?;
    }
    // we allocated the name (in `get`), so we free it here
    comp.gpa().free(self.base.name);
    comp.gpa().destroy(self);
}
pub fn getLlvmType(self: *Int, ofile: *ObjectFile) llvm.TypeRef {
@panic("TODO");
pub fn getLlvmType(self: *Int, ofile: *ObjectFile) !llvm.TypeRef {
return llvm.IntTypeInContext(ofile.context, self.key.bit_count) orelse return error.OutOfMemory;
}
};
@ -374,6 +446,12 @@ pub const Type = struct {
pub const ComptimeInt = struct {
base: Type,
/// Adds 1 reference to the resulting type
pub fn get(comp: *Compilation) *ComptimeInt {
comp.comptime_int_type.base.base.ref();
return comp.comptime_int_type;
}
pub fn destroy(self: *ComptimeInt, comp: *Compilation) void {
comp.gpa().destroy(self);
}

View File

@ -29,6 +29,7 @@ pub const Value = struct {
Id.Bool => @fieldParentPtr(Bool, "base", base).destroy(comp),
Id.NoReturn => @fieldParentPtr(NoReturn, "base", base).destroy(comp),
Id.Ptr => @fieldParentPtr(Ptr, "base", base).destroy(comp),
Id.Int => @fieldParentPtr(Int, "base", base).destroy(comp),
}
}
}
@ -50,6 +51,7 @@ pub const Value = struct {
Id.Bool => return @fieldParentPtr(Bool, "base", base).getLlvmConst(ofile),
Id.NoReturn => unreachable,
Id.Ptr => @panic("TODO"),
Id.Int => return @fieldParentPtr(Int, "base", base).getLlvmConst(ofile),
}
}
@ -60,6 +62,7 @@ pub const Value = struct {
Bool,
NoReturn,
Ptr,
Int,
};
pub const Type = @import("type.zig").Type;
@ -198,4 +201,59 @@ pub const Value = struct {
comp.gpa().destroy(self);
}
};
/// A compile-time known integer constant value, stored as an arbitrary
/// precision big integer.
pub const Int = struct {
    base: Value,
    big_int: std.math.big.Int,

    /// Parses `value` (digits in the given numeric `base`) into a new
    /// heap-allocated integer constant of type `typeof`. Takes one
    /// reference on `typeof`; the returned Int starts with ref_count 1.
    pub fn createFromString(comp: *Compilation, typeof: *Type, base: u8, value: []const u8) !*Int {
        const self = try comp.gpa().create(Value.Int{
            .base = Value{
                .id = Value.Id.Int,
                .typeof = typeof,
                .ref_count = std.atomic.Int(usize).init(1),
            },
            .big_int = undefined,
        });
        errdefer comp.gpa().destroy(self);

        self.big_int = try std.math.big.Int.init(comp.gpa());
        errdefer self.big_int.deinit();

        try self.big_int.setString(base, value);

        // BUG FIX: take the reference on `typeof` only after all fallible
        // steps succeeded; previously the ref was taken first, so a failure
        // in big int init or setString leaked one reference on `typeof`.
        typeof.base.ref();
        return self;
    }

    /// Lowers this constant to an LLVM constant of its (concrete) int type.
    pub fn getLlvmConst(self: *Int, ofile: *ObjectFile) !?llvm.ValueRef {
        switch (self.base.typeof.id) {
            Type.Id.Int => {
                const type_ref = try self.base.typeof.getLlvmType(ofile);
                // Zero limbs means the value is zero.
                if (self.big_int.len == 0) {
                    return llvm.ConstNull(type_ref);
                }
                const unsigned_val = if (self.big_int.len == 1) blk: {
                    break :blk llvm.ConstInt(type_ref, self.big_int.limbs[0], @boolToInt(false));
                } else if (@sizeOf(std.math.big.Limb) == @sizeOf(u64)) blk: {
                    break :blk llvm.ConstIntOfArbitraryPrecision(
                        type_ref,
                        @intCast(c_uint, self.big_int.len),
                        @ptrCast([*]u64, self.big_int.limbs.ptr),
                    );
                } else {
                    @compileError("std.math.Big.Int.Limb size does not match LLVM");
                };
                // The magnitude is stored unsigned; negate for negatives.
                return if (self.big_int.positive) unsigned_val else llvm.ConstNeg(unsigned_val);
            },
            // comptime_int must be converted to a concrete int type first.
            Type.Id.ComptimeInt => unreachable,
            else => unreachable,
        }
    }

    /// Frees the big int storage and the Int itself.
    /// NOTE(review): this does not deref base.typeof — presumably the generic
    /// Value deref machinery handles that; confirm to avoid a type leak.
    pub fn destroy(self: *Int, comp: *Compilation) void {
        self.big_int.deinit();
        comp.gpa().destroy(self);
    }
};
};

View File

@ -38,6 +38,15 @@ pub fn Group(comptime ReturnType: type) type {
self.alloc_stack.push(node);
}
/// Add a node to the group. Thread-safe. Cannot fail.
/// `node.data` should be the promise handle to add to the group.
/// The node's memory should be in the coroutine frame of
/// the handle that is in the node, or somewhere guaranteed to live
/// at least as long.
pub fn addNode(self: *Self, node: *Stack.Node) void {
    // Push the promise handle onto the group's stack of outstanding
    // coroutines; entries are popped later (e.g. by cancelAll).
    self.coro_stack.push(node);
}
/// This is equivalent to an async call, but the async function is added to the group, instead
/// of returning a promise. func must be async and have return type ReturnType.
/// Thread-safe.
@ -98,6 +107,8 @@ pub fn Group(comptime ReturnType: type) type {
}
/// Cancel all the outstanding promises. May only be called if wait was never called.
/// TODO These should be `cancelasync` not `cancel`.
/// See https://github.com/ziglang/zig/issues/1261
pub fn cancelAll(self: *Self) void {
while (self.coro_stack.pop()) |node| {
cancel node.data;

View File

@ -60,8 +60,9 @@ pub const Int = struct {
self.limbs = try self.allocator.realloc(Limb, self.limbs, capacity);
}
pub fn deinit(self: Int) void {
pub fn deinit(self: *Int) void {
self.allocator.free(self.limbs);
self.* = undefined;
}
pub fn clone(other: Int) !Int {