error sets - most tests passing

parent 68238d5678
commit 0d5ff6f462

11
TODO
@ -1,5 +1,6 @@
sed -i 's/\(\bfn .*) \)%\(.*{\)$/\1!\2/g' $(find . -name "*.zig")
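For instance, a hypothetical declaration like `fn openConfig(path: []const u8) %void {` becomes `fn openConfig(path: []const u8) !void {` after this rewrite.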
the literal translation of `%T` to this new code is `error!T`.
however this would not take advantage of error sets. It's
recommended to generally have all your functions which return possible
@ -11,6 +12,11 @@ fn foo() !void {
then you can return void, or any error, and the error set is inferred.
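A minimal sketch of what that looks like (hypothetical function, not part of this commit):

    fn readConfigByte(ok: bool) !u8 {
        // returning an error adds it to the inferred error set,
        // which here ends up containing only EndOfStream
        if (!ok) return error.EndOfStream;
        return 42;
    }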
you can get the compiler to tell you the possible errors for an inferred error set like this:
foo() catch |err| switch (err) {};
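The empty `switch` is deliberately non-exhaustive: a `switch` over an error set has to handle every member, so the resulting compile error lists each error in `foo`'s inferred set.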
// TODO this is an explicit cast and should actually coerce the type
error set casting
@ -27,3 +33,8 @@ comptime test for err
undefined in infer error
syntax - ?a!b should be ?(a!b) but it's (?a)!b
syntax - (error{}!void) as the return type
passing a fn()error{}!T to a fn()error!T should be a compile error, they're not compatible
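A sketch of the first syntax item (hypothetical declarations; the parenthesized workaround mirrors the `?(NextError![]u8)` form used elsewhere in this commit):

    const Set = error{InvalidChar};

    // Without parentheses, ?Set!i32 is grouped as (?Set)!i32, which is not the
    // intended "nullable error union". The parenthesized form spells it out:
    const MaybeFallible = ?(Set!i32);

    fn emptyInput() MaybeFallible {
        return null;
    }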
@ -42,7 +42,7 @@ pub fn main() !void {
|
||||
const input_file_bytes = try file_in_stream.stream.readAllAlloc(allocator, max_doc_file_size);
|
||||
|
||||
var file_out_stream = io.FileOutStream.init(&out_file);
|
||||
var buffered_out_stream = io.BufferedOutStream.init(&file_out_stream.stream);
|
||||
var buffered_out_stream = io.BufferedOutStream(io.FileOutStream.Error).init(&file_out_stream.stream);
|
||||
|
||||
var tokenizer = Tokenizer.init(in_file_name, input_file_bytes);
|
||||
var toc = try genToc(allocator, &tokenizer);
|
||||
@ -218,8 +218,6 @@ const Tokenizer = struct {
|
||||
}
|
||||
};
|
||||
|
||||
error ParseError;
|
||||
|
||||
fn parseError(tokenizer: &Tokenizer, token: &const Token, comptime fmt: []const u8, args: ...) error {
|
||||
const loc = tokenizer.getTokenLocation(token);
|
||||
warn("{}:{}:{}: error: " ++ fmt ++ "\n", tokenizer.source_file_name, loc.line + 1, loc.column + 1, args);
|
||||
@ -596,8 +594,6 @@ const TermState = enum {
|
||||
ExpectEnd,
|
||||
};
|
||||
|
||||
error UnsupportedEscape;
|
||||
|
||||
test "term color" {
|
||||
const input_bytes = "A\x1b[32;1mgreen\x1b[0mB";
|
||||
const result = try termColor(std.debug.global_allocator, input_bytes);
|
||||
@ -684,9 +680,7 @@ fn termColor(allocator: &mem.Allocator, input: []const u8) ![]u8 {
|
||||
return buf.toOwnedSlice();
|
||||
}
|
||||
|
||||
error ExampleFailedToCompile;
|
||||
|
||||
fn genHtml(allocator: &mem.Allocator, tokenizer: &Tokenizer, toc: &Toc, out: &io.OutStream, zig_exe: []const u8) !void {
|
||||
fn genHtml(allocator: &mem.Allocator, tokenizer: &Tokenizer, toc: &Toc, out: var, zig_exe: []const u8) !void {
|
||||
var code_progress_index: usize = 0;
|
||||
for (toc.nodes) |node| {
|
||||
switch (node) {
|
||||
@ -974,9 +968,6 @@ fn genHtml(allocator: &mem.Allocator, tokenizer: &Tokenizer, toc: &Toc, out: &io
|
||||
|
||||
}
|
||||
|
||||
error ChildCrashed;
|
||||
error ChildExitError;
|
||||
|
||||
fn exec(allocator: &mem.Allocator, args: []const []const u8) !os.ChildProcess.ExecResult {
|
||||
const result = try os.ChildProcess.exec(allocator, args, null, null, max_doc_file_size);
|
||||
switch (result.term) {
|
||||
|
@ -61,7 +61,7 @@ fn cat_file(stdout: &io.File, file: &io.File) !void {
|
||||
}
|
||||
}
|
||||
|
||||
fn unwrapArg(arg: %[]u8) ![]u8 {
|
||||
fn unwrapArg(arg: error![]u8) ![]u8 {
|
||||
return arg catch |err| {
|
||||
warn("Unable to parse command line: {}\n", err);
|
||||
return err;
|
||||
|
@ -1,6 +1,6 @@
|
||||
const Builder = @import("std").build.Builder;
|
||||
|
||||
pub fn build(b: &Builder) !void {
|
||||
pub fn build(b: &Builder) void {
|
||||
const obj = b.addObject("base64", "base64.zig");
|
||||
|
||||
const exe = b.addCExecutable("test");
|
||||
|
@ -1,6 +1,6 @@
|
||||
const Builder = @import("std").build.Builder;
|
||||
|
||||
pub fn build(b: &Builder) !void {
|
||||
pub fn build(b: &Builder) void {
|
||||
const lib = b.addSharedLibrary("mathtest", "mathtest.zig", b.version(1, 0, 0));
|
||||
|
||||
const exe = b.addCExecutable("test");
|
||||
|
@ -14,10 +14,6 @@ const builtin = @import("builtin");
|
||||
const ArrayList = std.ArrayList;
|
||||
const c = @import("c.zig");
|
||||
|
||||
error InvalidCommandLineArguments;
|
||||
error ZigLibDirNotFound;
|
||||
error ZigInstallationNotFound;
|
||||
|
||||
const default_zig_cache_name = "zig-cache";
|
||||
|
||||
pub fn main() !void {
|
||||
@ -472,7 +468,7 @@ pub fn main2() !void {
|
||||
}
|
||||
}
|
||||
|
||||
fn printUsage(stream: &io.OutStream) !void {
|
||||
fn printUsage(stream: var) !void {
|
||||
try stream.write(
|
||||
\\Usage: zig [command] [options]
|
||||
\\
|
||||
|
@ -110,7 +110,7 @@ pub const Module = struct {
|
||||
};
|
||||
|
||||
pub fn create(allocator: &mem.Allocator, name: []const u8, root_src_path: ?[]const u8, target: &const Target,
|
||||
kind: Kind, build_mode: builtin.Mode, zig_lib_dir: []const u8, cache_dir: []const u8) %&Module
|
||||
kind: Kind, build_mode: builtin.Mode, zig_lib_dir: []const u8, cache_dir: []const u8) !&Module
|
||||
{
|
||||
var name_buffer = try Buffer.init(allocator, name);
|
||||
errdefer name_buffer.deinit();
|
||||
@ -265,6 +265,7 @@ pub const Module = struct {
|
||||
|
||||
pub fn link(self: &Module, out_file: ?[]const u8) !void {
|
||||
warn("TODO link");
|
||||
return error.Todo;
|
||||
}
|
||||
|
||||
pub fn addLinkLib(self: &Module, name: []const u8, provided_explicitly: bool) !&LinkLib {
|
||||
|
@ -12,8 +12,6 @@ const io = std.io;
|
||||
// get rid of this
|
||||
const warn = std.debug.warn;
|
||||
|
||||
error ParseError;
|
||||
|
||||
pub const Parser = struct {
|
||||
allocator: &mem.Allocator,
|
||||
tokenizer: &Tokenizer,
|
||||
@ -555,7 +553,7 @@ pub const Parser = struct {
|
||||
}
|
||||
|
||||
fn createVarDecl(self: &Parser, visib_token: &const ?Token, mut_token: &const Token, comptime_token: &const ?Token,
|
||||
extern_token: &const ?Token) %&ast.NodeVarDecl
|
||||
extern_token: &const ?Token) !&ast.NodeVarDecl
|
||||
{
|
||||
const node = try self.allocator.create(ast.NodeVarDecl);
|
||||
|
||||
@ -577,7 +575,7 @@ pub const Parser = struct {
|
||||
}
|
||||
|
||||
fn createFnProto(self: &Parser, fn_token: &const Token, extern_token: &const ?Token,
|
||||
cc_token: &const ?Token, visib_token: &const ?Token, inline_token: &const ?Token) %&ast.NodeFnProto
|
||||
cc_token: &const ?Token, visib_token: &const ?Token, inline_token: &const ?Token) !&ast.NodeFnProto
|
||||
{
|
||||
const node = try self.allocator.create(ast.NodeFnProto);
|
||||
|
||||
@ -694,7 +692,7 @@ pub const Parser = struct {
|
||||
|
||||
fn createAttachFnProto(self: &Parser, list: &ArrayList(&ast.Node), fn_token: &const Token,
|
||||
extern_token: &const ?Token, cc_token: &const ?Token, visib_token: &const ?Token,
|
||||
inline_token: &const ?Token) %&ast.NodeFnProto
|
||||
inline_token: &const ?Token) !&ast.NodeFnProto
|
||||
{
|
||||
const node = try self.createFnProto(fn_token, extern_token, cc_token, visib_token, inline_token);
|
||||
try list.append(&node.base);
|
||||
@ -702,7 +700,7 @@ pub const Parser = struct {
|
||||
}
|
||||
|
||||
fn createAttachVarDecl(self: &Parser, list: &ArrayList(&ast.Node), visib_token: &const ?Token,
|
||||
mut_token: &const Token, comptime_token: &const ?Token, extern_token: &const ?Token) %&ast.NodeVarDecl
|
||||
mut_token: &const Token, comptime_token: &const ?Token, extern_token: &const ?Token) !&ast.NodeVarDecl
|
||||
{
|
||||
const node = try self.createVarDecl(visib_token, mut_token, comptime_token, extern_token);
|
||||
try list.append(&node.base);
|
||||
@ -763,7 +761,7 @@ pub const Parser = struct {
|
||||
indent: usize,
|
||||
};
|
||||
|
||||
pub fn renderAst(self: &Parser, stream: &std.io.OutStream, root_node: &ast.NodeRoot) !void {
|
||||
pub fn renderAst(self: &Parser, stream: var, root_node: &ast.NodeRoot) !void {
|
||||
var stack = self.initUtilityArrayList(RenderAstFrame);
|
||||
defer self.deinitUtilityArrayList(stack);
|
||||
|
||||
@ -802,7 +800,7 @@ pub const Parser = struct {
|
||||
Indent: usize,
|
||||
};
|
||||
|
||||
pub fn renderSource(self: &Parser, stream: &std.io.OutStream, root_node: &ast.NodeRoot) !void {
|
||||
pub fn renderSource(self: &Parser, stream: var, root_node: &ast.NodeRoot) !void {
|
||||
var stack = self.initUtilityArrayList(RenderState);
|
||||
defer self.deinitUtilityArrayList(stack);
|
||||
|
||||
@ -1058,10 +1056,6 @@ fn testParse(source: []const u8, allocator: &mem.Allocator) ![]u8 {
|
||||
return buffer.toOwnedSlice();
|
||||
}
|
||||
|
||||
error TestFailed;
|
||||
error NondeterministicMemoryUsage;
|
||||
error MemoryLeakDetected;
|
||||
|
||||
// TODO test for memory leaks
|
||||
// TODO test for valid frees
|
||||
fn testCanonical(source: []const u8) !void {
|
||||
|
76
src/ir.cpp
@ -5442,6 +5442,10 @@ static TypeTableEntry *get_error_set_union(CodeGen *g, ErrorTableEntry **errors,
|
||||
buf_resize(&err_set_type->name, 0);
|
||||
buf_appendf(&err_set_type->name, "error{");
|
||||
|
||||
for (uint32_t i = 0, count = set1->data.error_set.err_count; i < count; i += 1) {
|
||||
assert(errors[set1->data.error_set.errors[i]->value] == set1->data.error_set.errors[i]);
|
||||
}
|
||||
|
||||
uint32_t count = set1->data.error_set.err_count;
|
||||
for (uint32_t i = 0; i < set2->data.error_set.err_count; i += 1) {
|
||||
ErrorTableEntry *error_entry = set2->data.error_set.errors[i];
|
||||
@ -5523,6 +5527,8 @@ static IrInstruction *ir_gen_err_set_decl(IrBuilder *irb, Scope *parent_scope, A
|
||||
err_set_type->data.error_set.errors = allocate<ErrorTableEntry *>(err_count);
|
||||
}
|
||||
|
||||
ErrorTableEntry **errors = allocate<ErrorTableEntry *>(irb->codegen->errors_by_index.length + err_count);
|
||||
|
||||
for (uint32_t i = 0; i < err_count; i += 1) {
|
||||
AstNode *symbol_node = node->data.err_set_decl.decls.at(i);
|
||||
assert(symbol_node->type == NodeTypeSymbol);
|
||||
@ -5543,7 +5549,16 @@ static IrInstruction *ir_gen_err_set_decl(IrBuilder *irb, Scope *parent_scope, A
|
||||
buf_ptr(err_name), error_value_count));
|
||||
}
|
||||
err_set_type->data.error_set.errors[i] = err;
|
||||
|
||||
ErrorTableEntry *prev_err = errors[err->value];
|
||||
if (prev_err != nullptr) {
|
||||
ErrorMsg *msg = add_node_error(irb->codegen, err->decl_node, buf_sprintf("duplicate error: '%s'", buf_ptr(&err->name)));
|
||||
add_error_note(irb->codegen, msg, prev_err->decl_node, buf_sprintf("other error here"));
|
||||
return irb->codegen->invalid_instruction;
|
||||
}
|
||||
errors[err->value] = err;
|
||||
}
|
||||
free(errors);
|
||||
return ir_build_const_type(irb, parent_scope, node, err_set_type);
|
||||
}
|
||||
|
||||
@ -6512,6 +6527,7 @@ static TypeTableEntry *get_error_set_intersection(IrAnalyze *ira, TypeTableEntry
|
||||
ErrorTableEntry **errors = allocate<ErrorTableEntry *>(ira->codegen->errors_by_index.length);
|
||||
for (uint32_t i = 0; i < set1->data.error_set.err_count; i += 1) {
|
||||
ErrorTableEntry *error_entry = set1->data.error_set.errors[i];
|
||||
assert(errors[error_entry->value] == nullptr);
|
||||
errors[error_entry->value] = error_entry;
|
||||
}
|
||||
ZigList<ErrorTableEntry *> intersection_list = {};
|
||||
@ -6653,6 +6669,7 @@ static ConstCastOnly types_match_const_cast_only(IrAnalyze *ira, TypeTableEntry
|
||||
ErrorTableEntry **errors = allocate<ErrorTableEntry *>(g->errors_by_index.length);
|
||||
for (uint32_t i = 0; i < container_set->data.error_set.err_count; i += 1) {
|
||||
ErrorTableEntry *error_entry = container_set->data.error_set.errors[i];
|
||||
assert(errors[error_entry->value] == nullptr);
|
||||
errors[error_entry->value] = error_entry;
|
||||
}
|
||||
for (uint32_t i = 0; i < contained_set->data.error_set.err_count; i += 1) {
|
||||
@ -6767,6 +6784,12 @@ static ImplicitCastMatchResult ir_types_match_with_implicit_cast(IrAnalyze *ira,
|
||||
buf_sprintf("unable to cast global error set into smaller set"));
|
||||
return ImplicitCastMatchResultReportedError;
|
||||
}
|
||||
} else if (const_cast_result.id == ConstCastResultIdErrSetGlobal) {
|
||||
ErrorMsg *msg = ir_add_error(ira, value,
|
||||
buf_sprintf("expected '%s', found '%s'", buf_ptr(&expected_type->name), buf_ptr(&actual_type->name)));
|
||||
add_error_note(ira->codegen, msg, value->source_node,
|
||||
buf_sprintf("unable to cast global error set into smaller set"));
|
||||
return ImplicitCastMatchResultReportedError;
|
||||
}
|
||||
if (missing_errors != nullptr) {
|
||||
ErrorMsg *msg = ir_add_error(ira, value,
|
||||
@ -6995,6 +7018,12 @@ static ImplicitCastMatchResult ir_types_match_with_implicit_cast(IrAnalyze *ira,
|
||||
return ImplicitCastMatchResultNo;
|
||||
}
|
||||
|
||||
static void update_errors_helper(CodeGen *g, ErrorTableEntry ***errors, size_t *errors_count) {
|
||||
size_t old_errors_count = *errors_count;
|
||||
*errors_count = g->errors_by_index.length;
|
||||
*errors = reallocate(*errors, old_errors_count, *errors_count);
|
||||
}
|
||||
|
||||
static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_node, IrInstruction **instructions, size_t instruction_count) {
|
||||
assert(instruction_count >= 1);
|
||||
IrInstruction *prev_inst = instructions[0];
|
||||
@ -7002,6 +7031,7 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
|
||||
return ira->codegen->builtin_types.entry_invalid;
|
||||
}
|
||||
ErrorTableEntry **errors = nullptr;
|
||||
size_t errors_count = 0;
|
||||
TypeTableEntry *err_set_type = nullptr;
|
||||
if (prev_inst->value.type->id == TypeTableEntryIdErrorSet) {
|
||||
if (type_is_global_error_set(prev_inst->value.type)) {
|
||||
@ -7011,9 +7041,11 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
|
||||
if (!resolve_inferred_error_set(ira, err_set_type, prev_inst->source_node)) {
|
||||
return ira->codegen->builtin_types.entry_invalid;
|
||||
}
|
||||
errors = allocate<ErrorTableEntry *>(ira->codegen->errors_by_index.length);
|
||||
update_errors_helper(ira->codegen, &errors, &errors_count);
|
||||
|
||||
for (uint32_t i = 0; i < err_set_type->data.error_set.err_count; i += 1) {
|
||||
ErrorTableEntry *error_entry = err_set_type->data.error_set.errors[i];
|
||||
assert(errors[error_entry->value] == nullptr);
|
||||
errors[error_entry->value] = error_entry;
|
||||
}
|
||||
}
|
||||
@ -7064,6 +7096,9 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
|
||||
continue;
|
||||
}
|
||||
|
||||
// number of declared errors might have increased now
|
||||
update_errors_helper(ira->codegen, &errors, &errors_count);
|
||||
|
||||
// if err_set_type is a superset of cur_type, keep err_set_type.
|
||||
// if cur_type is a superset of err_set_type, switch err_set_type to cur_type
|
||||
bool prev_is_superset = true;
|
||||
@ -7084,8 +7119,12 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
|
||||
ErrorTableEntry *error_entry = err_set_type->data.error_set.errors[i];
|
||||
errors[error_entry->value] = nullptr;
|
||||
}
|
||||
for (uint32_t i = 0, count = ira->codegen->errors_by_index.length; i < count; i += 1) {
|
||||
assert(errors[i] == nullptr);
|
||||
}
|
||||
for (uint32_t i = 0; i < cur_type->data.error_set.err_count; i += 1) {
|
||||
ErrorTableEntry *error_entry = cur_type->data.error_set.errors[i];
|
||||
assert(errors[error_entry->value] == nullptr);
|
||||
errors[error_entry->value] = error_entry;
|
||||
}
|
||||
bool cur_is_superset = true;
|
||||
@ -7122,14 +7161,21 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
|
||||
prev_inst = cur_inst;
|
||||
continue;
|
||||
}
|
||||
|
||||
update_errors_helper(ira->codegen, &errors, &errors_count);
|
||||
|
||||
// test if err_set_type is a subset of cur_type's error set
|
||||
// unset everything in errors
|
||||
for (uint32_t i = 0; i < err_set_type->data.error_set.err_count; i += 1) {
|
||||
ErrorTableEntry *error_entry = err_set_type->data.error_set.errors[i];
|
||||
errors[error_entry->value] = nullptr;
|
||||
}
|
||||
for (uint32_t i = 0, count = ira->codegen->errors_by_index.length; i < count; i += 1) {
|
||||
assert(errors[i] == nullptr);
|
||||
}
|
||||
for (uint32_t i = 0; i < cur_err_set_type->data.error_set.err_count; i += 1) {
|
||||
ErrorTableEntry *error_entry = cur_err_set_type->data.error_set.errors[i];
|
||||
assert(errors[error_entry->value] == nullptr);
|
||||
errors[error_entry->value] = error_entry;
|
||||
}
|
||||
bool cur_is_superset = true;
|
||||
@ -7173,15 +7219,18 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
|
||||
if (!resolve_inferred_error_set(ira, cur_type, cur_inst->source_node)) {
|
||||
return ira->codegen->builtin_types.entry_invalid;
|
||||
}
|
||||
|
||||
update_errors_helper(ira->codegen, &errors, &errors_count);
|
||||
|
||||
if (err_set_type == nullptr) {
|
||||
if (prev_type->id == TypeTableEntryIdErrorUnion) {
|
||||
err_set_type = prev_type->data.error_union.err_set_type;
|
||||
} else {
|
||||
err_set_type = cur_type;
|
||||
}
|
||||
errors = allocate<ErrorTableEntry *>(ira->codegen->errors_by_index.length);
|
||||
for (uint32_t i = 0; i < err_set_type->data.error_set.err_count; i += 1) {
|
||||
ErrorTableEntry *error_entry = err_set_type->data.error_set.errors[i];
|
||||
assert(errors[error_entry->value] == nullptr);
|
||||
errors[error_entry->value] = error_entry;
|
||||
}
|
||||
if (err_set_type == cur_type) {
|
||||
@ -7237,11 +7286,13 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
|
||||
continue;
|
||||
}
|
||||
|
||||
update_errors_helper(ira->codegen, &errors, &errors_count);
|
||||
|
||||
if (err_set_type == nullptr) {
|
||||
err_set_type = prev_err_set_type;
|
||||
errors = allocate<ErrorTableEntry *>(ira->codegen->errors_by_index.length);
|
||||
for (uint32_t i = 0; i < prev_err_set_type->data.error_set.err_count; i += 1) {
|
||||
ErrorTableEntry *error_entry = prev_err_set_type->data.error_set.errors[i];
|
||||
assert(errors[error_entry->value] == nullptr);
|
||||
errors[error_entry->value] = error_entry;
|
||||
}
|
||||
}
|
||||
@ -7262,8 +7313,12 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
|
||||
ErrorTableEntry *error_entry = err_set_type->data.error_set.errors[i];
|
||||
errors[error_entry->value] = nullptr;
|
||||
}
|
||||
for (uint32_t i = 0, count = ira->codegen->errors_by_index.length; i < count; i += 1) {
|
||||
assert(errors[i] == nullptr);
|
||||
}
|
||||
for (uint32_t i = 0; i < cur_err_set_type->data.error_set.err_count; i += 1) {
|
||||
ErrorTableEntry *error_entry = cur_err_set_type->data.error_set.errors[i];
|
||||
assert(errors[error_entry->value] == nullptr);
|
||||
errors[error_entry->value] = error_entry;
|
||||
}
|
||||
bool cur_is_superset = true;
|
||||
@ -7331,6 +7386,8 @@ static TypeTableEntry *ir_resolve_peer_types(IrAnalyze *ira, AstNode *source_nod
|
||||
continue;
|
||||
}
|
||||
|
||||
update_errors_helper(ira->codegen, &errors, &errors_count);
|
||||
|
||||
err_set_type = get_error_set_union(ira->codegen, errors, err_set_type, cur_err_set_type);
|
||||
}
|
||||
prev_inst = cur_inst;
|
||||
@ -8000,6 +8057,7 @@ static IrInstruction *ir_analyze_err_set_cast(IrAnalyze *ira, IrInstruction *sou
|
||||
ErrorTableEntry **errors = allocate<ErrorTableEntry *>(ira->codegen->errors_by_index.length);
|
||||
for (uint32_t i = 0; i < container_set->data.error_set.err_count; i += 1) {
|
||||
ErrorTableEntry *error_entry = container_set->data.error_set.errors[i];
|
||||
assert(errors[error_entry->value] == nullptr);
|
||||
errors[error_entry->value] = error_entry;
|
||||
}
|
||||
ErrorMsg *err_msg = nullptr;
|
||||
@ -10212,8 +10270,9 @@ static TypeTableEntry *ir_analyze_merge_error_sets(IrAnalyze *ira, IrInstruction
|
||||
}
|
||||
|
||||
ErrorTableEntry **errors = allocate<ErrorTableEntry *>(ira->codegen->errors_by_index.length);
|
||||
for (uint32_t i = 0; i < op1_type->data.error_set.err_count; i += 1) {
|
||||
for (uint32_t i = 0, count = op1_type->data.error_set.err_count; i < count; i += 1) {
|
||||
ErrorTableEntry *error_entry = op1_type->data.error_set.errors[i];
|
||||
assert(errors[error_entry->value] == nullptr);
|
||||
errors[error_entry->value] = error_entry;
|
||||
}
|
||||
TypeTableEntry *result_type = get_error_set_union(ira->codegen, errors, op1_type, op2_type);
|
||||
@ -14987,6 +15046,15 @@ static TypeTableEntry *ir_analyze_instruction_member_count(IrAnalyze *ira, IrIns
|
||||
result = container_type->data.structure.src_field_count;
|
||||
} else if (container_type->id == TypeTableEntryIdUnion) {
|
||||
result = container_type->data.unionation.src_field_count;
|
||||
} else if (container_type->id == TypeTableEntryIdErrorSet) {
|
||||
if (!resolve_inferred_error_set(ira, container_type, instruction->base.source_node)) {
|
||||
return ira->codegen->builtin_types.entry_invalid;
|
||||
}
|
||||
if (type_is_global_error_set(container_type)) {
|
||||
ir_add_error(ira, &instruction->base, buf_sprintf("global error set member count not available at comptime"));
|
||||
return ira->codegen->builtin_types.entry_invalid;
|
||||
}
|
||||
result = container_type->data.error_set.err_count;
|
||||
} else {
|
||||
ir_add_error(ira, &instruction->base, buf_sprintf("no value count available for type '%s'", buf_ptr(&container_type->name)));
|
||||
return ira->codegen->builtin_types.entry_invalid;
|
||||
|
19
src/util.hpp
@ -91,20 +91,23 @@ static inline void safe_memcpy(T *dest, const T *src, size_t count) {
|
||||
#endif
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
static inline T *reallocate(T *old, size_t old_count, size_t new_count) {
|
||||
T *ptr = reinterpret_cast<T*>(realloc(old, new_count * sizeof(T)));
|
||||
if (!ptr)
|
||||
zig_panic("allocation failed");
|
||||
if (new_count > old_count) {
|
||||
memset(&ptr[old_count], 0, (new_count - old_count) * sizeof(T));
|
||||
}
|
||||
return ptr;
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
static inline T *reallocate_nonzero(T *old, size_t old_count, size_t new_count) {
|
||||
#ifdef NDEBUG
|
||||
T *ptr = reinterpret_cast<T*>(realloc(old, new_count * sizeof(T)));
|
||||
if (!ptr)
|
||||
zig_panic("allocation failed");
|
||||
return ptr;
|
||||
#else
|
||||
// manually assign every element to trigger compile error for non-copyable structs
|
||||
T *ptr = allocate_nonzero<T>(new_count);
|
||||
safe_memcpy(ptr, old, old_count);
|
||||
free(old);
|
||||
return ptr;
|
||||
#endif
|
||||
}
|
||||
|
||||
template <typename T, size_t n>
|
||||
|
@ -271,7 +271,7 @@ pub const Builder = struct {
|
||||
return &self.uninstall_tls.step;
|
||||
}
|
||||
|
||||
fn makeUninstall(uninstall_step: &Step) !void {
|
||||
fn makeUninstall(uninstall_step: &Step) error!void {
|
||||
const uninstall_tls = @fieldParentPtr(TopLevelStep, "step", uninstall_step);
|
||||
const self = @fieldParentPtr(Builder, "uninstall_tls", uninstall_tls);
|
||||
|
||||
@ -285,7 +285,7 @@ pub const Builder = struct {
|
||||
// TODO remove empty directories
|
||||
}
|
||||
|
||||
fn makeOneStep(self: &Builder, s: &Step) !void {
|
||||
fn makeOneStep(self: &Builder, s: &Step) error!void {
|
||||
if (s.loop_flag) {
|
||||
warn("Dependency loop detected:\n {}\n", s.name);
|
||||
return error.DependencyLoopDetected;
|
||||
@ -1910,7 +1910,7 @@ pub const LogStep = struct {
|
||||
};
|
||||
}
|
||||
|
||||
fn make(step: &Step) !void {
|
||||
fn make(step: &Step) error!void {
|
||||
const self = @fieldParentPtr(LogStep, "step", step);
|
||||
warn("{}", self.data);
|
||||
}
|
||||
@ -1972,7 +1972,7 @@ pub const Step = struct {
|
||||
self.dependencies.append(other) catch unreachable;
|
||||
}
|
||||
|
||||
fn makeNoOp(self: &Step) (error{}!void) {}
|
||||
fn makeNoOp(self: &Step) error!void {}
|
||||
};
|
||||
|
||||
fn doAtomicSymLinks(allocator: &Allocator, output_path: []const u8, filename_major_only: []const u8,
|
||||
|
@ -510,7 +510,7 @@ pub fn allocPrint(allocator: &mem.Allocator, comptime fmt: []const u8, args: ...
|
||||
return bufPrint(buf, fmt, args);
|
||||
}
|
||||
|
||||
fn countSize(size: &usize, bytes: []const u8) !void {
|
||||
fn countSize(size: &usize, bytes: []const u8) (error{}!void) {
|
||||
*size += bytes.len;
|
||||
}
|
||||
|
||||
|
@ -694,13 +694,13 @@ pub const BufferOutStream = struct {
|
||||
pub fn init(buffer: &Buffer) BufferOutStream {
|
||||
return BufferOutStream {
|
||||
.buffer = buffer,
|
||||
.stream = OutStream {
|
||||
.stream = Stream {
|
||||
.writeFn = writeFn,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
fn writeFn(out_stream: &OutStream, bytes: []const u8) !void {
|
||||
fn writeFn(out_stream: &Stream, bytes: []const u8) !void {
|
||||
const self = @fieldParentPtr(BufferOutStream, "stream", out_stream);
|
||||
return self.buffer.append(bytes);
|
||||
}
|
||||
|
@ -55,7 +55,22 @@ pub const ChildProcess = struct {
|
||||
llnode: if (is_windows) void else LinkedList(&ChildProcess).Node,
|
||||
|
||||
pub const SpawnError = error {
|
||||
|
||||
ProcessFdQuotaExceeded,
|
||||
Unexpected,
|
||||
NotDir,
|
||||
SystemResources,
|
||||
FileNotFound,
|
||||
NameTooLong,
|
||||
SymLinkLoop,
|
||||
FileSystem,
|
||||
OutOfMemory,
|
||||
AccessDenied,
|
||||
PermissionDenied,
|
||||
InvalidUserId,
|
||||
ResourceLimitReached,
|
||||
InvalidExe,
|
||||
IsDir,
|
||||
FileBusy,
|
||||
};
|
||||
|
||||
pub const Term = union(enum) {
|
||||
@ -313,7 +328,7 @@ pub const ChildProcess = struct {
|
||||
// Here we potentially return the fork child's error
|
||||
// from the parent pid.
|
||||
if (err_int != @maxValue(ErrInt)) {
|
||||
return error(err_int);
|
||||
return SpawnError(err_int);
|
||||
}
|
||||
|
||||
return statusToTerm(status);
|
||||
@ -757,7 +772,7 @@ fn destroyPipe(pipe: &const [2]i32) void {
|
||||
|
||||
// Child of fork calls this to report an error to the fork parent.
|
||||
// Then the child exits.
|
||||
fn forkChildErrReport(fd: i32, err: error) noreturn {
|
||||
fn forkChildErrReport(fd: i32, err: ChildProcess.SpawnError) noreturn {
|
||||
_ = writeIntFd(fd, ErrInt(err));
|
||||
posix.exit(1);
|
||||
}
|
||||
|
@ -243,7 +243,6 @@ pub const PosixOpenError = error {
|
||||
SystemResources,
|
||||
NoSpaceLeft,
|
||||
NotDir,
|
||||
AccessDenied,
|
||||
PathAlreadyExists,
|
||||
Unexpected,
|
||||
};
|
||||
@ -411,7 +410,19 @@ pub fn posixExecve(argv: []const []const u8, env_map: &const BufMap,
|
||||
return posixExecveErrnoToErr(err);
|
||||
}
|
||||
|
||||
fn posixExecveErrnoToErr(err: usize) error {
|
||||
pub const PosixExecveError = error {
|
||||
SystemResources,
|
||||
AccessDenied,
|
||||
InvalidExe,
|
||||
FileSystem,
|
||||
IsDir,
|
||||
FileNotFound,
|
||||
NotDir,
|
||||
FileBusy,
|
||||
Unexpected,
|
||||
};
|
||||
|
||||
fn posixExecveErrnoToErr(err: usize) PosixExecveError {
|
||||
assert(err > 0);
|
||||
return switch (err) {
|
||||
posix.EFAULT => unreachable,
|
||||
@ -904,24 +915,68 @@ pub fn deleteDir(allocator: &Allocator, dir_path: []const u8) !void {
|
||||
/// removes it. If it cannot be removed because it is a non-empty directory,
|
||||
/// this function recursively removes its entries and then tries again.
|
||||
// TODO non-recursive implementation
|
||||
pub fn deleteTree(allocator: &Allocator, full_path: []const u8) !void {
|
||||
const DeleteTreeError = error {
|
||||
OutOfMemory,
|
||||
AccessDenied,
|
||||
FileTooBig,
|
||||
IsDir,
|
||||
SymLinkLoop,
|
||||
ProcessFdQuotaExceeded,
|
||||
NameTooLong,
|
||||
SystemFdQuotaExceeded,
|
||||
NoDevice,
|
||||
PathNotFound,
|
||||
SystemResources,
|
||||
NoSpaceLeft,
|
||||
PathAlreadyExists,
|
||||
ReadOnlyFileSystem,
|
||||
NotDir,
|
||||
FileNotFound,
|
||||
FileSystem,
|
||||
FileBusy,
|
||||
DirNotEmpty,
|
||||
Unexpected,
|
||||
};
|
||||
pub fn deleteTree(allocator: &Allocator, full_path: []const u8) DeleteTreeError!void {
|
||||
start_over: while (true) {
|
||||
// First, try deleting the item as a file. This way we don't follow sym links.
|
||||
if (deleteFile(allocator, full_path)) {
|
||||
return;
|
||||
} else |err| {
|
||||
if (err == error.FileNotFound)
|
||||
return;
|
||||
if (err != error.IsDir)
|
||||
return err;
|
||||
} else |err| switch (err) {
|
||||
error.FileNotFound => return,
|
||||
error.IsDir => {},
|
||||
|
||||
error.OutOfMemory,
|
||||
error.AccessDenied,
|
||||
error.SymLinkLoop,
|
||||
error.NameTooLong,
|
||||
error.SystemResources,
|
||||
error.ReadOnlyFileSystem,
|
||||
error.NotDir,
|
||||
error.FileSystem,
|
||||
error.FileBusy,
|
||||
error.Unexpected
|
||||
=> return err,
|
||||
}
|
||||
{
|
||||
var dir = Dir.open(allocator, full_path) catch |err| {
|
||||
if (err == error.FileNotFound)
|
||||
return;
|
||||
if (err == error.NotDir)
|
||||
continue :start_over;
|
||||
return err;
|
||||
var dir = Dir.open(allocator, full_path) catch |err| switch (err) {
|
||||
error.NotDir => continue :start_over,
|
||||
|
||||
error.OutOfMemory,
|
||||
error.AccessDenied,
|
||||
error.FileTooBig,
|
||||
error.IsDir,
|
||||
error.SymLinkLoop,
|
||||
error.ProcessFdQuotaExceeded,
|
||||
error.NameTooLong,
|
||||
error.SystemFdQuotaExceeded,
|
||||
error.NoDevice,
|
||||
error.PathNotFound,
|
||||
error.SystemResources,
|
||||
error.NoSpaceLeft,
|
||||
error.PathAlreadyExists,
|
||||
error.Unexpected
|
||||
=> return err,
|
||||
};
|
||||
defer dir.close();
|
||||
|
||||
@ -1252,6 +1307,8 @@ pub const ArgIteratorWindows = struct {
|
||||
quote_count: usize,
|
||||
seen_quote_count: usize,
|
||||
|
||||
pub const NextError = error{OutOfMemory};
|
||||
|
||||
pub fn init() ArgIteratorWindows {
|
||||
return initWithCmdLine(windows.GetCommandLineA());
|
||||
}
|
||||
@ -1267,7 +1324,7 @@ pub const ArgIteratorWindows = struct {
|
||||
}
|
||||
|
||||
/// You must free the returned memory when done.
|
||||
pub fn next(self: &ArgIteratorWindows, allocator: &Allocator) ?(@typeOf(internalNext).ReturnType.ErrorSet![]u8) {
|
||||
pub fn next(self: &ArgIteratorWindows, allocator: &Allocator) ?(NextError![]u8) {
|
||||
// march forward over whitespace
|
||||
while (true) : (self.index += 1) {
|
||||
const byte = self.cmd_line[self.index];
|
||||
@ -1320,7 +1377,7 @@ pub const ArgIteratorWindows = struct {
|
||||
}
|
||||
}
|
||||
|
||||
fn internalNext(self: &ArgIteratorWindows, allocator: &Allocator) ![]u8 {
|
||||
fn internalNext(self: &ArgIteratorWindows, allocator: &Allocator) NextError![]u8 {
|
||||
var buf = try Buffer.initSize(allocator, 0);
|
||||
defer buf.deinit();
|
||||
|
||||
@ -1394,16 +1451,20 @@ pub const ArgIteratorWindows = struct {
|
||||
};
|
||||
|
||||
pub const ArgIterator = struct {
|
||||
inner: if (builtin.os == Os.windows) ArgIteratorWindows else ArgIteratorPosix,
|
||||
const InnerType = if (builtin.os == Os.windows) ArgIteratorWindows else ArgIteratorPosix;
|
||||
|
||||
inner: InnerType,
|
||||
|
||||
pub fn init() ArgIterator {
|
||||
return ArgIterator {
|
||||
.inner = if (builtin.os == Os.windows) ArgIteratorWindows.init() else ArgIteratorPosix.init(),
|
||||
.inner = InnerType.init(),
|
||||
};
|
||||
}
|
||||
|
||||
pub const NextError = ArgIteratorWindows.NextError;
|
||||
|
||||
/// You must free the returned memory when done.
|
||||
pub fn next(self: &ArgIterator, allocator: &Allocator) ?![]u8 {
|
||||
pub fn next(self: &ArgIterator, allocator: &Allocator) ?(NextError![]u8) {
|
||||
if (builtin.os == Os.windows) {
|
||||
return self.inner.next(allocator);
|
||||
} else {
|
||||
|
@ -30,7 +30,6 @@ pub fn windowsClose(handle: windows.HANDLE) void {
|
||||
pub const WriteError = error {
|
||||
SystemResources,
|
||||
OperationAborted,
|
||||
SystemResources,
|
||||
IoPending,
|
||||
BrokenPipe,
|
||||
Unexpected,
|
||||
@ -83,6 +82,8 @@ pub const OpenError = error {
|
||||
AccessDenied,
|
||||
PipeBusy,
|
||||
Unexpected,
|
||||
OutOfMemory,
|
||||
NameTooLong,
|
||||
};
|
||||
|
||||
/// `file_path` may need to be copied in memory to add a null terminating byte. In this case
|
||||
|
@ -77,7 +77,7 @@ fn callMain() u8 {
|
||||
},
|
||||
builtin.TypeId.Int => {
|
||||
if (@typeOf(root.main).ReturnType.bit_count != 8) {
|
||||
@compileError("expected return type of main to be 'u8', 'noreturn', 'void', or '%void'");
|
||||
@compileError("expected return type of main to be 'u8', 'noreturn', 'void', or '!void'");
|
||||
}
|
||||
return root.main();
|
||||
},
|
||||
@ -91,6 +91,6 @@ fn callMain() u8 {
|
||||
};
|
||||
return 0;
|
||||
},
|
||||
else => @compileError("expected return type of main to be 'u8', 'noreturn', 'void', or '%void'"),
|
||||
else => @compileError("expected return type of main to be 'u8', 'noreturn', 'void', or '!void'"),
|
||||
}
|
||||
}
|
||||
|
@ -1,5 +1,6 @@
|
||||
const root = @import("@build");
|
||||
const std = @import("std");
|
||||
const builtin = @import("builtin");
|
||||
const io = std.io;
|
||||
const fmt = std.fmt;
|
||||
const os = std.os;
|
||||
@ -43,14 +44,14 @@ pub fn main() !void {
|
||||
|
||||
var stderr_file = io.getStdErr();
|
||||
var stderr_file_stream: io.FileOutStream = undefined;
|
||||
var stderr_stream: %&io.OutStream = if (stderr_file) |*f| x: {
|
||||
var stderr_stream = if (stderr_file) |*f| x: {
|
||||
stderr_file_stream = io.FileOutStream.init(f);
|
||||
break :x &stderr_file_stream.stream;
|
||||
} else |err| err;
|
||||
|
||||
var stdout_file = io.getStdOut();
|
||||
var stdout_file_stream: io.FileOutStream = undefined;
|
||||
var stdout_stream: %&io.OutStream = if (stdout_file) |*f| x: {
|
||||
var stdout_stream = if (stdout_file) |*f| x: {
|
||||
stdout_file_stream = io.FileOutStream.init(f);
|
||||
break :x &stdout_file_stream.stream;
|
||||
} else |err| err;
|
||||
@ -110,7 +111,7 @@ pub fn main() !void {
|
||||
}
|
||||
|
||||
builder.setInstallPrefix(prefix);
|
||||
try root.build(&builder);
|
||||
try runBuild(&builder);
|
||||
|
||||
if (builder.validateUserInputDidItFail())
|
||||
return usageAndErr(&builder, true, try stderr_stream);
|
||||
@ -123,11 +124,19 @@ pub fn main() !void {
|
||||
};
|
||||
}
|
||||
|
||||
fn usage(builder: &Builder, already_ran_build: bool, out_stream: &io.OutStream) !void {
|
||||
fn runBuild(builder: &Builder) error!void {
|
||||
switch (@typeId(@typeOf(root.build).ReturnType)) {
|
||||
builtin.TypeId.Void => root.build(builder),
|
||||
builtin.TypeId.ErrorUnion => try root.build(builder),
|
||||
else => @compileError("expected return type of build to be 'void' or '!void'"),
|
||||
}
|
||||
}
|
||||
|
||||
fn usage(builder: &Builder, already_ran_build: bool, out_stream: var) !void {
|
||||
// run the build script to collect the options
|
||||
if (!already_ran_build) {
|
||||
builder.setInstallPrefix(null);
|
||||
try root.build(builder);
|
||||
try runBuild(builder);
|
||||
}
|
||||
|
||||
// This usage text has to be synchronized with src/main.cpp
|
||||
@ -181,12 +190,14 @@ fn usage(builder: &Builder, already_ran_build: bool, out_stream: &io.OutStream)
|
||||
);
|
||||
}
|
||||
|
||||
fn usageAndErr(builder: &Builder, already_ran_build: bool, out_stream: &io.OutStream) error {
|
||||
fn usageAndErr(builder: &Builder, already_ran_build: bool, out_stream: var) error {
|
||||
usage(builder, already_ran_build, out_stream) catch {};
|
||||
return error.InvalidArgs;
|
||||
}
|
||||
|
||||
fn unwrapArg(arg: %[]u8) ![]u8 {
|
||||
const UnwrapArgError = error {OutOfMemory};
|
||||
|
||||
fn unwrapArg(arg: UnwrapArgError![]u8) UnwrapArgError![]u8 {
|
||||
return arg catch |err| {
|
||||
warn("Unable to parse command line: {}\n", err);
|
||||
return err;
|
||||
|
@ -1,5 +1,7 @@
|
||||
const assert = @import("std").debug.assert;
|
||||
const mem = @import("std").mem;
|
||||
const std = @import("std");
|
||||
const assert = std.debug.assert;
|
||||
const mem = std.mem;
|
||||
const builtin = @import("builtin");
|
||||
|
||||
pub fn foo() error!i32 {
|
||||
const x = try bar();
|
||||
@ -74,3 +76,35 @@ fn doErrReturnInAssignment() error!void {
|
||||
fn makeANonErr() error!i32 {
|
||||
return 1;
|
||||
}
|
||||
|
||||
test "error union type " {
|
||||
testErrorUnionType();
|
||||
comptime testErrorUnionType();
|
||||
}
|
||||
|
||||
fn testErrorUnionType() void {
|
||||
const x: error!i32 = 1234;
|
||||
if (x) |value| assert(value == 1234) else |_| unreachable;
|
||||
assert(@typeId(@typeOf(x)) == builtin.TypeId.ErrorUnion);
|
||||
assert(@typeId(@typeOf(x).ErrorSet) == builtin.TypeId.ErrorSet);
|
||||
assert(@typeOf(x).ErrorSet == error);
|
||||
}
|
||||
|
||||
test "error set type " {
|
||||
testErrorSetType();
|
||||
comptime testErrorSetType();
|
||||
}
|
||||
|
||||
const MyErrSet = error {OutOfMemory, FileNotFound};
|
||||
|
||||
fn testErrorSetType() void {
|
||||
assert(@memberCount(MyErrSet) == 2);
|
||||
|
||||
const a: MyErrSet!i32 = 5678;
|
||||
const b: MyErrSet!i32 = MyErrSet.OutOfMemory;
|
||||
|
||||
if (a) |value| assert(value == 5678) else |err| switch (err) {
|
||||
error.OutOfMemory => unreachable,
|
||||
error.FileNotFound => unreachable,
|
||||
}
|
||||
}
|
||||
|
@ -15,7 +15,7 @@ pub fn addCases(cases: &tests.CompareOutputContext) void {
|
||||
\\use @import("std").io;
|
||||
\\use @import("foo.zig");
|
||||
\\
|
||||
\\pub fn main() !void {
|
||||
\\pub fn main() void {
|
||||
\\ privateFunction();
|
||||
\\ const stdout = &(FileOutStream.init(&(getStdOut() catch unreachable)).stream);
|
||||
\\ stdout.print("OK 2\n") catch unreachable;
|
||||
@ -49,7 +49,7 @@ pub fn addCases(cases: &tests.CompareOutputContext) void {
|
||||
\\use @import("foo.zig");
|
||||
\\use @import("bar.zig");
|
||||
\\
|
||||
\\pub fn main() !void {
|
||||
\\pub fn main() void {
|
||||
\\ foo_function();
|
||||
\\ bar_function();
|
||||
\\}
|
||||
@ -89,7 +89,7 @@ pub fn addCases(cases: &tests.CompareOutputContext) void {
|
||||
var tc = cases.create("two files use import each other",
|
||||
\\use @import("a.zig");
|
||||
\\
|
||||
\\pub fn main() !void {
|
||||
\\pub fn main() void {
|
||||
\\ ok();
|
||||
\\}
|
||||
, "OK\n");
|
||||
@ -118,7 +118,7 @@ pub fn addCases(cases: &tests.CompareOutputContext) void {
|
||||
cases.add("hello world without libc",
|
||||
\\const io = @import("std").io;
|
||||
\\
|
||||
\\pub fn main() !void {
|
||||
\\pub fn main() void {
|
||||
\\ const stdout = &(io.FileOutStream.init(&(io.getStdOut() catch unreachable)).stream);
|
||||
\\ stdout.print("Hello, world!\n{d4} {x3} {c}\n", u32(12), u16(0x12), u8('a')) catch unreachable;
|
||||
\\}
|
||||
@ -268,7 +268,7 @@ pub fn addCases(cases: &tests.CompareOutputContext) void {
|
||||
\\const z = io.stdin_fileno;
|
||||
\\const x : @typeOf(y) = 1234;
|
||||
\\const y : u16 = 5678;
|
||||
\\pub fn main() !void {
|
||||
\\pub fn main() void {
|
||||
\\ var x_local : i32 = print_ok(x);
|
||||
\\}
|
||||
\\fn print_ok(val: @typeOf(x)) @typeOf(foo) {
|
||||
@ -351,7 +351,7 @@ pub fn addCases(cases: &tests.CompareOutputContext) void {
|
||||
\\ fn method(b: &const Bar) bool { return true; }
|
||||
\\};
|
||||
\\
|
||||
\\pub fn main() !void {
|
||||
\\pub fn main() void {
|
||||
\\ const bar = Bar {.field2 = 13,};
|
||||
\\ const foo = Foo {.field1 = bar,};
|
||||
\\ const stdout = &(io.FileOutStream.init(&(io.getStdOut() catch unreachable)).stream);
|
||||
@ -367,7 +367,7 @@ pub fn addCases(cases: &tests.CompareOutputContext) void {
|
||||
|
||||
cases.add("defer with only fallthrough",
|
||||
\\const io = @import("std").io;
|
||||
\\pub fn main() !void {
|
||||
\\pub fn main() void {
|
||||
\\ const stdout = &(io.FileOutStream.init(&(io.getStdOut() catch unreachable)).stream);
|
||||
\\ stdout.print("before\n") catch unreachable;
|
||||
\\ defer stdout.print("defer1\n") catch unreachable;
|
||||
@ -380,7 +380,7 @@ pub fn addCases(cases: &tests.CompareOutputContext) void {
|
||||
cases.add("defer with return",
|
||||
\\const io = @import("std").io;
|
||||
\\const os = @import("std").os;
|
||||
\\pub fn main() !void {
|
||||
\\pub fn main() void {
|
||||
\\ const stdout = &(io.FileOutStream.init(&(io.getStdOut() catch unreachable)).stream);
|
||||
\\ stdout.print("before\n") catch unreachable;
|
||||
\\ defer stdout.print("defer1\n") catch unreachable;
|
||||
@ -394,7 +394,7 @@ pub fn addCases(cases: &tests.CompareOutputContext) void {
|
||||
|
||||
cases.add("errdefer and it fails",
|
||||
\\const io = @import("std").io;
|
||||
\\pub fn main() !void {
|
||||
\\pub fn main() void {
|
||||
\\ do_test() catch return;
|
||||
\\}
|
||||
\\fn do_test() !void {
|
||||
@ -406,7 +406,6 @@ pub fn addCases(cases: &tests.CompareOutputContext) void {
|
||||
\\ defer stdout.print("defer3\n") catch unreachable;
|
||||
\\ stdout.print("after\n") catch unreachable;
|
||||
\\}
|
||||
\\error IToldYouItWouldFail;
|
||||
\\fn its_gonna_fail() !void {
|
||||
\\ return error.IToldYouItWouldFail;
|
||||
\\}
|
||||
@ -414,7 +413,7 @@ pub fn addCases(cases: &tests.CompareOutputContext) void {
|
||||
|
||||
cases.add("errdefer and it passes",
|
||||
\\const io = @import("std").io;
|
||||
\\pub fn main() !void {
|
||||
\\pub fn main() void {
|
||||
\\ do_test() catch return;
|
||||
\\}
|
||||
\\fn do_test() !void {
|
||||
@ -426,7 +425,7 @@ pub fn addCases(cases: &tests.CompareOutputContext) void {
|
||||
\\ defer stdout.print("defer3\n") catch unreachable;
|
||||
\\ stdout.print("after\n") catch unreachable;
|
||||
\\}
|
||||
\\fn its_gonna_pass() %void { }
|
||||
\\fn its_gonna_pass() error!void { }
|
||||
, "before\nafter\ndefer3\ndefer1\n");
|
||||
|
||||
cases.addCase(x: {
|
||||
@ -434,7 +433,7 @@ pub fn addCases(cases: &tests.CompareOutputContext) void {
|
||||
\\const foo_txt = @embedFile("foo.txt");
|
||||
\\const io = @import("std").io;
|
||||
\\
|
||||
\\pub fn main() !void {
|
||||
\\pub fn main() void {
|
||||
\\ const stdout = &(io.FileOutStream.init(&(io.getStdOut() catch unreachable)).stream);
|
||||
\\ stdout.print(foo_txt) catch unreachable;
|
||||
\\}
|
||||
|
@ -1,6 +1,25 @@
|
||||
const tests = @import("tests.zig");
|
||||
|
||||
pub fn addCases(cases: &tests.CompileErrorContext) void {
|
||||
cases.add("@memberCount of error",
|
||||
\\comptime {
|
||||
\\ _ = @memberCount(error);
|
||||
\\}
|
||||
,
|
||||
".tmp_source.zig:2:9: error: global error set member count not available at comptime");
|
||||
|
||||
cases.add("duplicate error value in error set",
|
||||
\\const Foo = error {
|
||||
\\ Bar,
|
||||
\\ Bar,
|
||||
\\};
|
||||
\\export fn entry() void {
|
||||
\\ const a: Foo = undefined;
|
||||
\\}
|
||||
,
|
||||
".tmp_source.zig:3:5: error: duplicate error: 'Bar'",
|
||||
".tmp_source.zig:2:5: note: other error here");
|
||||
|
||||
cases.add("duplicate struct field",
|
||||
\\const Foo = struct {
|
||||
\\ Bar: i32,
|
||||
@ -99,12 +118,12 @@ pub fn addCases(cases: &tests.CompileErrorContext) void {
|
||||
|
||||
cases.add("wrong return type for main",
|
||||
\\pub fn main() f32 { }
|
||||
, "error: expected return type of main to be 'u8', 'noreturn', 'void', or '%void'");
|
||||
, "error: expected return type of main to be 'u8', 'noreturn', 'void', or '!void'");
|
||||
|
||||
cases.add("double ?? on main return value",
|
||||
\\pub fn main() ??void {
|
||||
\\}
|
||||
, "error: expected return type of main to be 'u8', 'noreturn', 'void', or '%void'");
|
||||
, "error: expected return type of main to be 'u8', 'noreturn', 'void', or '!void'");
|
||||
|
||||
cases.add("bad identifier in function with struct defined inside function which references local const",
|
||||
\\export fn entry() void {
|
||||
@ -1160,7 +1179,7 @@ pub fn addCases(cases: &tests.CompileErrorContext) void {
|
||||
\\export fn f() void {
|
||||
\\ try something();
|
||||
\\}
|
||||
\\fn something() %void { }
|
||||
\\fn something() error!void { }
|
||||
,
|
||||
".tmp_source.zig:2:5: error: expected type 'void', found 'error'");
|
||||
|
||||
@ -1251,7 +1270,7 @@ pub fn addCases(cases: &tests.CompileErrorContext) void {
|
||||
, ".tmp_source.zig:3:11: error: cannot assign to constant");
|
||||
|
||||
cases.add("main function with bogus args type",
|
||||
\\pub fn main(args: [][]bogus) %void {}
|
||||
\\pub fn main(args: [][]bogus) !void {}
|
||||
, ".tmp_source.zig:1:23: error: use of undeclared identifier 'bogus'");
|
||||
|
||||
cases.add("for loop missing element param",
|
||||
@ -1391,7 +1410,7 @@ pub fn addCases(cases: &tests.CompileErrorContext) void {
|
||||
\\ const a = maybeInt() ?? return;
|
||||
\\}
|
||||
\\
|
||||
\\fn canFail() %void { }
|
||||
\\fn canFail() error!void { }
|
||||
\\
|
||||
\\pub fn maybeInt() ?i32 {
|
||||
\\ return 0;
|
||||
@ -1521,7 +1540,7 @@ pub fn addCases(cases: &tests.CompileErrorContext) void {
|
||||
\\export fn foo() void {
|
||||
\\ bar() catch unreachable;
|
||||
\\}
|
||||
\\fn bar() %i32 { return 0; }
|
||||
\\fn bar() error!i32 { return 0; }
|
||||
, ".tmp_source.zig:2:11: error: expression value is ignored");
|
||||
|
||||
cases.add("ignored statement value",
|
||||
@ -1552,7 +1571,7 @@ pub fn addCases(cases: &tests.CompileErrorContext) void {
|
||||
\\export fn foo() void {
|
||||
\\ defer bar();
|
||||
\\}
|
||||
\\fn bar() %i32 { return 0; }
|
||||
\\fn bar() error!i32 { return 0; }
|
||||
, ".tmp_source.zig:2:14: error: expression value is ignored");
|
||||
|
||||
cases.add("dereference an array",
|
||||
@ -1619,13 +1638,12 @@ pub fn addCases(cases: &tests.CompileErrorContext) void {
|
||||
, ".tmp_source.zig:2:21: error: expected pointer, found 'usize'");
|
||||
|
||||
cases.add("too many error values to cast to small integer",
|
||||
\\error A; error B; error C; error D; error E; error F; error G; error H;
|
||||
\\const u2 = @IntType(false, 2);
|
||||
\\fn foo(e: error) u2 {
|
||||
\\const Error = error { A, B, C, D, E, F, G, H };
|
||||
\\fn foo(e: Error) u2 {
|
||||
\\ return u2(e);
|
||||
\\}
|
||||
\\export fn entry() usize { return @sizeOf(@typeOf(foo)); }
|
||||
, ".tmp_source.zig:4:14: error: too many error values to fit in 'u2'");
|
||||
, ".tmp_source.zig:3:14: error: too many error values to fit in 'u2'");
|
||||
|
||||
cases.add("asm at compile time",
|
||||
\\comptime {
|
||||
@ -1808,9 +1826,9 @@ pub fn addCases(cases: &tests.CompileErrorContext) void {
|
||||
\\export fn foo() void {
|
||||
\\ while (bar()) {}
|
||||
\\}
|
||||
\\fn bar() %i32 { return 1; }
|
||||
\\fn bar() error!i32 { return 1; }
|
||||
,
|
||||
".tmp_source.zig:2:15: error: expected type 'bool', found '%i32'");
|
||||
".tmp_source.zig:2:15: error: expected type 'bool', found 'error!i32'");
|
||||
|
||||
cases.add("while expected nullable, got bool",
|
||||
\\export fn foo() void {
|
||||
@ -1824,9 +1842,9 @@ pub fn addCases(cases: &tests.CompileErrorContext) void {
|
||||
\\export fn foo() void {
|
||||
\\ while (bar()) |x| {}
|
||||
\\}
|
||||
\\fn bar() %i32 { return 1; }
|
||||
\\fn bar() error!i32 { return 1; }
|
||||
,
|
||||
".tmp_source.zig:2:15: error: expected nullable type, found '%i32'");
|
||||
".tmp_source.zig:2:15: error: expected nullable type, found 'error!i32'");
|
||||
|
||||
cases.add("while expected error union, got bool",
|
||||
\\export fn foo() void {
|
||||
|
@ -1,6 +1,6 @@
|
||||
const Builder = @import("std").build.Builder;
|
||||
|
||||
pub fn build(b: &Builder) !void {
|
||||
pub fn build(b: &Builder) void {
|
||||
const main = b.addTest("main.zig");
|
||||
main.setBuildMode(b.standardReleaseOptions());
|
||||
|
||||
|
@ -68,7 +68,12 @@ const Node = union(enum) {
|
||||
Combine: []Node,
|
||||
};
|
||||
|
||||
fn parse(tokens: &const ArrayList(Token), token_index: &usize) !Node {
|
||||
const ParseError = error {
|
||||
InvalidInput,
|
||||
OutOfMemory,
|
||||
};
|
||||
|
||||
fn parse(tokens: &const ArrayList(Token), token_index: &usize) ParseError!Node {
|
||||
const first_token = tokens.items[*token_index];
|
||||
*token_index += 1;
|
||||
|
||||
@ -132,7 +137,11 @@ fn expandString(input: []const u8, output: &Buffer) !void {
|
||||
}
|
||||
}
|
||||
|
||||
fn expandNode(node: &const Node, output: &ArrayList(Buffer)) !void {
|
||||
const ExpandNodeError = error {
|
||||
OutOfMemory,
|
||||
};
|
||||
|
||||
fn expandNode(node: &const Node, output: &ArrayList(Buffer)) ExpandNodeError!void {
|
||||
assert(output.len == 0);
|
||||
switch (*node) {
|
||||
Node.Scalar => |scalar| {
|
||||
|
@ -1,6 +1,6 @@
|
||||
const Builder = @import("std").build.Builder;
|
||||
|
||||
pub fn build(b: &Builder) !void {
|
||||
pub fn build(b: &Builder) void {
|
||||
const obj = b.addObject("test", "test.zig");
|
||||
|
||||
const test_step = b.step("test", "Test the program");
|
||||
|
@ -1,7 +1,7 @@
|
||||
const StackTrace = @import("builtin").StackTrace;
|
||||
pub fn panic(msg: []const u8, stack_trace: ?&StackTrace) noreturn { @breakpoint(); while (true) {} }
|
||||
|
||||
fn bar() %void {}
|
||||
fn bar() error!void {}
|
||||
|
||||
export fn foo() void {
|
||||
bar() catch unreachable;
|
||||
|
@ -1,6 +1,6 @@
|
||||
const Builder = @import("std").build.Builder;
|
||||
|
||||
pub fn build(b: &Builder) !void {
|
||||
pub fn build(b: &Builder) void {
|
||||
const exe = b.addExecutable("test", "test.zig");
|
||||
exe.addPackagePath("my_pkg", "pkg.zig");
|
||||
|
||||
|
@ -1,6 +1,6 @@
|
||||
const my_pkg = @import("my_pkg");
|
||||
const assert = @import("std").debug.assert;
|
||||
|
||||
pub fn main() !void {
|
||||
pub fn main() void {
|
||||
assert(my_pkg.add(10, 20) == 30);
|
||||
}
|
||||
|
@ -1,6 +1,6 @@
|
||||
const Builder = @import("std").build.Builder;
|
||||
|
||||
pub fn build(b: &Builder) !void {
|
||||
pub fn build(b: &Builder) void {
|
||||
b.addCIncludePath(".");
|
||||
|
||||
const main = b.addTest("main.zig");