parent
f1e5be9686
commit
d92ae20f45
CMakeLists.txt
@@ -216,6 +216,7 @@ install(FILES "${CMAKE_SOURCE_DIR}/std/linux_x86_64.zig" DESTINATION "${ZIG_STD_DEST}")
 install(FILES "${CMAKE_SOURCE_DIR}/std/linux_i386.zig" DESTINATION "${ZIG_STD_DEST}")
 install(FILES "${CMAKE_SOURCE_DIR}/std/mem.zig" DESTINATION "${ZIG_STD_DEST}")
 install(FILES "${CMAKE_SOURCE_DIR}/std/list.zig" DESTINATION "${ZIG_STD_DEST}")
+install(FILES "${CMAKE_SOURCE_DIR}/std/hash_map.zig" DESTINATION "${ZIG_STD_DEST}")

 add_executable(run_tests ${TEST_SOURCES})
 target_link_libraries(run_tests)
src/analyze.cpp
@@ -4014,15 +4014,28 @@ static TypeTableEntry *analyze_if(CodeGen *g, ImportTableEntry *import, BlockContext
         else_context = parent_context;
     }

-    TypeTableEntry *then_type = analyze_expression(g, import, then_context, expected_type, *then_node);
-    TypeTableEntry *else_type = analyze_expression(g, import, else_context, expected_type, *else_node);
+    TypeTableEntry *then_type = nullptr;
+    TypeTableEntry *else_type = nullptr;

-    if (then_type->id == TypeTableEntryIdInvalid || else_type->id == TypeTableEntryIdInvalid) {
-        return g->builtin_types.entry_invalid;
+    if (!then_context->codegen_excluded) {
+        then_type = analyze_expression(g, import, then_context, expected_type, *then_node);
+        if (then_type->id == TypeTableEntryIdInvalid) {
+            return g->builtin_types.entry_invalid;
+        }
+    }
+    if (!else_context->codegen_excluded) {
+        else_type = analyze_expression(g, import, else_context, expected_type, *else_node);
+        if (else_type->id == TypeTableEntryIdInvalid) {
+            return g->builtin_types.entry_invalid;
+        }
     }

     TypeTableEntry *result_type;
-    if (expected_type) {
+    if (then_context->codegen_excluded) {
+        result_type = else_type;
+    } else if (else_context->codegen_excluded) {
+        result_type = then_type;
+    } else if (expected_type) {
         result_type = (then_type->id == TypeTableEntryIdUnreachable) ? else_type : then_type;
     } else {
         AstNode *op_nodes[] = {*then_node, *else_node};
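The analyze_if change above defers analysis of each branch until it is known not to be codegen_excluded, and takes the result type from whichever branch is actually generated. A hedged Zig sketch of the kind of code this appears to enable, using names from std/hash_map.zig later in this commit; the body of increment_modification_count is not part of this diff, so the function below is only a guess at its shape, shown out of its struct context:

    // Sketch only: when want_modification_safety is comptime-false,
    // modification_count has type void, and the then-branch below can only
    // compile if the analyzer skips the excluded branch entirely.
    const want_modification_safety = !@compile_var("is_release");
    const debug_u32 = if (want_modification_safety) u32 else void;

    fn increment_modification_count(hm: &Self) {
        if (want_modification_safety) {
            hm.modification_count += 1;
        }
    }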
src/codegen.cpp
@@ -2501,53 +2501,9 @@ static void gen_var_debug_decl(CodeGen *g, VariableTableEntry *var) {
             LLVMGetInsertBlock(g->builder));
 }

-static LLVMValueRef gen_if_var_expr(CodeGen *g, AstNode *node) {
-    assert(node->type == NodeTypeIfVarExpr);
-    assert(node->data.if_var_expr.var_decl.expr);
-
-    AstNodeVariableDeclaration *var_decl = &node->data.if_var_expr.var_decl;
-    VariableTableEntry *variable = var_decl->variable;
-
-    // test if value is the maybe state
-    TypeTableEntry *expr_type = get_expr_type(var_decl->expr);
-    TypeTableEntry *child_type = expr_type->data.maybe.child_type;
-
-    LLVMValueRef init_val = gen_expr(g, var_decl->expr);
-
-    LLVMValueRef cond_value;
-    bool maybe_is_ptr = child_type->id == TypeTableEntryIdPointer || child_type->id == TypeTableEntryIdFn;
-    if (maybe_is_ptr) {
-        set_debug_source_node(g, node);
-        cond_value = LLVMBuildICmp(g->builder, LLVMIntNE, init_val, LLVMConstNull(child_type->type_ref), "");
-    } else {
-        set_debug_source_node(g, node);
-        LLVMValueRef maybe_field_ptr = LLVMBuildStructGEP(g->builder, init_val, 1, "");
-        cond_value = LLVMBuildLoad(g->builder, maybe_field_ptr, "");
-    }
-
-    AstNode *then_node = node->data.if_var_expr.then_block;
-    AstNode *else_node = node->data.if_var_expr.else_node;
-
-    TypeTableEntry *then_type = get_expr_type(then_node);
-    TypeTableEntry *else_type = get_expr_type(else_node);
-
-    bool use_then_value = type_has_bits(then_type);
-    bool use_else_value = type_has_bits(else_type);
-
-    LLVMBasicBlockRef then_block = LLVMAppendBasicBlock(g->cur_fn->fn_value, "MaybeThen");
-    LLVMBasicBlockRef else_block = LLVMAppendBasicBlock(g->cur_fn->fn_value, "MaybeElse");
-
-    LLVMBasicBlockRef endif_block;
-    bool then_endif_reachable = then_type->id != TypeTableEntryIdUnreachable;
-    bool else_endif_reachable = else_type->id != TypeTableEntryIdUnreachable;
-    if (then_endif_reachable || else_endif_reachable) {
-        endif_block = LLVMAppendBasicBlock(g->cur_fn->fn_value, "MaybeEndIf");
-    }
-
-    set_debug_source_node(g, node);
-    LLVMBuildCondBr(g->builder, cond_value, then_block, else_block);
-
-    LLVMPositionBuilderAtEnd(g->builder, then_block);
+static LLVMValueRef gen_if_var_then_block(CodeGen *g, AstNode *node, VariableTableEntry *variable, bool maybe_is_ptr,
+        LLVMValueRef init_val, TypeTableEntry *child_type, AstNode *then_node)
+{
     if (node->data.if_var_expr.var_is_ptr) {
         LLVMValueRef payload_ptr;
         if (maybe_is_ptr) {
@@ -2569,7 +2525,68 @@ static LLVMValueRef gen_if_var_expr(CodeGen *g, AstNode *node) {
     }
     gen_var_debug_decl(g, variable);

-    LLVMValueRef then_expr_result = gen_expr(g, then_node);
+    return gen_expr(g, then_node);
+}
+
+static LLVMValueRef gen_if_var_expr(CodeGen *g, AstNode *node) {
+    assert(node->type == NodeTypeIfVarExpr);
+    assert(node->data.if_var_expr.var_decl.expr);
+
+    AstNodeVariableDeclaration *var_decl = &node->data.if_var_expr.var_decl;
+    VariableTableEntry *variable = var_decl->variable;
+
+    // test if value is the maybe state
+    TypeTableEntry *expr_type = get_expr_type(var_decl->expr);
+    TypeTableEntry *child_type = expr_type->data.maybe.child_type;
+
+    LLVMValueRef init_val = gen_expr(g, var_decl->expr);
+
+
+    AstNode *then_node = node->data.if_var_expr.then_block;
+    AstNode *else_node = node->data.if_var_expr.else_node;
+    bool maybe_is_ptr = child_type->id == TypeTableEntryIdPointer || child_type->id == TypeTableEntryIdFn;
+
+    ConstExprValue *const_val = &get_resolved_expr(var_decl->expr)->const_val;
+    if (const_val->ok) {
+        if (const_val->data.x_maybe) {
+            return gen_if_var_then_block(g, node, variable, maybe_is_ptr, init_val, child_type, then_node);
+        } else {
+            return gen_expr(g, else_node);
+        }
+    }
+
+    LLVMValueRef cond_value;
+    if (maybe_is_ptr) {
+        set_debug_source_node(g, node);
+        cond_value = LLVMBuildICmp(g->builder, LLVMIntNE, init_val, LLVMConstNull(child_type->type_ref), "");
+    } else {
+        set_debug_source_node(g, node);
+        LLVMValueRef maybe_field_ptr = LLVMBuildStructGEP(g->builder, init_val, 1, "");
+        cond_value = LLVMBuildLoad(g->builder, maybe_field_ptr, "");
+    }
+
+    TypeTableEntry *then_type = get_expr_type(then_node);
+    TypeTableEntry *else_type = get_expr_type(else_node);
+
+    bool use_then_value = type_has_bits(then_type);
+    bool use_else_value = type_has_bits(else_type);
+
+    LLVMBasicBlockRef then_block = LLVMAppendBasicBlock(g->cur_fn->fn_value, "MaybeThen");
+    LLVMBasicBlockRef else_block = LLVMAppendBasicBlock(g->cur_fn->fn_value, "MaybeElse");
+
+    LLVMBasicBlockRef endif_block;
+    bool then_endif_reachable = then_type->id != TypeTableEntryIdUnreachable;
+    bool else_endif_reachable = else_type->id != TypeTableEntryIdUnreachable;
+    if (then_endif_reachable || else_endif_reachable) {
+        endif_block = LLVMAppendBasicBlock(g->cur_fn->fn_value, "MaybeEndIf");
+    }
+
+    set_debug_source_node(g, node);
+    LLVMBuildCondBr(g->builder, cond_value, then_block, else_block);
+
+    LLVMPositionBuilderAtEnd(g->builder, then_block);
+    LLVMValueRef then_expr_result = gen_if_var_then_block(g, node, variable, maybe_is_ptr, init_val, child_type, then_node);
+
     if (then_endif_reachable) {
         LLVMBuildBr(g->builder, endif_block);
     }
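The second codegen.cpp hunk adds a constant-folding fast path: when the initializer of an if-var expression has a compile-time known value (const_val->ok), gen_if_var_expr emits only the taken branch, via gen_if_var_then_block or gen_expr on the else node, instead of building the MaybeThen/MaybeElse conditional branch. A hedged sketch of the construct this path covers, using the same `?=` capture syntax as the test in std/hash_map.zig below; the function name and the `?i32` parameter are illustrative, not from this commit:

    // Sketch only: if `x` happened to be a compile-time constant here, the
    // new fast path would emit just one of the two branches.
    fn value_or_zero(x: ?i32) -> i32 {
        if (const val ?= x) {
            return val;
        } else {
            return 0;
        }
    }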
std/hash_map.zig
@@ -4,17 +4,20 @@ const mem = @import("mem.zig");
 const Allocator = mem.Allocator;

 const want_modification_safety = !@compile_var("is_release");
-const debug_u32 = if (want_modification_safety) void else u32;
+const debug_u32 = if (want_modification_safety) u32 else void;

-pub struct HashMap(K: type, V: type, hash: fn(key: K)->u32, eql: fn(a: K, b: K)->bool) {
+pub struct SmallHashMap(K: type, V: type, hash: fn(key: K)->u32, eql: fn(a: K, b: K)->bool, STATIC_SIZE: isize) {
     entries: []Entry,
     size: isize,
     max_distance_from_start_index: isize,
     allocator: &Allocator,
+    // if the hash map is small enough, we use linear search through these
+    // entries instead of allocating memory
+    prealloc_entries: [STATIC_SIZE]Entry,
     // this is used to detect bugs where a hashtable is edited while an iterator is running.
     modification_count: debug_u32,

-    const Self = HashMap(K, V, hash, eql);
+    const Self = SmallHashMap(K, V, hash, eql, STATIC_SIZE);

     pub struct Entry {
         used: bool,
@@ -49,14 +52,20 @@ pub struct HashMap(K: type, V: type, hash: fn(key: K)->u32, eql: fn(a: K, b: K)->bool) {
         }
     }

-    pub fn init(hm: &Self, allocator: &Allocator, capacity: isize) {
-        assert(capacity > 0);
+    pub fn init(hm: &Self, allocator: &Allocator) {
+        hm.entries = hm.prealloc_entries[0...];
         hm.allocator = allocator;
-        hm.init_capacity(capacity);
+        hm.size = 0;
+        hm.max_distance_from_start_index = 0;
+        for (hm.entries) |*entry| {
+            entry.used = false;
+        }
     }

     pub fn deinit(hm: &Self) {
-        hm.allocator.free(hm.allocator, ([]u8)(hm.entries));
+        if (hm.entries.ptr != &hm.prealloc_entries[0]) {
+            hm.allocator.free(hm.allocator, ([]u8)(hm.entries));
+        }
     }

     pub fn clear(hm: &Self) {
@@ -68,26 +77,35 @@ pub struct HashMap(K: type, V: type, hash: fn(key: K)->u32, eql: fn(a: K, b: K)->bool) {
         hm.increment_modification_count();
     }

-    pub fn put(hm: &Self, key: K, value: V) {
+    pub fn put(hm: &Self, key: K, value: V) -> %void {
         hm.increment_modification_count();
-        hm.internal_put(key, value);
-        // if we get too full (60%), double the capacity
-        if (hm.size * 5 >= hm.entries.len * 3) {
+
+        const resize = if (hm.entries.ptr == &hm.prealloc_entries[0]) {
+            // preallocated entries table is full
+            hm.size == hm.entries.len
+        } else {
+            // if we get too full (60%), double the capacity
+            hm.size * 5 >= hm.entries.len * 3
+        };
+        if (resize) {
             const old_entries = hm.entries;
-            hm.init_capacity(hm.entries.len * 2);
+            %return hm.init_capacity(hm.entries.len * 2);
             // dump all of the old elements into the new table
             for (old_entries) |*old_entry| {
                 if (old_entry.used) {
                     hm.internal_put(old_entry.key, old_entry.value);
                 }
             }
-            hm.allocator.free(hm.allocator, ([]u8)(old_entries));
+            if (old_entries.ptr != &hm.prealloc_entries[0]) {
+                hm.allocator.free(hm.allocator, ([]u8)(old_entries));
+            }
         }

+        hm.internal_put(key, value);
     }

-    pub fn get(hm: &Self, key: K) {
-        return internal_get(key);
+    pub fn get(hm: &Self, key: K) -> ?&Entry {
+        return hm.internal_get(key);
     }

     pub fn remove(hm: &Self, key: K) {
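A note on the load-factor check above: `hm.size * 5 >= hm.entries.len * 3` is the integer form of size / entries.len >= 3/5, i.e. the 60% threshold the comment names, written without division. With a hypothetical heap-allocated table of entries.len == 10, the doubling happens on the put() that starts with size == 6 (6 * 5 = 30 >= 10 * 3 = 30), while a map still sitting in its preallocated entries only grows once that table is completely full (size == entries.len).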
@@ -95,7 +113,7 @@ pub struct HashMap(K: type, V: type, hash: fn(key: K)->u32, eql: fn(a: K, b: K)->bool) {
         const start_index = hm.key_to_index(key);
         {var roll_over: isize = 0; while (roll_over <= hm.max_distance_from_start_index; roll_over += 1) {
             const index = (start_index + roll_over) % hm.entries.len;
-            const entry = &hm.entries[index];
+            var entry = &hm.entries[index];

             assert(entry.used); // key not found

@@ -127,7 +145,7 @@ pub struct HashMap(K: type, V: type, hash: fn(key: K)->u32, eql: fn(a: K, b: K)->bool) {
         };
     }

-    fn init_capacity(hm: &Self, capacity: isize) {
+    fn init_capacity(hm: &Self, capacity: isize) -> %void {
         hm.entries = ([]Entry)(%return hm.allocator.alloc(hm.allocator, capacity * @sizeof(Entry)));
         hm.size = 0;
         hm.max_distance_from_start_index = 0;
@@ -145,7 +163,7 @@ pub struct HashMap(K: type, V: type, hash: fn(key: K)->u32, eql: fn(a: K, b: K)->bool) {
     fn internal_put(hm: &Self, orig_key: K, orig_value: V) {
         var key = orig_key;
         var value = orig_value;
-        const start_index = key_to_index(key);
+        const start_index = hm.key_to_index(key);
         var roll_over: isize = 0;
         var distance_from_start_index: isize = 0;
         while (roll_over < hm.entries.len; {roll_over += 1; distance_from_start_index += 1}) {
@@ -190,7 +208,7 @@ pub struct HashMap(K: type, V: type, hash: fn(key: K)->u32, eql: fn(a: K, b: K)->bool) {
     }

     fn internal_get(hm: &Self, key: K) -> ?&Entry {
-        const start_index = key_to_index(key);
+        const start_index = hm.key_to_index(key);
         {var roll_over: isize = 0; while (roll_over <= hm.max_distance_from_start_index; roll_over += 1) {
             const index = (start_index + roll_over) % hm.entries.len;
             const entry = &hm.entries[index];
@@ -233,9 +251,19 @@ fn global_free(self: &Allocator, old_mem: []u8) {

 #attribute("test")
 fn basic_hash_map_test() {
-    var map: HashMap(i32, i32, hash_i32, eql_i32) = undefined;
-    map.init(&global_allocator, 4);
+    var map: SmallHashMap(i32, i32, hash_i32, eql_i32, 4) = undefined;
+    map.init(&global_allocator);
     defer map.deinit();
+
+    %%map.put(1, 11);
+    %%map.put(2, 22);
+    %%map.put(3, 33);
+    %%map.put(4, 44);
+    %%map.put(5, 55);
+
+    assert((??map.get(2)).value == 22);
+    map.remove(2);
+    assert(if (const entry ?= map.get(2)) false else true);
 }

 fn hash_i32(x: i32) -> u32 {
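The STATIC_SIZE parameter is what makes this a small hash map: init() points entries at prealloc_entries, and put() only touches the allocator once the preallocated table is full. A hedged sketch in the style of basic_hash_map_test above; the test name is hypothetical, while hash_i32, eql_i32 and global_allocator are the helpers already defined in this file:

    #attribute("test")
    fn small_hash_map_no_alloc_test() {
        // Sketch only: with STATIC_SIZE 4 and just 3 entries inserted, the
        // resize path in put() is never taken, and deinit() sees
        // entries.ptr == &prealloc_entries[0], so nothing is freed.
        var map: SmallHashMap(i32, i32, hash_i32, eql_i32, 4) = undefined;
        map.init(&global_allocator);
        defer map.deinit();

        %%map.put(1, 11);
        %%map.put(2, 22);
        %%map.put(3, 33);

        assert((??map.get(3)).value == 33);
    }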
std/index.zig
@@ -6,6 +6,7 @@ pub const str = @import("str.zig");
 pub const cstr = @import("cstr.zig");
 pub const net = @import("net.zig");
 pub const list = @import("list.zig");
+pub const hash_map = @import("hash_map.zig");
 pub const mem = @import("mem.zig");

 pub fn assert(b: bool) {
std/list.zig
@@ -21,7 +21,7 @@ pub struct SmallList(T: type, STATIC_SIZE: isize) {
     }

     pub fn deinit(l: &SmallList(T, STATIC_SIZE)) {
-        if (l.items.ptr == &l.prealloc_items[0]) {
+        if (l.items.ptr != &l.prealloc_items[0]) {
             l.allocator.free(l.allocator, ([]u8)(l.items));
         }
     }
@@ -831,9 +831,9 @@ fn f() {


     add_compile_fail_case("missing else clause", R"SOURCE(
-fn f() {
-    const x : i32 = if (true) { 1 };
-    const y = if (true) { i32(1) };
+fn f(b: bool) {
+    const x : i32 = if (b) { 1 };
+    const y = if (b) { i32(1) };
 }
 )SOURCE", 2, ".tmp_source.zig:3:21: error: expected type 'i32', got 'void'",
             ".tmp_source.zig:4:15: error: incompatible types: 'i32' and 'void'");