Merge branch 'master' into pointer-reform

This commit is contained in:
Andrew Kelley 2018-05-09 23:43:07 -04:00
commit 6928badd85
52 changed files with 6437 additions and 390 deletions

View File

@ -416,8 +416,8 @@ set(ZIG_CPP_SOURCES
set(ZIG_STD_FILES
"array_list.zig"
"atomic/index.zig"
"atomic/stack.zig"
"atomic/queue.zig"
"atomic/stack.zig"
"base64.zig"
"buf_map.zig"
"buf_set.zig"
@ -427,13 +427,13 @@ set(ZIG_STD_FILES
"c/index.zig"
"c/linux.zig"
"c/windows.zig"
"crypto/blake2.zig"
"crypto/hmac.zig"
"crypto/index.zig"
"crypto/md5.zig"
"crypto/sha1.zig"
"crypto/sha2.zig"
"crypto/sha3.zig"
"crypto/blake2.zig"
"crypto/hmac.zig"
"cstr.zig"
"debug/failing_allocator.zig"
"debug/index.zig"
@ -445,15 +445,16 @@ set(ZIG_STD_FILES
"fmt/errol/index.zig"
"fmt/errol/lookup.zig"
"fmt/index.zig"
"hash_map.zig"
"hash/index.zig"
"hash/adler.zig"
"hash/crc.zig"
"hash/fnv.zig"
"hash/index.zig"
"hash/siphash.zig"
"hash_map.zig"
"heap.zig"
"index.zig"
"io.zig"
"json.zig"
"linked_list.zig"
"macho.zig"
"math/acos.zig"
@ -465,6 +466,28 @@ set(ZIG_STD_FILES
"math/atanh.zig"
"math/cbrt.zig"
"math/ceil.zig"
"math/complex/abs.zig"
"math/complex/acos.zig"
"math/complex/acosh.zig"
"math/complex/arg.zig"
"math/complex/asin.zig"
"math/complex/asinh.zig"
"math/complex/atan.zig"
"math/complex/atanh.zig"
"math/complex/conj.zig"
"math/complex/cos.zig"
"math/complex/cosh.zig"
"math/complex/exp.zig"
"math/complex/index.zig"
"math/complex/ldexp.zig"
"math/complex/log.zig"
"math/complex/pow.zig"
"math/complex/proj.zig"
"math/complex/sin.zig"
"math/complex/sinh.zig"
"math/complex/sqrt.zig"
"math/complex/tan.zig"
"math/complex/tanh.zig"
"math/copysign.zig"
"math/cos.zig"
"math/cosh.zig"
@ -501,33 +524,12 @@ set(ZIG_STD_FILES
"math/tan.zig"
"math/tanh.zig"
"math/trunc.zig"
"math/complex/abs.zig"
"math/complex/acosh.zig"
"math/complex/acos.zig"
"math/complex/arg.zig"
"math/complex/asinh.zig"
"math/complex/asin.zig"
"math/complex/atanh.zig"
"math/complex/atan.zig"
"math/complex/conj.zig"
"math/complex/cosh.zig"
"math/complex/cos.zig"
"math/complex/exp.zig"
"math/complex/index.zig"
"math/complex/ldexp.zig"
"math/complex/log.zig"
"math/complex/pow.zig"
"math/complex/proj.zig"
"math/complex/sinh.zig"
"math/complex/sin.zig"
"math/complex/sqrt.zig"
"math/complex/tanh.zig"
"math/complex/tan.zig"
"mem.zig"
"net.zig"
"os/child_process.zig"
"os/darwin.zig"
"os/darwin_errno.zig"
"os/epoch.zig"
"os/file.zig"
"os/get_user_id.zig"
"os/index.zig"
@ -537,13 +539,13 @@ set(ZIG_STD_FILES
"os/linux/x86_64.zig"
"os/path.zig"
"os/time.zig"
"os/epoch.zig"
"os/windows/error.zig"
"os/windows/index.zig"
"os/windows/util.zig"
"os/zen.zig"
"rand/index.zig"
"rand/ziggurat.zig"
"segmented_list.zig"
"sort.zig"
"special/bootstrap.zig"
"special/bootstrap_lib.zig"

View File

@ -4809,6 +4809,182 @@ pub const TypeId = enum {
BoundFn,
ArgTuple,
Opaque,
};
{#code_end#}
{#header_close#}
{#header_open|@typeInfo#}
<pre><code class="zig">@typeInfo(comptime T: type) -&gt; @import("builtin").TypeInfo</code></pre>
<p>
Returns information on the type. Returns a value of the following union:
</p>
{#code_begin|syntax#}
pub const TypeInfo = union(TypeId) {
Type: void,
Void: void,
Bool: void,
NoReturn: void,
Int: Int,
Float: Float,
Pointer: Pointer,
Array: Array,
Struct: Struct,
FloatLiteral: void,
IntLiteral: void,
UndefinedLiteral: void,
NullLiteral: void,
Nullable: Nullable,
ErrorUnion: ErrorUnion,
ErrorSet: ErrorSet,
Enum: Enum,
Union: Union,
Fn: Fn,
Namespace: void,
Block: void,
BoundFn: Fn,
ArgTuple: void,
Opaque: void,
Promise: Promise,
pub const Int = struct {
is_signed: bool,
bits: u8,
};
pub const Float = struct {
bits: u8,
};
pub const Pointer = struct {
is_const: bool,
is_volatile: bool,
alignment: u32,
child: type,
};
pub const Array = struct {
len: usize,
child: type,
};
pub const ContainerLayout = enum {
Auto,
Extern,
Packed,
};
pub const StructField = struct {
name: []const u8,
offset: ?usize,
field_type: type,
};
pub const Struct = struct {
layout: ContainerLayout,
fields: []StructField,
defs: []Definition,
};
pub const Nullable = struct {
child: type,
};
pub const ErrorUnion = struct {
error_set: type,
payload: type,
};
pub const Error = struct {
name: []const u8,
value: usize,
};
pub const ErrorSet = struct {
errors: []Error,
};
pub const EnumField = struct {
name: []const u8,
value: usize,
};
pub const Enum = struct {
layout: ContainerLayout,
tag_type: type,
fields: []EnumField,
defs: []Definition,
};
pub const UnionField = struct {
name: []const u8,
enum_field: ?EnumField,
field_type: type,
};
pub const Union = struct {
layout: ContainerLayout,
tag_type: type,
fields: []UnionField,
defs: []Definition,
};
pub const CallingConvention = enum {
Unspecified,
C,
Cold,
Naked,
Stdcall,
Async,
};
pub const FnArg = struct {
is_generic: bool,
is_noalias: bool,
arg_type: type,
};
pub const Fn = struct {
calling_convention: CallingConvention,
is_generic: bool,
is_var_args: bool,
return_type: type,
async_allocator_type: type,
args: []FnArg,
};
pub const Promise = struct {
child: type,
};
pub const Definition = struct {
name: []const u8,
is_pub: bool,
data: Data,
pub const Data = union(enum) {
Type: type,
Var: type,
Fn: FnDef,
pub const FnDef = struct {
fn_type: type,
inline_type: Inline,
calling_convention: CallingConvention,
is_var_args: bool,
is_extern: bool,
is_export: bool,
lib_name: ?[]const u8,
return_type: type,
arg_names: [][] const u8,
pub const Inline = enum {
Auto,
Always,
Never,
};
};
};
};
};
{#code_end#}
{#header_close#}
@ -5226,7 +5402,6 @@ pub const Os = enum {
rtems,
nacl,
cnk,
bitrig,
aix,
cuda,
nvcl,
@ -5237,10 +5412,12 @@ pub const Os = enum {
watchos,
mesa3d,
contiki,
amdpal,
zen,
};
pub const Arch = enum {
armv8_3a,
armv8_2a,
armv8_1a,
armv8,
@ -5260,9 +5437,29 @@ pub const Arch = enum {
armv5,
armv5te,
armv4t,
armeb,
armebv8_3a,
armebv8_2a,
armebv8_1a,
armebv8,
armebv8r,
armebv8m_baseline,
armebv8m_mainline,
armebv7,
armebv7em,
armebv7m,
armebv7s,
armebv7k,
armebv7ve,
armebv6,
armebv6m,
armebv6k,
armebv6t2,
armebv5,
armebv5te,
armebv4t,
aarch64,
aarch64_be,
arc,
avr,
bpfel,
bpfeb,
@ -5315,6 +5512,7 @@ pub const Arch = enum {
pub const Environ = enum {
unknown,
gnu,
gnuabin32,
gnuabi64,
gnueabi,
gnueabihf,
@ -5332,6 +5530,7 @@ pub const Environ = enum {
amdopencl,
coreclr,
opencl,
simulator,
};
pub const ObjectFormat = enum {
@ -5358,10 +5557,23 @@ pub const AtomicOrder = enum {
SeqCst,
};
pub const AtomicRmwOp = enum {
Xchg,
Add,
Sub,
And,
Nand,
Or,
Xor,
Max,
Min,
};
pub const Mode = enum {
Debug,
ReleaseSafe,
ReleaseFast,
ReleaseSmall,
};
pub const TypeId = enum {
@ -5380,7 +5592,7 @@ pub const TypeId = enum {
NullLiteral,
Nullable,
ErrorUnion,
Error,
ErrorSet,
Enum,
Union,
Fn,
@ -5389,6 +5601,176 @@ pub const TypeId = enum {
BoundFn,
ArgTuple,
Opaque,
Promise,
};
pub const TypeInfo = union(TypeId) {
Type: void,
Void: void,
Bool: void,
NoReturn: void,
Int: Int,
Float: Float,
Pointer: Pointer,
Array: Array,
Struct: Struct,
FloatLiteral: void,
IntLiteral: void,
UndefinedLiteral: void,
NullLiteral: void,
Nullable: Nullable,
ErrorUnion: ErrorUnion,
ErrorSet: ErrorSet,
Enum: Enum,
Union: Union,
Fn: Fn,
Namespace: void,
Block: void,
BoundFn: Fn,
ArgTuple: void,
Opaque: void,
Promise: Promise,
pub const Int = struct {
is_signed: bool,
bits: u8,
};
pub const Float = struct {
bits: u8,
};
pub const Pointer = struct {
is_const: bool,
is_volatile: bool,
alignment: u32,
child: type,
};
pub const Array = struct {
len: usize,
child: type,
};
pub const ContainerLayout = enum {
Auto,
Extern,
Packed,
};
pub const StructField = struct {
name: []const u8,
offset: ?usize,
field_type: type,
};
pub const Struct = struct {
layout: ContainerLayout,
fields: []StructField,
defs: []Definition,
};
pub const Nullable = struct {
child: type,
};
pub const ErrorUnion = struct {
error_set: type,
payload: type,
};
pub const Error = struct {
name: []const u8,
value: usize,
};
pub const ErrorSet = struct {
errors: []Error,
};
pub const EnumField = struct {
name: []const u8,
value: usize,
};
pub const Enum = struct {
layout: ContainerLayout,
tag_type: type,
fields: []EnumField,
defs: []Definition,
};
pub const UnionField = struct {
name: []const u8,
enum_field: ?EnumField,
field_type: type,
};
pub const Union = struct {
layout: ContainerLayout,
tag_type: type,
fields: []UnionField,
defs: []Definition,
};
pub const CallingConvention = enum {
Unspecified,
C,
Cold,
Naked,
Stdcall,
Async,
};
pub const FnArg = struct {
is_generic: bool,
is_noalias: bool,
arg_type: type,
};
pub const Fn = struct {
calling_convention: CallingConvention,
is_generic: bool,
is_var_args: bool,
return_type: type,
async_allocator_type: type,
args: []FnArg,
};
pub const Promise = struct {
child: type,
};
pub const Definition = struct {
name: []const u8,
is_pub: bool,
data: Data,
pub const Data = union(enum) {
Type: type,
Var: type,
Fn: FnDef,
pub const FnDef = struct {
fn_type: type,
inline_type: Inline,
calling_convention: CallingConvention,
is_var_args: bool,
is_extern: bool,
is_export: bool,
lib_name: ?[]const u8,
return_type: type,
arg_names: [][] const u8,
pub const Inline = enum {
Auto,
Always,
Never,
};
};
};
};
};
pub const FloatMode = enum {
@ -5402,7 +5784,7 @@ pub const Endian = enum {
};
pub const endian = Endian.Little;
pub const is_test = false;
pub const is_test = true;
pub const os = Os.linux;
pub const arch = Arch.x86_64;
pub const environ = Environ.gnu;
@ -5410,6 +5792,7 @@ pub const object_format = ObjectFormat.elf;
pub const mode = Mode.Debug;
pub const link_libc = false;
pub const have_error_return_tracing = true;
pub const __zig_test_fn_slice = {}; // overwritten later
{#code_end#}
{#see_also|Build Mode#}
{#header_close#}
@ -6070,7 +6453,7 @@ hljs.registerLanguage("zig", function(t) {
a = t.IR + "\\s*\\(",
c = {
keyword: "const align var extern stdcallcc nakedcc volatile export pub noalias inline struct packed enum union break return try catch test continue unreachable comptime and or asm defer errdefer if else switch while for fn use bool f32 f64 void type noreturn error i8 u8 i16 u16 i32 u32 i64 u64 isize usize i8w u8w i16w i32w u32w i64w u64w isizew usizew c_short c_ushort c_int c_uint c_long c_ulong c_longlong c_ulonglong",
built_in: "atomicLoad breakpoint returnAddress frameAddress fieldParentPtr setFloatMode IntType OpaqueType compileError compileLog setCold setRuntimeSafety setEvalBranchQuota offsetOf memcpy inlineCall setGlobalLinkage setGlobalSection divTrunc divFloor enumTagName intToPtr ptrToInt panic canImplicitCast ptrCast bitCast rem mod memset sizeOf alignOf alignCast maxValue minValue memberCount memberName memberType typeOf addWithOverflow subWithOverflow mulWithOverflow shlWithOverflow shlExact shrExact cInclude cDefine cUndef ctz clz import cImport errorName embedFile cmpxchgStrong cmpxchgWeak fence divExact truncate atomicRmw sqrt field",
built_in: "atomicLoad breakpoint returnAddress frameAddress fieldParentPtr setFloatMode IntType OpaqueType compileError compileLog setCold setRuntimeSafety setEvalBranchQuota offsetOf memcpy inlineCall setGlobalLinkage setGlobalSection divTrunc divFloor enumTagName intToPtr ptrToInt panic canImplicitCast ptrCast bitCast rem mod memset sizeOf alignOf alignCast maxValue minValue memberCount memberName memberType typeOf addWithOverflow subWithOverflow mulWithOverflow shlWithOverflow shlExact shrExact cInclude cDefine cUndef ctz clz import cImport errorName embedFile cmpxchgStrong cmpxchgWeak fence divExact truncate atomicRmw sqrt field typeInfo",
literal: "true false null undefined"
},
n = [e, t.CLCM, t.CBCM, s, r];

View File

@ -1298,6 +1298,7 @@ enum BuiltinFnId {
BuiltinFnIdMemberType,
BuiltinFnIdMemberName,
BuiltinFnIdField,
BuiltinFnIdTypeInfo,
BuiltinFnIdTypeof,
BuiltinFnIdAddWithOverflow,
BuiltinFnIdSubWithOverflow,
@ -1511,6 +1512,7 @@ struct CodeGen {
HashMap<Buf *, AstNode *, buf_hash, buf_eql_buf> exported_symbol_names;
HashMap<Buf *, Tld *, buf_hash, buf_eql_buf> external_prototypes;
HashMap<Buf *, ConstExprValue *, buf_hash, buf_eql_buf> string_literals_table;
HashMap<const TypeTableEntry *, ConstExprValue *, type_ptr_hash, type_ptr_eql> type_info_cache;
ZigList<ImportTableEntry *> import_queue;
@ -2042,6 +2044,7 @@ enum IrInstructionId {
IrInstructionIdTagType,
IrInstructionIdFieldParentPtr,
IrInstructionIdOffsetOf,
IrInstructionIdTypeInfo,
IrInstructionIdTypeId,
IrInstructionIdSetEvalBranchQuota,
IrInstructionIdPtrTypeOf,
@ -2863,6 +2866,12 @@ struct IrInstructionOffsetOf {
IrInstruction *field_name;
};
struct IrInstructionTypeInfo {
IrInstruction base;
IrInstruction *type_value;
};
struct IrInstructionTypeId {
IrInstruction base;

View File

@ -2325,8 +2325,14 @@ static void resolve_enum_zero_bits(CodeGen *g, TypeTableEntry *enum_type) {
HashMap<BigInt, AstNode *, bigint_hash, bigint_eql> occupied_tag_values = {};
occupied_tag_values.init(field_count);
TypeTableEntry *tag_int_type = get_smallest_unsigned_int_type(g, field_count - 1);
TypeTableEntry *tag_int_type;
if (enum_type->data.enumeration.layout == ContainerLayoutExtern) {
tag_int_type = get_c_int_type(g, CIntTypeInt);
} else {
tag_int_type = get_smallest_unsigned_int_type(g, field_count - 1);
}
// TODO: Are extern enums allowed to have an init_arg_expr?
if (decl_node->data.container_decl.init_arg_expr != nullptr) {
TypeTableEntry *wanted_tag_int_type = analyze_type_expr(g, scope, decl_node->data.container_decl.init_arg_expr);
if (type_is_invalid(wanted_tag_int_type)) {
@ -5926,8 +5932,8 @@ size_t type_id_len() {
return array_length(all_type_ids);
}
size_t type_id_index(TypeTableEntryId id) {
switch (id) {
size_t type_id_index(TypeTableEntry *entry) {
switch (entry->id) {
case TypeTableEntryIdInvalid:
zig_unreachable();
case TypeTableEntryIdMetaType:
@ -5947,6 +5953,8 @@ size_t type_id_index(TypeTableEntryId id) {
case TypeTableEntryIdArray:
return 7;
case TypeTableEntryIdStruct:
if (entry->data.structure.is_slice)
return 25;
return 8;
case TypeTableEntryIdNumLitFloat:
return 9;

View File

@ -174,7 +174,7 @@ void update_compile_var(CodeGen *g, Buf *name, ConstExprValue *value);
const char *type_id_name(TypeTableEntryId id);
TypeTableEntryId type_id_at_index(size_t index);
size_t type_id_len();
size_t type_id_index(TypeTableEntryId id);
size_t type_id_index(TypeTableEntry *entry);
TypeTableEntry *get_generic_fn_type(CodeGen *g, FnTypeId *fn_type_id);
bool type_is_copyable(CodeGen *g, TypeTableEntry *type_entry);
LinkLib *create_link_lib(Buf *name);

View File

@ -736,7 +736,7 @@ static void render_node_extra(AstRender *ar, AstNode *node, bool grouped) {
render_node_grouped(ar, field_node->data.struct_field.type);
}
if (field_node->data.struct_field.value != nullptr) {
fprintf(ar->f, "= ");
fprintf(ar->f, " = ");
render_node_grouped(ar, field_node->data.struct_field.value);
}
fprintf(ar->f, ",\n");

View File

@ -1259,12 +1259,11 @@ void bigint_and(BigInt *dest, const BigInt *op1, const BigInt *op2) {
bigint_normalize(dest);
return;
}
// TODO this code path is untested
uint64_t first_digit = dest->data.digit;
dest->digit_count = max(op1->digit_count, op2->digit_count);
dest->data.digits = allocate_nonzero<uint64_t>(dest->digit_count);
dest->data.digits[0] = first_digit;
size_t i = 1;
size_t i = 0;
for (; i < op1->digit_count && i < op2->digit_count; i += 1) {
dest->data.digits[i] = op1_digits[i] & op2_digits[i];
}
@ -1412,7 +1411,6 @@ void bigint_shr(BigInt *dest, const BigInt *op1, const BigInt *op2) {
return;
}
// TODO this code path is untested
size_t digit_shift_count = shift_amt / 64;
size_t leftover_shift_count = shift_amt % 64;
@ -1427,7 +1425,7 @@ void bigint_shr(BigInt *dest, const BigInt *op1, const BigInt *op2) {
uint64_t digit = op1_digits[op_digit_index];
size_t dest_digit_index = op_digit_index - digit_shift_count;
dest->data.digits[dest_digit_index] = carry | (digit >> leftover_shift_count);
carry = (0xffffffffffffffffULL << leftover_shift_count) & digit;
carry = digit << leftover_shift_count;
if (dest_digit_index == 0) { break; }
op_digit_index -= 1;

View File

@ -88,6 +88,7 @@ CodeGen *codegen_create(Buf *root_src_path, const ZigTarget *target, OutType out
g->exported_symbol_names.init(8);
g->external_prototypes.init(8);
g->string_literals_table.init(16);
g->type_info_cache.init(32);
g->is_test_build = false;
g->want_h_file = (out_type == OutTypeObj || out_type == OutTypeLib);
buf_resize(&g->global_asm, 0);
@ -4502,6 +4503,7 @@ static LLVMValueRef ir_render_instruction(CodeGen *g, IrExecutable *executable,
case IrInstructionIdDeclRef:
case IrInstructionIdSwitchVar:
case IrInstructionIdOffsetOf:
case IrInstructionIdTypeInfo:
case IrInstructionIdTypeId:
case IrInstructionIdSetEvalBranchQuota:
case IrInstructionIdPtrTypeOf:
@ -6125,6 +6127,7 @@ static void define_builtin_fns(CodeGen *g) {
create_builtin_fn(g, BuiltinFnIdMemberType, "memberType", 2);
create_builtin_fn(g, BuiltinFnIdMemberName, "memberName", 2);
create_builtin_fn(g, BuiltinFnIdField, "field", 2);
create_builtin_fn(g, BuiltinFnIdTypeInfo, "typeInfo", 1);
create_builtin_fn(g, BuiltinFnIdTypeof, "typeOf", 1); // TODO rename to TypeOf
create_builtin_fn(g, BuiltinFnIdAddWithOverflow, "addWithOverflow", 4);
create_builtin_fn(g, BuiltinFnIdSubWithOverflow, "subWithOverflow", 4);
@ -6342,8 +6345,196 @@ static void define_builtin_compile_vars(CodeGen *g) {
const TypeTableEntryId id = type_id_at_index(i);
buf_appendf(contents, " %s,\n", type_id_name(id));
}
buf_appendf(contents, " Slice,\n");
buf_appendf(contents, "};\n\n");
}
{
buf_appendf(contents,
"pub const TypeInfo = union(TypeId) {\n"
" Type: void,\n"
" Void: void,\n"
" Bool: void,\n"
" NoReturn: void,\n"
" Int: Int,\n"
" Float: Float,\n"
" Pointer: Pointer,\n"
" Slice: Slice,\n"
" Array: Array,\n"
" Struct: Struct,\n"
" FloatLiteral: void,\n"
" IntLiteral: void,\n"
" UndefinedLiteral: void,\n"
" NullLiteral: void,\n"
" Nullable: Nullable,\n"
" ErrorUnion: ErrorUnion,\n"
" ErrorSet: ErrorSet,\n"
" Enum: Enum,\n"
" Union: Union,\n"
" Fn: Fn,\n"
" Namespace: void,\n"
" Block: void,\n"
" BoundFn: Fn,\n"
" ArgTuple: void,\n"
" Opaque: void,\n"
" Promise: Promise,\n"
"\n\n"
" pub const Int = struct {\n"
" is_signed: bool,\n"
" bits: u8,\n"
" };\n"
"\n"
" pub const Float = struct {\n"
" bits: u8,\n"
" };\n"
"\n"
" pub const Pointer = struct {\n"
" is_const: bool,\n"
" is_volatile: bool,\n"
" alignment: u32,\n"
" child: type,\n"
" };\n"
"\n"
" pub const Slice = Pointer;\n"
"\n"
" pub const Array = struct {\n"
" len: usize,\n"
" child: type,\n"
" };\n"
"\n"
" pub const ContainerLayout = enum {\n"
" Auto,\n"
" Extern,\n"
" Packed,\n"
" };\n"
"\n"
" pub const StructField = struct {\n"
" name: []const u8,\n"
" offset: ?usize,\n"
" field_type: type,\n"
" };\n"
"\n"
" pub const Struct = struct {\n"
" layout: ContainerLayout,\n"
" fields: []StructField,\n"
" defs: []Definition,\n"
" };\n"
"\n"
" pub const Nullable = struct {\n"
" child: type,\n"
" };\n"
"\n"
" pub const ErrorUnion = struct {\n"
" error_set: type,\n"
" payload: type,\n"
" };\n"
"\n"
" pub const Error = struct {\n"
" name: []const u8,\n"
" value: usize,\n"
" };\n"
"\n"
" pub const ErrorSet = struct {\n"
" errors: []Error,\n"
" };\n"
"\n"
" pub const EnumField = struct {\n"
" name: []const u8,\n"
" value: usize,\n"
" };\n"
"\n"
" pub const Enum = struct {\n"
" layout: ContainerLayout,\n"
" tag_type: type,\n"
" fields: []EnumField,\n"
" defs: []Definition,\n"
" };\n"
"\n"
" pub const UnionField = struct {\n"
" name: []const u8,\n"
" enum_field: ?EnumField,\n"
" field_type: type,\n"
" };\n"
"\n"
" pub const Union = struct {\n"
" layout: ContainerLayout,\n"
" tag_type: type,\n"
" fields: []UnionField,\n"
" defs: []Definition,\n"
" };\n"
"\n"
" pub const CallingConvention = enum {\n"
" Unspecified,\n"
" C,\n"
" Cold,\n"
" Naked,\n"
" Stdcall,\n"
" Async,\n"
" };\n"
"\n"
" pub const FnArg = struct {\n"
" is_generic: bool,\n"
" is_noalias: bool,\n"
" arg_type: type,\n"
" };\n"
"\n"
" pub const Fn = struct {\n"
" calling_convention: CallingConvention,\n"
" is_generic: bool,\n"
" is_var_args: bool,\n"
" return_type: type,\n"
" async_allocator_type: type,\n"
" args: []FnArg,\n"
" };\n"
"\n"
" pub const Promise = struct {\n"
" child: type,\n"
" };\n"
"\n"
" pub const Definition = struct {\n"
" name: []const u8,\n"
" is_pub: bool,\n"
" data: Data,\n"
"\n"
" pub const Data = union(enum) {\n"
" Type: type,\n"
" Var: type,\n"
" Fn: FnDef,\n"
"\n"
" pub const FnDef = struct {\n"
" fn_type: type,\n"
" inline_type: Inline,\n"
" calling_convention: CallingConvention,\n"
" is_var_args: bool,\n"
" is_extern: bool,\n"
" is_export: bool,\n"
" lib_name: ?[]const u8,\n"
" return_type: type,\n"
" arg_names: [][] const u8,\n"
"\n"
" pub const Inline = enum {\n"
" Auto,\n"
" Always,\n"
" Never,\n"
" };\n"
" };\n"
" };\n"
" };\n"
"};\n\n");
assert(ContainerLayoutAuto == 0);
assert(ContainerLayoutExtern == 1);
assert(ContainerLayoutPacked == 2);
assert(CallingConventionUnspecified == 0);
assert(CallingConventionC == 1);
assert(CallingConventionCold == 2);
assert(CallingConventionNaked == 3);
assert(CallingConventionStdcall == 4);
assert(CallingConventionAsync == 5);
assert(FnInlineAuto == 0);
assert(FnInlineAlways == 1);
assert(FnInlineNever == 2);
}
{
buf_appendf(contents,
"pub const FloatMode = enum {\n"

1000
src/ir.cpp

File diff suppressed because it is too large Load Diff

View File

@ -966,6 +966,12 @@ static void ir_print_offset_of(IrPrint *irp, IrInstructionOffsetOf *instruction)
fprintf(irp->f, ")");
}
static void ir_print_type_info(IrPrint *irp, IrInstructionTypeInfo *instruction) {
fprintf(irp->f, "@typeInfo(");
ir_print_other_instruction(irp, instruction->type_value);
fprintf(irp->f, ")");
}
static void ir_print_type_id(IrPrint *irp, IrInstructionTypeId *instruction) {
fprintf(irp->f, "@typeId(");
ir_print_other_instruction(irp, instruction->type_value);
@ -1536,6 +1542,9 @@ static void ir_print_instruction(IrPrint *irp, IrInstruction *instruction) {
case IrInstructionIdOffsetOf:
ir_print_offset_of(irp, (IrInstructionOffsetOf *)instruction);
break;
case IrInstructionIdTypeInfo:
ir_print_type_info(irp, (IrInstructionTypeInfo *)instruction);
break;
case IrInstructionIdTypeId:
ir_print_type_id(irp, (IrInstructionTypeId *)instruction);
break;

View File

@ -3672,6 +3672,7 @@ static AstNode *resolve_typedef_decl(Context *c, const TypedefNameDecl *typedef_
if (existing_entry) {
return existing_entry->value;
}
QualType child_qt = typedef_decl->getUnderlyingType();
Buf *type_name = buf_create_from_str(decl_name(typedef_decl));
@ -3705,16 +3706,19 @@ static AstNode *resolve_typedef_decl(Context *c, const TypedefNameDecl *typedef_
// use the name of this typedef
// TODO
// trans_qual_type here might cause us to look at this typedef again so we put the item in the map first
AstNode *symbol_node = trans_create_node_symbol(c, type_name);
c->decl_table.put(typedef_decl->getCanonicalDecl(), symbol_node);
AstNode *type_node = trans_qual_type(c, child_qt, typedef_decl->getLocation());
if (type_node == nullptr) {
emit_warning(c, typedef_decl->getLocation(), "typedef %s - unresolved child type", buf_ptr(type_name));
c->decl_table.put(typedef_decl, nullptr);
// TODO add global var with type_name equal to @compileError("unable to resolve C type")
return nullptr;
}
add_global_var(c, type_name, type_node);
AstNode *symbol_node = trans_create_node_symbol(c, type_name);
c->decl_table.put(typedef_decl->getCanonicalDecl(), symbol_node);
return symbol_node;
}
@ -3749,6 +3753,7 @@ static AstNode *resolve_enum_decl(Context *c, const EnumDecl *enum_decl) {
return demote_enum_to_opaque(c, enum_decl, full_type_name, bare_name);
}
bool pure_enum = true;
uint32_t field_count = 0;
for (auto it = enum_def->enumerator_begin(),
@ -3760,84 +3765,53 @@ static AstNode *resolve_enum_decl(Context *c, const EnumDecl *enum_decl) {
pure_enum = false;
}
}
AstNode *tag_int_type = trans_qual_type(c, enum_decl->getIntegerType(), enum_decl->getLocation());
assert(tag_int_type);
if (pure_enum) {
AstNode *enum_node = trans_create_node(c, NodeTypeContainerDecl);
enum_node->data.container_decl.kind = ContainerKindEnum;
enum_node->data.container_decl.layout = ContainerLayoutExtern;
// TODO only emit this tag type if the enum tag type is not the default.
// I don't know what the default is, need to figure out how clang is deciding.
// it appears to at least be different across gcc/msvc
if (!c_is_builtin_type(c, enum_decl->getIntegerType(), BuiltinType::UInt) &&
!c_is_builtin_type(c, enum_decl->getIntegerType(), BuiltinType::Int))
{
enum_node->data.container_decl.init_arg_expr = tag_int_type;
}
enum_node->data.container_decl.fields.resize(field_count);
uint32_t i = 0;
for (auto it = enum_def->enumerator_begin(),
it_end = enum_def->enumerator_end();
it != it_end; ++it, i += 1)
{
const EnumConstantDecl *enum_const = *it;
Buf *enum_val_name = buf_create_from_str(decl_name(enum_const));
Buf *field_name;
if (bare_name != nullptr && buf_starts_with_buf(enum_val_name, bare_name)) {
field_name = buf_slice(enum_val_name, buf_len(bare_name), buf_len(enum_val_name));
} else {
field_name = enum_val_name;
}
AstNode *field_node = trans_create_node(c, NodeTypeStructField);
field_node->data.struct_field.name = field_name;
field_node->data.struct_field.type = nullptr;
enum_node->data.container_decl.fields.items[i] = field_node;
// in C each enum value is in the global namespace. so we put them there too.
// at this point we can rely on the enum emitting successfully
if (is_anonymous) {
AstNode *lit_node = trans_create_node_unsigned(c, i);
add_global_var(c, enum_val_name, lit_node);
} else {
AstNode *field_access_node = trans_create_node_field_access(c,
trans_create_node_symbol(c, full_type_name), field_name);
add_global_var(c, enum_val_name, field_access_node);
}
}
if (is_anonymous) {
c->decl_table.put(enum_decl->getCanonicalDecl(), enum_node);
return enum_node;
} else {
AstNode *symbol_node = trans_create_node_symbol(c, full_type_name);
add_global_weak_alias(c, bare_name, full_type_name);
add_global_var(c, full_type_name, enum_node);
c->decl_table.put(enum_decl->getCanonicalDecl(), symbol_node);
return enum_node;
}
AstNode *enum_node = trans_create_node(c, NodeTypeContainerDecl);
enum_node->data.container_decl.kind = ContainerKindEnum;
enum_node->data.container_decl.layout = ContainerLayoutExtern;
// TODO only emit this tag type if the enum tag type is not the default.
// I don't know what the default is, need to figure out how clang is deciding.
// it appears to at least be different across gcc/msvc
if (!c_is_builtin_type(c, enum_decl->getIntegerType(), BuiltinType::UInt) &&
!c_is_builtin_type(c, enum_decl->getIntegerType(), BuiltinType::Int))
{
enum_node->data.container_decl.init_arg_expr = tag_int_type;
}
// TODO after issue #305 is solved, make this be an enum with tag_int_type
// as the integer type and set the custom enum values
AstNode *enum_node = tag_int_type;
// add variables for all the values with enum_node
enum_node->data.container_decl.fields.resize(field_count);
uint32_t i = 0;
for (auto it = enum_def->enumerator_begin(),
it_end = enum_def->enumerator_end();
it != it_end; ++it)
it != it_end; ++it, i += 1)
{
const EnumConstantDecl *enum_const = *it;
Buf *enum_val_name = buf_create_from_str(decl_name(enum_const));
AstNode *int_node = trans_create_node_apint(c, enum_const->getInitVal());
AstNode *var_node = add_global_var(c, enum_val_name, int_node);
var_node->data.variable_declaration.type = tag_int_type;
Buf *field_name;
if (bare_name != nullptr && buf_starts_with_buf(enum_val_name, bare_name)) {
field_name = buf_slice(enum_val_name, buf_len(bare_name), buf_len(enum_val_name));
} else {
field_name = enum_val_name;
}
AstNode *int_node = pure_enum && !is_anonymous ? nullptr : trans_create_node_apint(c, enum_const->getInitVal());
AstNode *field_node = trans_create_node(c, NodeTypeStructField);
field_node->data.struct_field.name = field_name;
field_node->data.struct_field.type = nullptr;
field_node->data.struct_field.value = int_node;
enum_node->data.container_decl.fields.items[i] = field_node;
// in C each enum value is in the global namespace. so we put them there too.
// at this point we can rely on the enum emitting successfully
if (is_anonymous) {
Buf *enum_val_name = buf_create_from_str(decl_name(enum_const));
add_global_var(c, enum_val_name, int_node);
} else {
AstNode *field_access_node = trans_create_node_field_access(c,
trans_create_node_symbol(c, full_type_name), field_name);
add_global_var(c, enum_val_name, field_access_node);
}
}
if (is_anonymous) {
@ -3848,7 +3822,7 @@ static AstNode *resolve_enum_decl(Context *c, const EnumDecl *enum_decl) {
add_global_weak_alias(c, bare_name, full_type_name);
add_global_var(c, full_type_name, enum_node);
c->decl_table.put(enum_decl->getCanonicalDecl(), symbol_node);
return symbol_node;
return enum_node;
}
}

View File

@ -28,11 +28,11 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type {
};
}
pub fn deinit(l: &Self) void {
pub fn deinit(l: &const Self) void {
l.allocator.free(l.items);
}
pub fn toSlice(l: &Self) []align(A) T {
pub fn toSlice(l: &const Self) []align(A) T {
return l.items[0..l.len];
}
@ -44,6 +44,10 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type {
return l.toSliceConst()[n];
}
pub fn count(self: &const Self) usize {
return self.len;
}
/// ArrayList takes ownership of the passed in slice. The slice must have been
/// allocated with `allocator`.
/// Deinitialize with `deinit` or use `toOwnedSlice`.
@ -127,6 +131,27 @@ pub fn AlignedArrayList(comptime T: type, comptime A: u29) type {
if (self.len == 0) return null;
return self.pop();
}
pub const Iterator = struct {
list: &const Self,
// how many items have we returned
count: usize,
pub fn next(it: &Iterator) ?T {
if (it.count >= it.list.len) return null;
const val = it.list.at(it.count);
it.count += 1;
return val;
}
pub fn reset(it: &Iterator) void {
it.count = 0;
}
};
pub fn iterator(self: &const Self) Iterator {
return Iterator { .list = self, .count = 0 };
}
};
}
@ -148,6 +173,14 @@ test "basic ArrayList test" {
}
}
for (list.toSlice()) |v, i| {
assert(v == i32(i + 1));
}
for (list.toSliceConst()) |v, i| {
assert(v == i32(i + 1));
}
assert(list.pop() == 10);
assert(list.len == 9);
@ -166,6 +199,35 @@ test "basic ArrayList test" {
assert(list.len == 9);
}
test "iterator ArrayList test" {
var list = ArrayList(i32).init(debug.global_allocator);
defer list.deinit();
try list.append(1);
try list.append(2);
try list.append(3);
var count : i32 = 0;
var it = list.iterator();
while (it.next()) |next| {
assert(next == count + 1);
count += 1;
}
assert(count == 3);
assert(it.next() == null);
it.reset();
count = 0;
while (it.next()) |next| {
assert(next == count + 1);
count += 1;
if (count == 2) break;
}
it.reset();
assert(?? it.next() == 1);
}
test "insert ArrayList test" {
var list = ArrayList(i32).init(debug.global_allocator);
defer list.deinit();

View File

@ -31,10 +31,10 @@ pub fn Queue(comptime T: type) type {
}
pub fn get(self: &Self) ?&Node {
var head = @atomicLoad(&Node, &self.head, AtomicOrder.Acquire);
var head = @atomicLoad(&Node, &self.head, AtomicOrder.SeqCst);
while (true) {
const node = head.next ?? return null;
head = @cmpxchgWeak(&Node, &self.head, head, node, AtomicOrder.Release, AtomicOrder.Acquire) ?? return node;
head = @cmpxchgWeak(&Node, &self.head, head, node, AtomicOrder.SeqCst, AtomicOrder.SeqCst) ?? return node;
}
}
};
@ -49,14 +49,20 @@ const Context = struct {
get_count: usize,
puts_done: u8, // TODO make this a bool
};
const puts_per_thread = 10000;
// TODO add lazy evaluated build options and then put puts_per_thread behind
// some option such as: "AggressiveMultithreadedFuzzTest". In the AppVeyor
// CI we would use a less aggressive setting since at 1 core, while we still
// want this test to pass, we need a smaller value since there is so much thrashing
// we would also use a less aggressive setting when running in valgrind
const puts_per_thread = 500;
const put_thread_count = 3;
test "std.atomic.queue" {
var direct_allocator = std.heap.DirectAllocator.init();
defer direct_allocator.deinit();
var plenty_of_memory = try direct_allocator.allocator.alloc(u8, 64 * 1024 * 1024);
var plenty_of_memory = try direct_allocator.allocator.alloc(u8, 300 * 1024);
defer direct_allocator.allocator.free(plenty_of_memory);
var fixed_buffer_allocator = std.heap.ThreadSafeFixedBufferAllocator.init(plenty_of_memory);

View File

@ -35,7 +35,7 @@ pub fn Stack(comptime T: type) type {
}
pub fn pop(self: &Self) ?&Node {
var root = @atomicLoad(?&Node, &self.root, AtomicOrder.Acquire);
var root = @atomicLoad(?&Node, &self.root, AtomicOrder.SeqCst);
while (true) {
root = @cmpxchgWeak(?&Node, &self.root, root, (root ?? return null).next, AtomicOrder.SeqCst, AtomicOrder.SeqCst) ?? return root;
}
@ -56,14 +56,19 @@ const Context = struct {
get_count: usize,
puts_done: u8, // TODO make this a bool
};
const puts_per_thread = 1000;
// TODO add lazy evaluated build options and then put puts_per_thread behind
// some option such as: "AggressiveMultithreadedFuzzTest". In the AppVeyor
// CI we would use a less aggressive setting since at 1 core, while we still
// want this test to pass, we need a smaller value since there is so much thrashing
// we would also use a less aggressive setting when running in valgrind
const puts_per_thread = 500;
const put_thread_count = 3;
test "std.atomic.stack" {
var direct_allocator = std.heap.DirectAllocator.init();
defer direct_allocator.deinit();
var plenty_of_memory = try direct_allocator.allocator.alloc(u8, 64 * 1024 * 1024);
var plenty_of_memory = try direct_allocator.allocator.alloc(u8, 300 * 1024);
defer direct_allocator.allocator.free(plenty_of_memory);
var fixed_buffer_allocator = std.heap.ThreadSafeFixedBufferAllocator.init(plenty_of_memory);

View File

@ -18,10 +18,10 @@ pub const BufMap = struct {
return self;
}
pub fn deinit(self: &BufMap) void {
pub fn deinit(self: &const BufMap) void {
var it = self.hash_map.iterator();
while (true) {
const entry = it.next() ?? break;
const entry = it.next() ?? break;
self.free(entry.key);
self.free(entry.value);
}
@ -38,7 +38,7 @@ pub const BufMap = struct {
_ = try self.hash_map.put(key_copy, value_copy);
}
pub fn get(self: &BufMap, key: []const u8) ?[]const u8 {
pub fn get(self: &const BufMap, key: []const u8) ?[]const u8 {
const entry = self.hash_map.get(key) ?? return null;
return entry.value;
}
@ -50,18 +50,18 @@ pub const BufMap = struct {
}
pub fn count(self: &const BufMap) usize {
return self.hash_map.size;
return self.hash_map.count();
}
pub fn iterator(self: &const BufMap) BufMapHashMap.Iterator {
return self.hash_map.iterator();
}
fn free(self: &BufMap, value: []const u8) void {
fn free(self: &const BufMap, value: []const u8) void {
self.hash_map.allocator.free(value);
}
fn copy(self: &BufMap, value: []const u8) ![]const u8 {
fn copy(self: &const BufMap, value: []const u8) ![]const u8 {
return mem.dupe(self.hash_map.allocator, u8, value);
}
};

View File

@ -1,6 +1,8 @@
const std = @import("index.zig");
const HashMap = @import("hash_map.zig").HashMap;
const mem = @import("mem.zig");
const Allocator = mem.Allocator;
const assert = std.debug.assert;
pub const BufSet = struct {
hash_map: BufSetHashMap,
@ -14,10 +16,10 @@ pub const BufSet = struct {
return self;
}
pub fn deinit(self: &BufSet) void {
pub fn deinit(self: &const BufSet) void {
var it = self.hash_map.iterator();
while (true) {
const entry = it.next() ?? break;
const entry = it.next() ?? break;
self.free(entry.key);
}
@ -38,7 +40,7 @@ pub const BufSet = struct {
}
pub fn count(self: &const BufSet) usize {
return self.hash_map.size;
return self.hash_map.count();
}
pub fn iterator(self: &const BufSet) BufSetHashMap.Iterator {
@ -49,14 +51,30 @@ pub const BufSet = struct {
return self.hash_map.allocator;
}
fn free(self: &BufSet, value: []const u8) void {
fn free(self: &const BufSet, value: []const u8) void {
self.hash_map.allocator.free(value);
}
fn copy(self: &BufSet, value: []const u8) ![]const u8 {
fn copy(self: &const BufSet, value: []const u8) ![]const u8 {
const result = try self.hash_map.allocator.alloc(u8, value.len);
mem.copy(u8, result, value);
return result;
}
};
test "BufSet" {
var direct_allocator = std.heap.DirectAllocator.init();
defer direct_allocator.deinit();
var bufset = BufSet.init(&direct_allocator.allocator);
defer bufset.deinit();
try bufset.put("x");
assert(bufset.count() == 1);
bufset.delete("x");
assert(bufset.count() == 0);
try bufset.put("x");
try bufset.put("y");
try bufset.put("z");
}

View File

@ -66,7 +66,7 @@ pub const Buffer = struct {
self.list.deinit();
}
pub fn toSlice(self: &Buffer) []u8 {
pub fn toSlice(self: &const Buffer) []u8 {
return self.list.toSlice()[0..self.len()];
}
@ -166,5 +166,5 @@ test "simple Buffer" {
assert(buf.endsWith("orld"));
try buf2.resize(4);
assert(buf.startsWith(buf2.toSliceConst()));
assert(buf.startsWith(buf2.toSlice()));
}

View File

@ -54,6 +54,14 @@ pub fn HashMap(comptime K: type, comptime V: type,
}
unreachable; // no next item
}
// Reset the iterator to the initial index
pub fn reset(it: &Iterator) void {
it.count = 0;
it.index = 0;
// Resetting the modification count too
it.initial_modification_count = it.hm.modification_count;
}
};
pub fn init(allocator: &Allocator) Self {
@ -66,7 +74,7 @@ pub fn HashMap(comptime K: type, comptime V: type,
};
}
pub fn deinit(hm: &Self) void {
pub fn deinit(hm: &const Self) void {
hm.allocator.free(hm.entries);
}
@ -79,6 +87,10 @@ pub fn HashMap(comptime K: type, comptime V: type,
hm.incrementModificationCount();
}
pub fn count(hm: &const Self) usize {
return hm.size;
}
/// Returns the value that was already there.
pub fn put(hm: &Self, key: K, value: &const V) !?V {
if (hm.entries.len == 0) {
@ -102,14 +114,14 @@ pub fn HashMap(comptime K: type, comptime V: type,
return hm.internalPut(key, value);
}
pub fn get(hm: &Self, key: K) ?&Entry {
pub fn get(hm: &const Self, key: K) ?&Entry {
if (hm.entries.len == 0) {
return null;
}
return hm.internalGet(key);
}
pub fn contains(hm: &Self, key: K) bool {
pub fn contains(hm: &const Self, key: K) bool {
return hm.get(key) != null;
}
@ -218,7 +230,7 @@ pub fn HashMap(comptime K: type, comptime V: type,
unreachable; // put into a full map
}
fn internalGet(hm: &Self, key: K) ?&Entry {
fn internalGet(hm: &const Self, key: K) ?&Entry {
const start_index = hm.keyToIndex(key);
{var roll_over: usize = 0; while (roll_over <= hm.max_distance_from_start_index) : (roll_over += 1) {
const index = (start_index + roll_over) % hm.entries.len;
@ -230,7 +242,7 @@ pub fn HashMap(comptime K: type, comptime V: type,
return null;
}
fn keyToIndex(hm: &Self, key: K) usize {
fn keyToIndex(hm: &const Self, key: K) usize {
return usize(hash(key)) % hm.entries.len;
}
};
@ -252,12 +264,52 @@ test "basic hash map usage" {
assert(??(map.put(5, 66) catch unreachable) == 55);
assert(??(map.put(5, 55) catch unreachable) == 66);
assert(map.contains(2));
assert((??map.get(2)).value == 22);
_ = map.remove(2);
assert(map.remove(2) == null);
assert(map.get(2) == null);
}
test "iterator hash map" {
var direct_allocator = std.heap.DirectAllocator.init();
defer direct_allocator.deinit();
var reset_map = HashMap(i32, i32, hash_i32, eql_i32).init(&direct_allocator.allocator);
defer reset_map.deinit();
assert((reset_map.put(1, 11) catch unreachable) == null);
assert((reset_map.put(2, 22) catch unreachable) == null);
assert((reset_map.put(3, 33) catch unreachable) == null);
var keys = []i32 { 1, 2, 3 };
var values = []i32 { 11, 22, 33 };
var it = reset_map.iterator();
var count : usize = 0;
while (it.next()) |next| {
assert(next.key == keys[count]);
assert(next.value == values[count]);
count += 1;
}
assert(count == 3);
assert(it.next() == null);
it.reset();
count = 0;
while (it.next()) |next| {
assert(next.key == keys[count]);
assert(next.value == values[count]);
count += 1;
if (count == 2) break;
}
it.reset();
var entry = ?? it.next();
assert(entry.key == keys[0]);
assert(entry.value == values[0]);
}
fn hash_i32(x: i32) u32 {
return @bitCast(u32, x);
}

View File

@ -7,6 +7,7 @@ pub const BufferOutStream = @import("buffer.zig").BufferOutStream;
pub const HashMap = @import("hash_map.zig").HashMap;
pub const LinkedList = @import("linked_list.zig").LinkedList;
pub const IntrusiveLinkedList = @import("linked_list.zig").IntrusiveLinkedList;
pub const SegmentedList = @import("segmented_list.zig").SegmentedList;
pub const atomic = @import("atomic/index.zig");
pub const base64 = @import("base64.zig");
@ -23,6 +24,7 @@ pub const fmt = @import("fmt/index.zig");
pub const hash = @import("hash/index.zig");
pub const heap = @import("heap.zig");
pub const io = @import("io.zig");
pub const json = @import("json.zig");
pub const macho = @import("macho.zig");
pub const math = @import("math/index.zig");
pub const mem = @import("mem.zig");
@ -42,6 +44,7 @@ test "std" {
_ = @import("buffer.zig");
_ = @import("hash_map.zig");
_ = @import("linked_list.zig");
_ = @import("segmented_list.zig");
_ = @import("base64.zig");
_ = @import("build.zig");
@ -56,6 +59,7 @@ test "std" {
_ = @import("fmt/index.zig");
_ = @import("hash/index.zig");
_ = @import("io.zig");
_ = @import("json.zig");
_ = @import("macho.zig");
_ = @import("math/index.zig");
_ = @import("mem.zig");

1304
std/json.zig Normal file

File diff suppressed because it is too large Load Diff

1942
std/json_test.zig Normal file

File diff suppressed because it is too large Load Diff

View File

@ -541,6 +541,32 @@ test "math.floorPowerOfTwo" {
comptime testFloorPowerOfTwo();
}
pub fn log2_int(comptime T: type, x: T) Log2Int(T) {
assert(x != 0);
return Log2Int(T)(T.bit_count - 1 - @clz(x));
}
pub fn log2_int_ceil(comptime T: type, x: T) Log2Int(T) {
assert(x != 0);
const log2_val = log2_int(T, x);
if (T(1) << log2_val == x)
return log2_val;
return log2_val + 1;
}
test "std.math.log2_int_ceil" {
assert(log2_int_ceil(u32, 1) == 0);
assert(log2_int_ceil(u32, 2) == 1);
assert(log2_int_ceil(u32, 3) == 2);
assert(log2_int_ceil(u32, 4) == 2);
assert(log2_int_ceil(u32, 5) == 3);
assert(log2_int_ceil(u32, 6) == 3);
assert(log2_int_ceil(u32, 7) == 3);
assert(log2_int_ceil(u32, 8) == 3);
assert(log2_int_ceil(u32, 9) == 4);
assert(log2_int_ceil(u32, 10) == 4);
}
fn testFloorPowerOfTwo() void {
assert(floorPowerOfTwo(u32, 63) == 32);
assert(floorPowerOfTwo(u32, 64) == 64);

View File

@ -31,17 +31,12 @@ pub fn log2(x: var) @typeOf(x) {
return result;
},
TypeId.Int => {
return log2_int(T, x);
return math.log2_int(T, x);
},
else => @compileError("log2 not implemented for " ++ @typeName(T)),
}
}
pub fn log2_int(comptime T: type, x: T) T {
assert(x != 0);
return T.bit_count - 1 - T(@clz(x));
}
pub fn log2_32(x_: f32) f32 {
const ivln2hi: f32 = 1.4428710938e+00;
const ivln2lo: f32 = -1.7605285393e-04;

View File

@ -2473,6 +2473,7 @@ pub const Thread = struct {
},
builtin.Os.windows => {
assert(windows.WaitForSingleObject(self.data.handle, windows.INFINITE) == windows.WAIT_OBJECT_0);
assert(windows.CloseHandle(self.data.handle) != 0);
assert(windows.HeapFree(self.data.heap_handle, 0, self.data.alloc_start) != 0);
},
else => @compileError("Unsupported OS"),

368
std/segmented_list.zig Normal file
View File

@ -0,0 +1,368 @@
const std = @import("index.zig");
const assert = std.debug.assert;
const Allocator = std.mem.Allocator;
// Imagine that `fn at(self: &Self, index: usize) &T` is a customer asking for a box
// from a warehouse, based on a flat array, boxes ordered from 0 to N - 1.
// But the warehouse actually stores boxes in shelves of increasing powers of 2 sizes.
// So when the customer requests a box index, we have to translate it to shelf index
// and box index within that shelf. Illustration:
//
// customer indexes:
// shelf 0: 0
// shelf 1: 1 2
// shelf 2: 3 4 5 6
// shelf 3: 7 8 9 10 11 12 13 14
// shelf 4: 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30
// shelf 5: 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62
// ...
//
// warehouse indexes:
// shelf 0: 0
// shelf 1: 0 1
// shelf 2: 0 1 2 3
// shelf 3: 0 1 2 3 4 5 6 7
// shelf 4: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
// shelf 5: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31
// ...
//
// With this arrangement, here are the equations to get the shelf index and
// box index based on customer box index:
//
// shelf_index = floor(log2(customer_index + 1))
// shelf_count = ceil(log2(box_count + 1))
// box_index = customer_index + 1 - 2 ** shelf
// shelf_size = 2 ** shelf_index
//
// Now we complicate it a little bit further by adding a preallocated shelf, which must be
// a power of 2:
// prealloc=4
//
// customer indexes:
// prealloc: 0 1 2 3
// shelf 0: 4 5 6 7 8 9 10 11
// shelf 1: 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27
// shelf 2: 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59
// ...
//
// warehouse indexes:
// prealloc: 0 1 2 3
// shelf 0: 0 1 2 3 4 5 6 7
// shelf 1: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
// shelf 2: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31
// ...
//
// Now the equations are:
//
// shelf_index = floor(log2(customer_index + prealloc)) - log2(prealloc) - 1
// shelf_count = ceil(log2(box_count + prealloc)) - log2(prealloc) - 1
// box_index = customer_index + prealloc - 2 ** (log2(prealloc) + 1 + shelf)
// shelf_size = prealloc * 2 ** (shelf_index + 1)
/// This is a stack data structure where pointers to indexes have the same lifetime as the data structure
/// itself, unlike ArrayList where push() invalidates all existing element pointers.
/// The tradeoff is that elements are not guaranteed to be contiguous. For that, use ArrayList.
/// Note however that most elements are contiguous, making this data structure cache-friendly.
///
/// Because it never has to copy elements from an old location to a new location, it does not require
/// its elements to be copyable, and it avoids wasting memory when backed by an ArenaAllocator.
/// Note that the push() and pop() convenience methods perform a copy, but you can instead use
/// addOne(), at(), setCapacity(), and shrinkCapacity() to avoid copying items.
///
/// This data structure has O(1) push and O(1) pop.
///
/// It supports preallocated elements, making it especially well suited when the expected maximum
/// size is small. `prealloc_item_count` must be 0, or a power of 2.
pub fn SegmentedList(comptime T: type, comptime prealloc_item_count: usize) type {
return struct {
const Self = this;
const prealloc_exp = blk: {
// we don't use the prealloc_exp constant when prealloc_item_count is 0.
assert(prealloc_item_count != 0);
const value = std.math.log2_int(usize, prealloc_item_count);
assert((1 << value) == prealloc_item_count); // prealloc_item_count must be a power of 2
break :blk @typeOf(1)(value);
};
const ShelfIndex = std.math.Log2Int(usize);
prealloc_segment: [prealloc_item_count]T,
dynamic_segments: []&T,
allocator: &Allocator,
len: usize,
/// Deinitialize with `deinit`
pub fn init(allocator: &Allocator) Self {
return Self {
.allocator = allocator,
.len = 0,
.prealloc_segment = undefined,
.dynamic_segments = []&T{},
};
}
pub fn deinit(self: &Self) void {
self.freeShelves(ShelfIndex(self.dynamic_segments.len), 0);
self.allocator.free(self.dynamic_segments);
*self = undefined;
}
pub fn at(self: &Self, i: usize) &T {
assert(i < self.len);
return self.uncheckedAt(i);
}
pub fn count(self: &const Self) usize {
return self.len;
}
pub fn push(self: &Self, item: &const T) !void {
const new_item_ptr = try self.addOne();
*new_item_ptr = *item;
}
pub fn pushMany(self: &Self, items: []const T) !void {
for (items) |item| {
try self.push(item);
}
}
pub fn pop(self: &Self) ?T {
if (self.len == 0)
return null;
const index = self.len - 1;
const result = *self.uncheckedAt(index);
self.len = index;
return result;
}
pub fn addOne(self: &Self) !&T {
const new_length = self.len + 1;
try self.growCapacity(new_length);
const result = self.uncheckedAt(self.len);
self.len = new_length;
return result;
}
/// Grows or shrinks capacity to match usage.
pub fn setCapacity(self: &Self, new_capacity: usize) !void {
if (new_capacity <= usize(1) << (prealloc_exp + self.dynamic_segments.len)) {
return self.shrinkCapacity(new_capacity);
} else {
return self.growCapacity(new_capacity);
}
}
/// Only grows capacity, or retains current capacity
pub fn growCapacity(self: &Self, new_capacity: usize) !void {
const new_cap_shelf_count = shelfCount(new_capacity);
const old_shelf_count = ShelfIndex(self.dynamic_segments.len);
if (new_cap_shelf_count > old_shelf_count) {
self.dynamic_segments = try self.allocator.realloc(&T, self.dynamic_segments, new_cap_shelf_count);
var i = old_shelf_count;
errdefer {
self.freeShelves(i, old_shelf_count);
self.dynamic_segments = self.allocator.shrink(&T, self.dynamic_segments, old_shelf_count);
}
while (i < new_cap_shelf_count) : (i += 1) {
self.dynamic_segments[i] = (try self.allocator.alloc(T, shelfSize(i))).ptr;
}
}
}
/// Only shrinks capacity or retains current capacity
pub fn shrinkCapacity(self: &Self, new_capacity: usize) void {
if (new_capacity <= prealloc_item_count) {
const len = ShelfIndex(self.dynamic_segments.len);
self.freeShelves(len, 0);
self.allocator.free(self.dynamic_segments);
self.dynamic_segments = []&T{};
return;
}
const new_cap_shelf_count = shelfCount(new_capacity);
const old_shelf_count = ShelfIndex(self.dynamic_segments.len);
assert(new_cap_shelf_count <= old_shelf_count);
if (new_cap_shelf_count == old_shelf_count) {
return;
}
self.freeShelves(old_shelf_count, new_cap_shelf_count);
self.dynamic_segments = self.allocator.shrink(&T, self.dynamic_segments, new_cap_shelf_count);
}
pub fn uncheckedAt(self: &Self, index: usize) &T {
if (index < prealloc_item_count) {
return &self.prealloc_segment[index];
}
const shelf_index = shelfIndex(index);
const box_index = boxIndex(index, shelf_index);
return &self.dynamic_segments[shelf_index][box_index];
}
fn shelfCount(box_count: usize) ShelfIndex {
if (prealloc_item_count == 0) {
return std.math.log2_int_ceil(usize, box_count + 1);
}
return std.math.log2_int_ceil(usize, box_count + prealloc_item_count) - prealloc_exp - 1;
}
fn shelfSize(shelf_index: ShelfIndex) usize {
if (prealloc_item_count == 0) {
return usize(1) << shelf_index;
}
return usize(1) << (shelf_index + (prealloc_exp + 1));
}
fn shelfIndex(list_index: usize) ShelfIndex {
if (prealloc_item_count == 0) {
return std.math.log2_int(usize, list_index + 1);
}
return std.math.log2_int(usize, list_index + prealloc_item_count) - prealloc_exp - 1;
}
fn boxIndex(list_index: usize, shelf_index: ShelfIndex) usize {
if (prealloc_item_count == 0) {
return (list_index + 1) - (usize(1) << shelf_index);
}
return list_index + prealloc_item_count - (usize(1) << ((prealloc_exp + 1) + shelf_index));
}
fn freeShelves(self: &Self, from_count: ShelfIndex, to_count: ShelfIndex) void {
var i = from_count;
while (i != to_count) {
i -= 1;
self.allocator.free(self.dynamic_segments[i][0..shelfSize(i)]);
}
}
pub const Iterator = struct {
list: &Self,
index: usize,
box_index: usize,
shelf_index: ShelfIndex,
shelf_size: usize,
pub fn next(it: &Iterator) ?&T {
if (it.index >= it.list.len)
return null;
if (it.index < prealloc_item_count) {
const ptr = &it.list.prealloc_segment[it.index];
it.index += 1;
if (it.index == prealloc_item_count) {
it.box_index = 0;
it.shelf_index = 0;
it.shelf_size = prealloc_item_count * 2;
}
return ptr;
}
const ptr = &it.list.dynamic_segments[it.shelf_index][it.box_index];
it.index += 1;
it.box_index += 1;
if (it.box_index == it.shelf_size) {
it.shelf_index += 1;
it.box_index = 0;
it.shelf_size *= 2;
}
return ptr;
}
pub fn prev(it: &Iterator) ?&T {
if (it.index == 0)
return null;
it.index -= 1;
if (it.index < prealloc_item_count)
return &it.list.prealloc_segment[it.index];
if (it.box_index == 0) {
it.shelf_index -= 1;
it.shelf_size /= 2;
it.box_index = it.shelf_size - 1;
} else {
it.box_index -= 1;
}
return &it.list.dynamic_segments[it.shelf_index][it.box_index];
}
};
pub fn iterator(self: &Self, start_index: usize) Iterator {
var it = Iterator {
.list = self,
.index = start_index,
.shelf_index = undefined,
.box_index = undefined,
.shelf_size = undefined,
};
if (start_index >= prealloc_item_count) {
it.shelf_index = shelfIndex(start_index);
it.box_index = boxIndex(start_index, it.shelf_index);
it.shelf_size = shelfSize(it.shelf_index);
}
return it;
}
};
}
test "std.SegmentedList" {
var da = std.heap.DirectAllocator.init();
defer da.deinit();
var a = &da.allocator;
try testSegmentedList(0, a);
try testSegmentedList(1, a);
try testSegmentedList(2, a);
try testSegmentedList(4, a);
try testSegmentedList(8, a);
try testSegmentedList(16, a);
}
fn testSegmentedList(comptime prealloc: usize, allocator: &Allocator) !void {
var list = SegmentedList(i32, prealloc).init(allocator);
defer list.deinit();
{var i: usize = 0; while (i < 100) : (i += 1) {
try list.push(i32(i + 1));
assert(list.len == i + 1);
}}
{var i: usize = 0; while (i < 100) : (i += 1) {
assert(*list.at(i) == i32(i + 1));
}}
{
var it = list.iterator(0);
var x: i32 = 0;
while (it.next()) |item| {
x += 1;
assert(*item == x);
}
assert(x == 100);
while (it.prev()) |item| : (x -= 1) {
assert(*item == x);
}
assert(x == 0);
}
assert(??list.pop() == 100);
assert(list.len == 99);
try list.pushMany([]i32 { 1, 2, 3 });
assert(list.len == 102);
assert(??list.pop() == 3);
assert(??list.pop() == 2);
assert(??list.pop() == 1);
assert(list.len == 99);
try list.pushMany([]const i32 {});
assert(list.len == 99);
var i: i32 = 99;
while (list.pop()) |item| : (i -= 1) {
assert(item == i);
list.shrinkCapacity(list.len);
}
}

View File

@ -1,5 +1,5 @@
const is_test = @import("builtin").is_test;
const Log2Int = @import("../../math/index.zig").Log2Int;
const Log2Int = @import("std").math.Log2Int;
pub fn fixuint(comptime fp_t: type, comptime fixuint_t: type, a: fp_t) fixuint_t {
@setRuntimeSafety(is_test);

View File

@ -1,5 +1,5 @@
const __fixunsdfdi = @import("fixunsdfdi.zig").__fixunsdfdi;
const assert = @import("../../index.zig").debug.assert;
const assert = @import("std").debug.assert;
fn test__fixunsdfdi(a: f64, expected: u64) void {
const x = __fixunsdfdi(a);

View File

@ -1,5 +1,5 @@
const __fixunsdfsi = @import("fixunsdfsi.zig").__fixunsdfsi;
const assert = @import("../../index.zig").debug.assert;
const assert = @import("std").debug.assert;
fn test__fixunsdfsi(a: f64, expected: u32) void {
const x = __fixunsdfsi(a);

View File

@ -1,5 +1,5 @@
const __fixunsdfti = @import("fixunsdfti.zig").__fixunsdfti;
const assert = @import("../../index.zig").debug.assert;
const assert = @import("std").debug.assert;
fn test__fixunsdfti(a: f64, expected: u128) void {
const x = __fixunsdfti(a);

View File

@ -1,5 +1,5 @@
const __fixunssfdi = @import("fixunssfdi.zig").__fixunssfdi;
const assert = @import("../../index.zig").debug.assert;
const assert = @import("std").debug.assert;
fn test__fixunssfdi(a: f32, expected: u64) void {
const x = __fixunssfdi(a);

View File

@ -1,5 +1,5 @@
const __fixunssfsi = @import("fixunssfsi.zig").__fixunssfsi;
const assert = @import("../../index.zig").debug.assert;
const assert = @import("std").debug.assert;
fn test__fixunssfsi(a: f32, expected: u32) void {
const x = __fixunssfsi(a);

View File

@ -1,5 +1,5 @@
const __fixunssfti = @import("fixunssfti.zig").__fixunssfti;
const assert = @import("../../index.zig").debug.assert;
const assert = @import("std").debug.assert;
fn test__fixunssfti(a: f32, expected: u128) void {
const x = __fixunssfti(a);

View File

@ -1,5 +1,5 @@
const __fixunstfdi = @import("fixunstfdi.zig").__fixunstfdi;
const assert = @import("../../index.zig").debug.assert;
const assert = @import("std").debug.assert;
fn test__fixunstfdi(a: f128, expected: u64) void {
const x = __fixunstfdi(a);

View File

@ -1,5 +1,5 @@
const __fixunstfsi = @import("fixunstfsi.zig").__fixunstfsi;
const assert = @import("../../index.zig").debug.assert;
const assert = @import("std").debug.assert;
fn test__fixunstfsi(a: f128, expected: u32) void {
const x = __fixunstfsi(a);

View File

@ -1,5 +1,5 @@
const __fixunstfti = @import("fixunstfti.zig").__fixunstfti;
const assert = @import("../../index.zig").debug.assert;
const assert = @import("std").debug.assert;
fn test__fixunstfti(a: f128, expected: u128) void {
const x = __fixunstfti(a);

View File

@ -71,7 +71,8 @@ comptime {
}
}
const assert = @import("../../index.zig").debug.assert;
const std = @import("std");
const assert = std.debug.assert;
const __udivmoddi4 = @import("udivmoddi4.zig").__udivmoddi4;
@ -80,7 +81,7 @@ const __udivmoddi4 = @import("udivmoddi4.zig").__udivmoddi4;
pub fn panic(msg: []const u8, error_return_trace: ?&builtin.StackTrace) noreturn {
@setCold(true);
if (is_test) {
@import("std").debug.panic("{}", msg);
std.debug.panic("{}", msg);
} else {
unreachable;
}

View File

@ -12,7 +12,7 @@ pub fn udivmod(comptime DoubleInt: type, a: DoubleInt, b: DoubleInt, maybe_rem:
const SingleInt = @IntType(false, @divExact(DoubleInt.bit_count, 2));
const SignedDoubleInt = @IntType(true, DoubleInt.bit_count);
const Log2SingleInt = @import("../../math/index.zig").Log2Int(SingleInt);
const Log2SingleInt = @import("std").math.Log2Int(SingleInt);
const n = @ptrCast(&const [2]SingleInt, &a).*; // TODO issue #421
const d = @ptrCast(&const [2]SingleInt, &b).*; // TODO issue #421

View File

@ -6,7 +6,6 @@ const mem = std.mem;
pub const Node = struct {
id: Id,
doc_comments: ?&DocComment,
same_line_comment: ?&Token,
pub const Id = enum {
@ -36,6 +35,7 @@ pub const Node = struct {
VarType,
ErrorType,
FnProto,
PromiseType,
// Primary expressions
IntegerLiteral,
@ -69,6 +69,7 @@ pub const Node = struct {
StructField,
UnionTag,
EnumTag,
ErrorTag,
AsmInput,
AsmOutput,
AsyncAttribute,
@ -76,6 +77,13 @@ pub const Node = struct {
FieldInitializer,
};
pub fn cast(base: &Node, comptime T: type) ?&T {
if (base.id == comptime typeToId(T)) {
return @fieldParentPtr(T, "base", base);
}
return null;
}
pub fn iterate(base: &Node, index: usize) ?&Node {
comptime var i = 0;
inline while (i < @memberCount(Id)) : (i += 1) {
@ -121,6 +129,7 @@ pub const Node = struct {
pub const Root = struct {
base: Node,
doc_comments: ?&DocComment,
decls: ArrayList(&Node),
eof_token: Token,
@ -142,6 +151,7 @@ pub const Node = struct {
pub const VarDecl = struct {
base: Node,
doc_comments: ?&DocComment,
visib_token: ?Token,
name_token: Token,
eq_token: Token,
@ -190,6 +200,7 @@ pub const Node = struct {
pub const Use = struct {
base: Node,
doc_comments: ?&DocComment,
visib_token: ?Token,
expr: &Node,
semicolon_token: Token,
@ -260,7 +271,7 @@ pub const Node = struct {
const InitArg = union(enum) {
None,
Enum,
Enum: ?&Node,
Type: &Node,
};
@ -293,6 +304,7 @@ pub const Node = struct {
pub const StructField = struct {
base: Node,
doc_comments: ?&DocComment,
visib_token: ?Token,
name_token: Token,
type_expr: &Node,
@ -318,8 +330,10 @@ pub const Node = struct {
pub const UnionTag = struct {
base: Node,
doc_comments: ?&DocComment,
name_token: Token,
type_expr: ?&Node,
value_expr: ?&Node,
pub fn iterate(self: &UnionTag, index: usize) ?&Node {
var i = index;
@ -329,6 +343,11 @@ pub const Node = struct {
i -= 1;
}
if (self.value_expr) |value_expr| {
if (i < 1) return value_expr;
i -= 1;
}
return null;
}
@ -337,6 +356,9 @@ pub const Node = struct {
}
pub fn lastToken(self: &UnionTag) Token {
if (self.value_expr) |value_expr| {
return value_expr.lastToken();
}
if (self.type_expr) |type_expr| {
return type_expr.lastToken();
}
@ -347,6 +369,7 @@ pub const Node = struct {
pub const EnumTag = struct {
base: Node,
doc_comments: ?&DocComment,
name_token: Token,
value: ?&Node,
@ -374,6 +397,31 @@ pub const Node = struct {
}
};
pub const ErrorTag = struct {
base: Node,
doc_comments: ?&DocComment,
name_token: Token,
pub fn iterate(self: &ErrorTag, index: usize) ?&Node {
var i = index;
if (self.doc_comments) |comments| {
if (i < 1) return &comments.base;
i -= 1;
}
return null;
}
pub fn firstToken(self: &ErrorTag) Token {
return self.name_token;
}
pub fn lastToken(self: &ErrorTag) Token {
return self.name_token;
}
};
pub const Identifier = struct {
base: Node,
token: Token,
@ -423,6 +471,7 @@ pub const Node = struct {
pub const FnProto = struct {
base: Node,
doc_comments: ?&DocComment,
visib_token: ?Token,
fn_token: Token,
name_token: ?Token,
@ -495,6 +544,37 @@ pub const Node = struct {
}
};
pub const PromiseType = struct {
base: Node,
promise_token: Token,
result: ?Result,
pub const Result = struct {
arrow_token: Token,
return_type: &Node,
};
pub fn iterate(self: &PromiseType, index: usize) ?&Node {
var i = index;
if (self.result) |result| {
if (i < 1) return result.return_type;
i -= 1;
}
return null;
}
pub fn firstToken(self: &PromiseType) Token {
return self.promise_token;
}
pub fn lastToken(self: &PromiseType) Token {
if (self.result) |result| return result.return_type.lastToken();
return self.promise_token;
}
};
pub const ParamDecl = struct {
base: Node,
comptime_token: ?Token,
@ -585,6 +665,7 @@ pub const Node = struct {
pub const Comptime = struct {
base: Node,
doc_comments: ?&DocComment,
comptime_token: Token,
expr: &Node,
@ -1188,7 +1269,7 @@ pub const Node = struct {
ArrayAccess: &Node,
Slice: SliceRange,
ArrayInitializer: ArrayList(&Node),
StructInitializer: ArrayList(&FieldInitializer),
StructInitializer: ArrayList(&Node),
};
const CallInfo = struct {
@ -1230,7 +1311,7 @@ pub const Node = struct {
i -= exprs.len;
},
Op.StructInitializer => |fields| {
if (i < fields.len) return &fields.at(i).base;
if (i < fields.len) return fields.at(i);
i -= fields.len;
},
}
@ -1339,6 +1420,7 @@ pub const Node = struct {
pub const Suspend = struct {
base: Node,
label: ?Token,
suspend_token: Token,
payload: ?&Node,
body: ?&Node,
@ -1360,6 +1442,7 @@ pub const Node = struct {
}
pub fn firstToken(self: &Suspend) Token {
if (self.label) |label| return label;
return self.suspend_token;
}
@ -1751,6 +1834,7 @@ pub const Node = struct {
pub const TestDecl = struct {
base: Node,
doc_comments: ?&DocComment,
test_token: Token,
name: &Node,
body_node: &Node,

38
std/zig/bench.zig Normal file
View File

@ -0,0 +1,38 @@
const std = @import("std");
const mem = std.mem;
const warn = std.debug.warn;
const Tokenizer = std.zig.Tokenizer;
const Parser = std.zig.Parser;
const io = std.io;
const source = @embedFile("../os/index.zig");
var fixed_buffer_mem: [10 * 1024 * 1024]u8 = undefined;
pub fn main() !void {
var i: usize = 0;
var timer = try std.os.time.Timer.start();
const start = timer.lap();
const iterations = 100;
var memory_used: usize = 0;
while (i < iterations) : (i += 1) {
memory_used += testOnce();
}
const end = timer.read();
memory_used /= iterations;
const elapsed_s = f64(end - start) / std.os.time.ns_per_s;
const bytes_per_sec = f64(source.len * iterations) / elapsed_s;
const mb_per_sec = bytes_per_sec / (1024 * 1024);
var stdout_file = try std.io.getStdOut();
const stdout = &std.io.FileOutStream.init(&stdout_file).stream;
try stdout.print("{.3} MB/s, {} KB used \n", mb_per_sec, memory_used / 1024);
}
fn testOnce() usize {
var fixed_buf_alloc = std.heap.FixedBufferAllocator.init(fixed_buffer_mem[0..]);
var allocator = &fixed_buf_alloc.allocator;
var tokenizer = Tokenizer.init(source);
var parser = Parser.init(&tokenizer, allocator, "(memory buffer)");
_ = parser.parse() catch @panic("parse failure");
return fixed_buf_alloc.end_index;
}

View File

@ -228,7 +228,6 @@ pub const Parser = struct {
Statement: &ast.Node.Block,
ComptimeStatement: ComptimeStatementCtx,
Semicolon: &&ast.Node,
AddComments: AddCommentsCtx,
LookForSameLineComment: &&ast.Node,
LookForSameLineCommentDirect: &ast.Node,
@ -239,11 +238,12 @@ pub const Parser = struct {
ExprListItemOrEnd: ExprListCtx,
ExprListCommaOrEnd: ExprListCtx,
FieldInitListItemOrEnd: ListSave(&ast.Node.FieldInitializer),
FieldInitListCommaOrEnd: ListSave(&ast.Node.FieldInitializer),
FieldInitListItemOrEnd: ListSave(&ast.Node),
FieldInitListCommaOrEnd: ListSave(&ast.Node),
FieldListCommaOrEnd: &ast.Node.ContainerDecl,
IdentifierListItemOrEnd: ListSave(&ast.Node),
IdentifierListCommaOrEnd: ListSave(&ast.Node),
FieldInitValue: OptionalCtx,
ErrorTagListItemOrEnd: ListSave(&ast.Node),
ErrorTagListCommaOrEnd: ListSave(&ast.Node),
SwitchCaseOrEnd: ListSave(&ast.Node),
SwitchCaseCommaOrEnd: ListSave(&ast.Node),
SwitchCaseFirstItem: &ArrayList(&ast.Node),
@ -300,6 +300,7 @@ pub const Parser = struct {
ErrorTypeOrSetDecl: ErrorTypeOrSetDeclCtx,
StringLiteral: OptionalCtx,
Identifier: OptionalCtx,
ErrorTag: &&ast.Node,
IfToken: @TagType(Token.Id),
@ -324,6 +325,7 @@ pub const Parser = struct {
ast.Node.Root {
.base = undefined,
.decls = ArrayList(&ast.Node).init(arena),
.doc_comments = null,
// initialized when we get the eof token
.eof_token = undefined,
}
@ -353,7 +355,7 @@ pub const Parser = struct {
try root_node.decls.append(&line_comment.base);
}
const comments = try self.eatComments(arena);
const comments = try self.eatDocComments(arena);
const token = self.getNextToken();
switch (token.id) {
Token.Id.Keyword_test => {
@ -362,7 +364,6 @@ pub const Parser = struct {
const block = try arena.construct(ast.Node.Block {
.base = ast.Node {
.id = ast.Node.Id.Block,
.doc_comments = null,
.same_line_comment = null,
},
.label = null,
@ -373,9 +374,9 @@ pub const Parser = struct {
const test_node = try arena.construct(ast.Node.TestDecl {
.base = ast.Node {
.id = ast.Node.Id.TestDecl,
.doc_comments = comments,
.same_line_comment = null,
},
.doc_comments = comments,
.test_token = token,
.name = undefined,
.body_node = &block.base,
@ -393,7 +394,11 @@ pub const Parser = struct {
},
Token.Id.Eof => {
root_node.eof_token = token;
return Tree {.root_node = root_node, .arena_allocator = arena_allocator};
root_node.doc_comments = comments;
return Tree {
.root_node = root_node,
.arena_allocator = arena_allocator,
};
},
Token.Id.Keyword_pub => {
stack.append(State.TopLevel) catch unreachable;
@ -423,6 +428,7 @@ pub const Parser = struct {
.base = undefined,
.comptime_token = token,
.expr = &block.base,
.doc_comments = comments,
}
);
stack.append(State.TopLevel) catch unreachable;
@ -519,6 +525,7 @@ pub const Parser = struct {
.visib_token = ctx.visib_token,
.expr = undefined,
.semicolon_token = undefined,
.doc_comments = ctx.comments,
}
);
stack.append(State {
@ -555,9 +562,9 @@ pub const Parser = struct {
const fn_proto = try arena.construct(ast.Node.FnProto {
.base = ast.Node {
.id = ast.Node.Id.FnProto,
.doc_comments = ctx.comments,
.same_line_comment = null,
},
.doc_comments = ctx.comments,
.visib_token = ctx.visib_token,
.name_token = null,
.fn_token = undefined,
@ -624,9 +631,9 @@ pub const Parser = struct {
const node = try arena.construct(ast.Node.StructField {
.base = ast.Node {
.id = ast.Node.Id.StructField,
.doc_comments = null,
.same_line_comment = null,
},
.doc_comments = ctx.comments,
.visib_token = ctx.visib_token,
.name_token = identifier,
.type_expr = undefined,
@ -653,6 +660,15 @@ pub const Parser = struct {
continue;
},
State.FieldInitValue => |ctx| {
const eq_tok = self.getNextToken();
if (eq_tok.id != Token.Id.Equal) {
self.putBackToken(eq_tok);
continue;
}
stack.append(State { .Expression = ctx }) catch unreachable;
continue;
},
State.ContainerKind => |ctx| {
const token = self.getNextToken();
@ -699,7 +715,16 @@ pub const Parser = struct {
const init_arg_token = self.getNextToken();
switch (init_arg_token.id) {
Token.Id.Keyword_enum => {
container_decl.init_arg_expr = ast.Node.ContainerDecl.InitArg.Enum;
container_decl.init_arg_expr = ast.Node.ContainerDecl.InitArg {.Enum = null};
const lparen_tok = self.getNextToken();
if (lparen_tok.id == Token.Id.LParen) {
try stack.append(State { .ExpectToken = Token.Id.RParen } );
try stack.append(State { .Expression = OptionalCtx {
.RequiredNull = &container_decl.init_arg_expr.Enum,
} });
} else {
self.putBackToken(lparen_tok);
}
},
else => {
self.putBackToken(init_arg_token);
@ -709,12 +734,13 @@ pub const Parser = struct {
}
continue;
},
State.ContainerDecl => |container_decl| {
while (try self.eatLineComment(arena)) |line_comment| {
try container_decl.fields_and_decls.append(&line_comment.base);
}
const comments = try self.eatComments(arena);
const comments = try self.eatDocComments(arena);
const token = self.getNextToken();
switch (token.id) {
Token.Id.Identifier => {
@ -723,9 +749,9 @@ pub const Parser = struct {
const node = try arena.construct(ast.Node.StructField {
.base = ast.Node {
.id = ast.Node.Id.StructField,
.doc_comments = comments,
.same_line_comment = null,
},
.doc_comments = comments,
.visib_token = null,
.name_token = token,
.type_expr = undefined,
@ -744,10 +770,13 @@ pub const Parser = struct {
.base = undefined,
.name_token = token,
.type_expr = null,
.value_expr = null,
.doc_comments = comments,
}
);
stack.append(State { .FieldListCommaOrEnd = container_decl }) catch unreachable;
try stack.append(State { .FieldInitValue = OptionalCtx { .RequiredNull = &node.value_expr } });
try stack.append(State { .TypeExprBegin = OptionalCtx { .RequiredNull = &node.type_expr } });
try stack.append(State { .IfToken = Token.Id.Colon });
continue;
@ -758,6 +787,7 @@ pub const Parser = struct {
.base = undefined,
.name_token = token,
.value = null,
.doc_comments = comments,
}
);
@ -809,6 +839,9 @@ pub const Parser = struct {
continue;
},
Token.Id.RBrace => {
if (comments != null) {
return self.parseError(token, "doc comments must be attached to a node");
}
container_decl.rbrace_token = token;
continue;
},
@ -834,9 +867,9 @@ pub const Parser = struct {
const var_decl = try arena.construct(ast.Node.VarDecl {
.base = ast.Node {
.id = ast.Node.Id.VarDecl,
.doc_comments = ctx.comments,
.same_line_comment = null,
},
.doc_comments = ctx.comments,
.visib_token = ctx.visib_token,
.mut_token = ctx.mut_token,
.comptime_token = ctx.comptime_token,
@ -1093,6 +1126,22 @@ pub const Parser = struct {
}) catch unreachable;
continue;
},
Token.Id.Keyword_suspend => {
const node = try arena.construct(ast.Node.Suspend {
.base = ast.Node {
.id = ast.Node.Id.Suspend,
.same_line_comment = null,
},
.label = ctx.label,
.suspend_token = token,
.payload = null,
.body = null,
});
ctx.opt_ctx.store(&node.base);
stack.append(State { .SuspendBody = node }) catch unreachable;
try stack.append(State { .Payload = OptionalCtx { .Optional = &node.payload } });
continue;
},
Token.Id.Keyword_inline => {
stack.append(State {
.Inline = InlineCtx {
@ -1244,7 +1293,6 @@ pub const Parser = struct {
}
},
State.Statement => |block| {
const comments = try self.eatComments(arena);
const token = self.getNextToken();
switch (token.id) {
Token.Id.Keyword_comptime => {
@ -1259,7 +1307,7 @@ pub const Parser = struct {
Token.Id.Keyword_var, Token.Id.Keyword_const => {
stack.append(State {
.VarDecl = VarDeclCtx {
.comments = comments,
.comments = null,
.visib_token = null,
.comptime_token = null,
.extern_export_token = null,
@ -1274,7 +1322,6 @@ pub const Parser = struct {
const node = try arena.construct(ast.Node.Defer {
.base = ast.Node {
.id = ast.Node.Id.Defer,
.doc_comments = comments,
.same_line_comment = null,
},
.defer_token = token,
@ -1310,23 +1357,18 @@ pub const Parser = struct {
const statement = try block.statements.addOne();
stack.append(State { .LookForSameLineComment = statement }) catch unreachable;
try stack.append(State { .Semicolon = statement });
try stack.append(State { .AddComments = AddCommentsCtx {
.node_ptr = statement,
.comments = comments,
}});
try stack.append(State { .AssignmentExpressionBegin = OptionalCtx{ .Required = statement } });
continue;
}
}
},
State.ComptimeStatement => |ctx| {
const comments = try self.eatComments(arena);
const token = self.getNextToken();
switch (token.id) {
Token.Id.Keyword_var, Token.Id.Keyword_const => {
stack.append(State {
.VarDecl = VarDeclCtx {
.comments = comments,
.comments = null,
.visib_token = null,
.comptime_token = ctx.comptime_token,
.extern_export_token = null,
@ -1340,9 +1382,10 @@ pub const Parser = struct {
else => {
self.putBackToken(token);
self.putBackToken(ctx.comptime_token);
const statememt = try ctx.block.statements.addOne();
stack.append(State { .Semicolon = statememt }) catch unreachable;
try stack.append(State { .Expression = OptionalCtx { .Required = statememt } });
const statement = try ctx.block.statements.addOne();
stack.append(State { .LookForSameLineComment = statement }) catch unreachable;
try stack.append(State { .Semicolon = statement });
try stack.append(State { .Expression = OptionalCtx { .Required = statement } });
continue;
}
}
@ -1356,12 +1399,6 @@ pub const Parser = struct {
continue;
},
State.AddComments => |add_comments_ctx| {
const node = *add_comments_ctx.node_ptr;
node.doc_comments = add_comments_ctx.comments;
continue;
},
State.LookForSameLineComment => |node_ptr| {
try self.lookForSameLineComment(arena, *node_ptr);
continue;
@ -1474,6 +1511,10 @@ pub const Parser = struct {
}
},
State.FieldInitListItemOrEnd => |list_state| {
while (try self.eatLineComment(arena)) |line_comment| {
try list_state.list.append(&line_comment.base);
}
if (self.eatToken(Token.Id.RBrace)) |rbrace| {
*list_state.ptr = rbrace;
continue;
@ -1482,14 +1523,13 @@ pub const Parser = struct {
const node = try arena.construct(ast.Node.FieldInitializer {
.base = ast.Node {
.id = ast.Node.Id.FieldInitializer,
.doc_comments = null,
.same_line_comment = null,
},
.period_token = undefined,
.name_token = undefined,
.expr = undefined,
});
try list_state.list.append(node);
try list_state.list.append(&node.base);
stack.append(State { .FieldInitListCommaOrEnd = list_state }) catch unreachable;
try stack.append(State { .Expression = OptionalCtx{ .Required = &node.expr } });
@ -1527,7 +1567,7 @@ pub const Parser = struct {
try stack.append(State { .ContainerDecl = container_decl });
continue;
},
State.IdentifierListItemOrEnd => |list_state| {
State.ErrorTagListItemOrEnd => |list_state| {
while (try self.eatLineComment(arena)) |line_comment| {
try list_state.list.append(&line_comment.base);
}
@ -1537,23 +1577,18 @@ pub const Parser = struct {
continue;
}
const comments = try self.eatComments(arena);
const node_ptr = try list_state.list.addOne();
try stack.append(State { .AddComments = AddCommentsCtx {
.node_ptr = node_ptr,
.comments = comments,
}});
try stack.append(State { .IdentifierListCommaOrEnd = list_state });
try stack.append(State { .Identifier = OptionalCtx { .Required = node_ptr } });
try stack.append(State { .ErrorTagListCommaOrEnd = list_state });
try stack.append(State { .ErrorTag = node_ptr });
continue;
},
State.IdentifierListCommaOrEnd => |list_state| {
State.ErrorTagListCommaOrEnd => |list_state| {
if (try self.expectCommaOrEnd(Token.Id.RBrace)) |end| {
*list_state.ptr = end;
continue;
} else {
stack.append(State { .IdentifierListItemOrEnd = list_state }) catch unreachable;
stack.append(State { .ErrorTagListItemOrEnd = list_state }) catch unreachable;
continue;
}
},
@ -1567,11 +1602,10 @@ pub const Parser = struct {
continue;
}
const comments = try self.eatComments(arena);
const comments = try self.eatDocComments(arena);
const node = try arena.construct(ast.Node.SwitchCase {
.base = ast.Node {
.id = ast.Node.Id.SwitchCase,
.doc_comments = comments,
.same_line_comment = null,
},
.items = ArrayList(&ast.Node).init(arena),
@ -1684,9 +1718,9 @@ pub const Parser = struct {
const fn_proto = try arena.construct(ast.Node.FnProto {
.base = ast.Node {
.id = ast.Node.Id.FnProto,
.doc_comments = ctx.comments,
.same_line_comment = null,
},
.doc_comments = ctx.comments,
.visib_token = null,
.name_token = null,
.fn_token = fn_token,
@ -1857,12 +1891,13 @@ pub const Parser = struct {
}
);
stack.append(State {
stack.append(State {.LookForSameLineCommentDirect = &node.base }) catch unreachable;
try stack.append(State {
.ExpectTokenSave = ExpectTokenSave {
.id = Token.Id.Pipe,
.ptr = &node.rpipe,
}
}) catch unreachable;
});
try stack.append(State { .Identifier = OptionalCtx { .Required = &node.value_symbol } });
try stack.append(State {
.OptionalTokenSave = OptionalTokenSave {
@ -2317,7 +2352,7 @@ pub const Parser = struct {
.base = undefined,
.lhs = lhs,
.op = ast.Node.SuffixOp.Op {
.StructInitializer = ArrayList(&ast.Node.FieldInitializer).init(arena),
.StructInitializer = ArrayList(&ast.Node).init(arena),
},
.rtoken = undefined,
}
@ -2325,7 +2360,7 @@ pub const Parser = struct {
stack.append(State { .CurlySuffixExpressionEnd = opt_ctx.toRequired() }) catch unreachable;
try stack.append(State { .IfToken = Token.Id.LBrace });
try stack.append(State {
.FieldInitListItemOrEnd = ListSave(&ast.Node.FieldInitializer) {
.FieldInitListItemOrEnd = ListSave(&ast.Node) {
.list = &node.op.StructInitializer,
.ptr = &node.rtoken,
}
@ -2550,6 +2585,29 @@ pub const Parser = struct {
_ = try self.createToCtxLiteral(arena, opt_ctx, ast.Node.Unreachable, token);
continue;
},
Token.Id.Keyword_promise => {
const node = try arena.construct(ast.Node.PromiseType {
.base = ast.Node {
.id = ast.Node.Id.PromiseType,
.same_line_comment = null,
},
.promise_token = token,
.result = null,
});
opt_ctx.store(&node.base);
const next_token = self.getNextToken();
if (next_token.id != Token.Id.Arrow) {
self.putBackToken(next_token);
continue;
}
node.result = ast.Node.PromiseType.Result {
.arrow_token = next_token,
.return_type = undefined,
};
const return_type_ptr = &((??node.result).return_type);
try stack.append(State { .Expression = OptionalCtx { .Required = return_type_ptr, } });
continue;
},
Token.Id.StringLiteral, Token.Id.MultilineStringLiteralLine => {
opt_ctx.store((try self.parseStringLiteral(arena, token)) ?? unreachable);
continue;
@ -2656,9 +2714,9 @@ pub const Parser = struct {
const fn_proto = try arena.construct(ast.Node.FnProto {
.base = ast.Node {
.id = ast.Node.Id.FnProto,
.doc_comments = null,
.same_line_comment = null,
},
.doc_comments = null,
.visib_token = null,
.name_token = null,
.fn_token = token,
@ -2680,9 +2738,9 @@ pub const Parser = struct {
const fn_proto = try arena.construct(ast.Node.FnProto {
.base = ast.Node {
.id = ast.Node.Id.FnProto,
.doc_comments = null,
.same_line_comment = null,
},
.doc_comments = null,
.visib_token = null,
.name_token = null,
.fn_token = undefined,
@ -2773,7 +2831,6 @@ pub const Parser = struct {
const node = try arena.construct(ast.Node.ErrorSetDecl {
.base = ast.Node {
.id = ast.Node.Id.ErrorSetDecl,
.doc_comments = null,
.same_line_comment = null,
},
.error_token = ctx.error_token,
@ -2783,7 +2840,7 @@ pub const Parser = struct {
ctx.opt_ctx.store(&node.base);
stack.append(State {
.IdentifierListItemOrEnd = ListSave(&ast.Node) {
.ErrorTagListItemOrEnd = ListSave(&ast.Node) {
.list = &node.decls,
.ptr = &node.rbrace_token,
}
@ -2803,6 +2860,7 @@ pub const Parser = struct {
}
);
},
State.Identifier => |opt_ctx| {
if (self.eatToken(Token.Id.Identifier)) |ident_token| {
_ = try self.createToCtxLiteral(arena, opt_ctx, ast.Node.Identifier, ident_token);
@ -2815,6 +2873,25 @@ pub const Parser = struct {
}
},
State.ErrorTag => |node_ptr| {
const comments = try self.eatDocComments(arena);
const ident_token = self.getNextToken();
if (ident_token.id != Token.Id.Identifier) {
return self.parseError(ident_token, "expected {}, found {}",
@tagName(Token.Id.Identifier), @tagName(ident_token.id));
}
const node = try arena.construct(ast.Node.ErrorTag {
.base = ast.Node {
.id = ast.Node.Id.ErrorTag,
.same_line_comment = null,
},
.doc_comments = comments,
.name_token = ident_token,
});
*node_ptr = &node.base;
continue;
},
State.ExpectToken => |token_id| {
_ = try self.expectToken(token_id);
@ -2853,7 +2930,7 @@ pub const Parser = struct {
}
}
fn eatComments(self: &Parser, arena: &mem.Allocator) !?&ast.Node.DocComment {
fn eatDocComments(self: &Parser, arena: &mem.Allocator) !?&ast.Node.DocComment {
var result: ?&ast.Node.DocComment = null;
while (true) {
if (self.eatToken(Token.Id.DocComment)) |line_comment| {
@ -2864,7 +2941,6 @@ pub const Parser = struct {
const comment_node = try arena.construct(ast.Node.DocComment {
.base = ast.Node {
.id = ast.Node.Id.DocComment,
.doc_comments = null,
.same_line_comment = null,
},
.lines = ArrayList(Token).init(arena),
@ -2886,7 +2962,6 @@ pub const Parser = struct {
return try arena.construct(ast.Node.LineComment {
.base = ast.Node {
.id = ast.Node.Id.LineComment,
.doc_comments = null,
.same_line_comment = null,
},
.token = token,
@ -3022,6 +3097,7 @@ pub const Parser = struct {
const node = try self.createToCtxNode(arena, ctx, ast.Node.Suspend,
ast.Node.Suspend {
.base = undefined,
.label = null,
.suspend_token = *token,
.payload = null,
.body = null,
@ -3047,6 +3123,7 @@ pub const Parser = struct {
stack.append(State { .Else = &node.@"else" }) catch unreachable;
try stack.append(State { .Expression = OptionalCtx { .Required = &node.body } });
try stack.append(State { .PointerPayload = OptionalCtx { .Optional = &node.payload } });
try stack.append(State { .LookForSameLineComment = &node.condition });
try stack.append(State { .ExpectToken = Token.Id.RParen });
try stack.append(State { .Expression = OptionalCtx { .Required = &node.condition } });
try stack.append(State { .ExpectToken = Token.Id.LParen });
@ -3078,7 +3155,6 @@ pub const Parser = struct {
const node = try arena.construct(ast.Node.Switch {
.base = ast.Node {
.id = ast.Node.Id.Switch,
.doc_comments = null,
.same_line_comment = null,
},
.switch_token = *token,
@ -3106,6 +3182,7 @@ pub const Parser = struct {
.base = undefined,
.comptime_token = *token,
.expr = undefined,
.doc_comments = null,
}
);
try stack.append(State { .Expression = OptionalCtx { .Required = &node.expr } });
@ -3248,7 +3325,6 @@ pub const Parser = struct {
const id = ast.Node.typeToId(T);
break :blk ast.Node {
.id = id,
.doc_comments = null,
.same_line_comment = null,
};
};
@ -3383,11 +3459,10 @@ pub const Parser = struct {
Expression: &ast.Node,
VarDecl: &ast.Node.VarDecl,
Statement: &ast.Node,
FieldInitializer: &ast.Node.FieldInitializer,
PrintIndent,
Indent: usize,
PrintSameLineComment: ?&Token,
PrintComments: &ast.Node,
PrintLineComment: &Token,
};
pub fn renderSource(self: &Parser, stream: var, root_node: &ast.Node.Root) !void {
@ -3426,7 +3501,7 @@ pub const Parser = struct {
switch (decl.id) {
ast.Node.Id.FnProto => {
const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl);
try self.renderComments(stream, &fn_proto.base, indent);
try self.renderComments(stream, fn_proto, indent);
if (fn_proto.body_node) |body_node| {
stack.append(RenderState { .Expression = body_node}) catch unreachable;
@ -3448,12 +3523,12 @@ pub const Parser = struct {
},
ast.Node.Id.VarDecl => {
const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", decl);
try self.renderComments(stream, &var_decl.base, indent);
try self.renderComments(stream, var_decl, indent);
try stack.append(RenderState { .VarDecl = var_decl});
},
ast.Node.Id.TestDecl => {
const test_decl = @fieldParentPtr(ast.Node.TestDecl, "base", decl);
try self.renderComments(stream, &test_decl.base, indent);
try self.renderComments(stream, test_decl, indent);
try stream.print("test ");
try stack.append(RenderState { .Expression = test_decl.body_node });
try stack.append(RenderState { .Text = " " });
@ -3461,6 +3536,7 @@ pub const Parser = struct {
},
ast.Node.Id.StructField => {
const field = @fieldParentPtr(ast.Node.StructField, "base", decl);
try self.renderComments(stream, field, indent);
if (field.visib_token) |visib_token| {
try stream.print("{} ", self.tokenizer.getTokenSlice(visib_token));
}
@ -3470,9 +3546,16 @@ pub const Parser = struct {
},
ast.Node.Id.UnionTag => {
const tag = @fieldParentPtr(ast.Node.UnionTag, "base", decl);
try self.renderComments(stream, tag, indent);
try stream.print("{}", self.tokenizer.getTokenSlice(tag.name_token));
try stack.append(RenderState { .Text = "," });
if (tag.value_expr) |value_expr| {
try stack.append(RenderState { .Expression = value_expr });
try stack.append(RenderState { .Text = " = " });
}
if (tag.type_expr) |type_expr| {
try stream.print(": ");
try stack.append(RenderState { .Expression = type_expr});
@ -3480,6 +3563,7 @@ pub const Parser = struct {
},
ast.Node.Id.EnumTag => {
const tag = @fieldParentPtr(ast.Node.EnumTag, "base", decl);
try self.renderComments(stream, tag, indent);
try stream.print("{}", self.tokenizer.getTokenSlice(tag.name_token));
try stack.append(RenderState { .Text = "," });
@ -3488,6 +3572,11 @@ pub const Parser = struct {
try stack.append(RenderState { .Expression = value});
}
},
ast.Node.Id.ErrorTag => {
const tag = @fieldParentPtr(ast.Node.ErrorTag, "base", decl);
try self.renderComments(stream, tag, indent);
try stream.print("{}", self.tokenizer.getTokenSlice(tag.name_token));
},
ast.Node.Id.Comptime => {
if (requireSemiColon(decl)) {
try stack.append(RenderState { .Text = ";" });
@ -3502,17 +3591,12 @@ pub const Parser = struct {
}
},
RenderState.FieldInitializer => |field_init| {
try stream.print(".{}", self.tokenizer.getTokenSlice(field_init.name_token));
try stream.print(" = ");
try stack.append(RenderState { .Expression = field_init.expr });
},
RenderState.VarDecl => |var_decl| {
try stack.append(RenderState { .Text = ";" });
if (var_decl.init_node) |init_node| {
try stack.append(RenderState { .Expression = init_node });
try stack.append(RenderState { .Text = " = " });
const text = if (init_node.id == ast.Node.Id.MultilineStringLiteral) " =" else " = ";
try stack.append(RenderState { .Text = text });
}
if (var_decl.align_node) |align_node| {
try stack.append(RenderState { .Text = ")" });
@ -3630,6 +3714,9 @@ pub const Parser = struct {
},
ast.Node.Id.Suspend => {
const suspend_node = @fieldParentPtr(ast.Node.Suspend, "base", base);
if (suspend_node.label) |label| {
try stream.print("{}: ", self.tokenizer.getTokenSlice(label));
}
try stream.print("{}", self.tokenizer.getTokenSlice(suspend_node.suspend_token));
if (suspend_node.body) |body| {
@ -3803,19 +3890,41 @@ pub const Parser = struct {
try stack.append(RenderState { .Expression = suffix_op.lhs });
continue;
}
if (field_inits.len == 1) {
const field_init = field_inits.at(0);
try stack.append(RenderState { .Text = " }" });
try stack.append(RenderState { .Expression = field_init });
try stack.append(RenderState { .Text = "{ " });
try stack.append(RenderState { .Expression = suffix_op.lhs });
continue;
}
try stack.append(RenderState { .Text = "}"});
try stack.append(RenderState.PrintIndent);
try stack.append(RenderState { .Indent = indent });
try stack.append(RenderState { .Text = "\n" });
var i = field_inits.len;
while (i != 0) {
i -= 1;
const field_init = field_inits.at(i);
try stack.append(RenderState { .Text = ",\n" });
try stack.append(RenderState { .FieldInitializer = field_init });
if (field_init.id != ast.Node.Id.LineComment) {
try stack.append(RenderState { .Text = "," });
}
try stack.append(RenderState { .Expression = field_init });
try stack.append(RenderState.PrintIndent);
if (i != 0) {
try stack.append(RenderState { .Text = blk: {
const prev_node = field_inits.at(i - 1);
const loc = self.tokenizer.getTokenLocation(prev_node.lastToken().end, field_init.firstToken());
if (loc.line >= 2) {
break :blk "\n\n";
}
break :blk "\n";
}});
}
}
try stack.append(RenderState { .Indent = indent + indent_delta });
try stack.append(RenderState { .Text = " {\n"});
try stack.append(RenderState { .Text = "{\n"});
try stack.append(RenderState { .Expression = suffix_op.lhs });
},
ast.Node.SuffixOp.Op.ArrayInitializer => |exprs| {
@ -3829,7 +3938,7 @@ pub const Parser = struct {
try stack.append(RenderState { .Text = "}" });
try stack.append(RenderState { .Expression = expr });
try stack.append(RenderState { .Text = " {" });
try stack.append(RenderState { .Text = "{" });
try stack.append(RenderState { .Expression = suffix_op.lhs });
continue;
}
@ -3846,7 +3955,7 @@ pub const Parser = struct {
try stack.append(RenderState.PrintIndent);
}
try stack.append(RenderState { .Indent = indent + indent_delta });
try stack.append(RenderState { .Text = " {\n"});
try stack.append(RenderState { .Text = "{\n"});
try stack.append(RenderState { .Expression = suffix_op.lhs });
},
}
@ -4014,7 +4123,15 @@ pub const Parser = struct {
switch (container_decl.init_arg_expr) {
ast.Node.ContainerDecl.InitArg.None => try stack.append(RenderState { .Text = " "}),
ast.Node.ContainerDecl.InitArg.Enum => try stack.append(RenderState { .Text = "(enum) "}),
ast.Node.ContainerDecl.InitArg.Enum => |enum_tag_type| {
if (enum_tag_type) |expr| {
try stack.append(RenderState { .Text = ")) "});
try stack.append(RenderState { .Expression = expr});
try stack.append(RenderState { .Text = "(enum("});
} else {
try stack.append(RenderState { .Text = "(enum) "});
}
},
ast.Node.ContainerDecl.InitArg.Type => |type_expr| {
try stack.append(RenderState { .Text = ") "});
try stack.append(RenderState { .Expression = type_expr});
@ -4024,14 +4141,39 @@ pub const Parser = struct {
},
ast.Node.Id.ErrorSetDecl => {
const err_set_decl = @fieldParentPtr(ast.Node.ErrorSetDecl, "base", base);
try stream.print("error ");
const decls = err_set_decl.decls.toSliceConst();
if (decls.len == 0) {
try stream.write("error{}");
continue;
}
if (decls.len == 1) blk: {
const node = decls[0];
// if there are any doc comments or same line comments
// don't try to put it all on one line
if (node.same_line_comment != null) break :blk;
if (node.cast(ast.Node.ErrorTag)) |tag| {
if (tag.doc_comments != null) break :blk;
} else {
break :blk;
}
try stream.write("error{");
try stack.append(RenderState { .Text = "}" });
try stack.append(RenderState { .TopLevelDecl = node });
continue;
}
try stream.write("error{");
try stack.append(RenderState { .Text = "}"});
try stack.append(RenderState.PrintIndent);
try stack.append(RenderState { .Indent = indent });
try stack.append(RenderState { .Text = "\n"});
const decls = err_set_decl.decls.toSliceConst();
var i = decls.len;
while (i != 0) {
i -= 1;
@ -4039,8 +4181,7 @@ pub const Parser = struct {
if (node.id != ast.Node.Id.LineComment) {
try stack.append(RenderState { .Text = "," });
}
try stack.append(RenderState { .Expression = node });
try stack.append(RenderState { .PrintComments = node });
try stack.append(RenderState { .TopLevelDecl = node });
try stack.append(RenderState.PrintIndent);
try stack.append(RenderState {
.Text = blk: {
@ -4056,7 +4197,6 @@ pub const Parser = struct {
});
}
try stack.append(RenderState { .Indent = indent + indent_delta});
try stack.append(RenderState { .Text = "{"});
},
ast.Node.Id.MultilineStringLiteral => {
const multiline_str_literal = @fieldParentPtr(ast.Node.MultilineStringLiteral, "base", base);
@ -4068,7 +4208,7 @@ pub const Parser = struct {
try stream.writeByteNTimes(' ', indent + indent_delta);
try stream.print("{}", self.tokenizer.getTokenSlice(t));
}
try stream.writeByteNTimes(' ', indent + indent_delta);
try stream.writeByteNTimes(' ', indent);
},
ast.Node.Id.UndefinedLiteral => {
const undefined_literal = @fieldParentPtr(ast.Node.UndefinedLiteral, "base", base);
@ -4151,6 +4291,14 @@ pub const Parser = struct {
try stack.append(RenderState { .Text = self.tokenizer.getTokenSlice(visib_token) });
}
},
ast.Node.Id.PromiseType => {
const promise_type = @fieldParentPtr(ast.Node.PromiseType, "base", base);
try stream.write(self.tokenizer.getTokenSlice(promise_type.promise_token));
if (promise_type.result) |result| {
try stream.write(self.tokenizer.getTokenSlice(result.arrow_token));
try stack.append(RenderState { .Expression = result.return_type});
}
},
ast.Node.Id.LineComment => {
const line_comment_node = @fieldParentPtr(ast.Node.LineComment, "base", base);
try stream.write(self.tokenizer.getTokenSlice(line_comment_node.token));
@ -4158,14 +4306,21 @@ pub const Parser = struct {
ast.Node.Id.DocComment => unreachable, // doc comments are attached to nodes
ast.Node.Id.Switch => {
const switch_node = @fieldParentPtr(ast.Node.Switch, "base", base);
const cases = switch_node.cases.toSliceConst();
try stream.print("{} (", self.tokenizer.getTokenSlice(switch_node.switch_token));
if (cases.len == 0) {
try stack.append(RenderState { .Text = ") {}"});
try stack.append(RenderState { .Expression = switch_node.expr });
continue;
}
try stack.append(RenderState { .Text = "}"});
try stack.append(RenderState.PrintIndent);
try stack.append(RenderState { .Indent = indent });
try stack.append(RenderState { .Text = "\n"});
const cases = switch_node.cases.toSliceConst();
var i = cases.len;
while (i != 0) {
i -= 1;
@ -4192,8 +4347,6 @@ pub const Parser = struct {
ast.Node.Id.SwitchCase => {
const switch_case = @fieldParentPtr(ast.Node.SwitchCase, "base", base);
try self.renderComments(stream, base, indent);
try stack.append(RenderState { .PrintSameLineComment = base.same_line_comment });
try stack.append(RenderState { .Text = "," });
try stack.append(RenderState { .Expression = switch_case.expr });
@ -4372,7 +4525,18 @@ pub const Parser = struct {
}
}
try stack.append(RenderState { .Expression = if_node.body });
if (if_node.condition.same_line_comment) |comment| {
try stack.append(RenderState { .Indent = indent });
try stack.append(RenderState { .Expression = if_node.body });
try stack.append(RenderState.PrintIndent);
try stack.append(RenderState { .Indent = indent + indent_delta });
try stack.append(RenderState { .Text = "\n" });
try stack.append(RenderState { .PrintLineComment = comment });
} else {
try stack.append(RenderState { .Expression = if_node.body });
}
try stack.append(RenderState { .Text = " " });
if (if_node.payload) |payload| {
@ -4505,6 +4669,7 @@ pub const Parser = struct {
ast.Node.Id.StructField,
ast.Node.Id.UnionTag,
ast.Node.Id.EnumTag,
ast.Node.Id.ErrorTag,
ast.Node.Id.Root,
ast.Node.Id.VarDecl,
ast.Node.Id.Use,
@ -4512,7 +4677,6 @@ pub const Parser = struct {
ast.Node.Id.ParamDecl => unreachable,
},
RenderState.Statement => |base| {
try self.renderComments(stream, base, indent);
try stack.append(RenderState { .PrintSameLineComment = base.same_line_comment } );
switch (base.id) {
ast.Node.Id.VarDecl => {
@ -4533,15 +4697,14 @@ pub const Parser = struct {
const comment_token = maybe_comment ?? break :blk;
try stream.print(" {}", self.tokenizer.getTokenSlice(comment_token));
},
RenderState.PrintComments => |node| blk: {
try self.renderComments(stream, node, indent);
RenderState.PrintLineComment => |comment_token| {
try stream.write(self.tokenizer.getTokenSlice(comment_token));
},
}
}
}
fn renderComments(self: &Parser, stream: var, node: &ast.Node, indent: usize) !void {
fn renderComments(self: &Parser, stream: var, node: var, indent: usize) !void {
const comment = node.doc_comments ?? return;
for (comment.lines.toSliceConst()) |line_token| {
try stream.print("{}\n", self.tokenizer.getTokenSlice(line_token));

View File

@ -1,6 +1,129 @@
// Canonical-form round trip: input is already formatted, so rendering it
// must reproduce it unchanged (testCanonical presumably parses and
// re-renders, asserting equality — confirm against its definition).
test "zig fmt: same-line comment after non-block if expression" {
    try testCanonical(
        \\comptime {
        \\    if (sr > n_uword_bits - 1) {
        \\        // d > r
        \\        return 0;
        \\    }
        \\}
        \\
    );
}
// Round trip of a switch with no cases: expected to stay `switch (err) {}`
// on one line rather than being expanded to a multi-line body.
test "zig fmt: switch with empty body" {
    try testCanonical(
        \\test "" {
        \\    foo() catch |err| switch (err) {};
        \\}
        \\
    );
}
// Round trip: a trailing same-line comment on a comptime statement should
// be kept on the same line by the renderer.
test "zig fmt: same-line comment on comptime expression" {
    try testCanonical(
        \\test "" {
        \\    comptime assert(@typeId(T) == builtin.TypeId.Int); // must pass an integer to absInt
        \\}
        \\
    );
}
// Round trip: a float literal with a negative exponent must be emitted
// verbatim (no re-formatting of the literal text).
test "zig fmt: float literal with exponent" {
    try testCanonical(
        \\pub const f64_true_min = 4.94065645841246544177e-324;
        \\
    );
}
// Round trip: line comments and blank-line groupings interleaved with the
// fields of a struct initializer should survive rendering in place.
test "zig fmt: line comments in struct initializer" {
    try testCanonical(
        \\fn foo() void {
        \\    return Self{
        \\        .a = b,
        \\
        \\        // Initialize these two fields to buffer_size so that
        \\        // in `readFn` we treat the state as being able to read
        \\        .start_index = buffer_size,
        \\        .end_index = buffer_size,
        \\
        \\        // middle
        \\
        \\        .a = b,
        \\
        \\        // end
        \\    };
        \\}
        \\
    );
}
// Round trip: `///` doc comments attached to a struct field must be
// rendered above the field, indented with it.
test "zig fmt: doc comments before struct field" {
    try testCanonical(
        \\pub const Allocator = struct {
        \\    /// Allocate byte_count bytes and return them in a slice, with the
        \\    /// slice's pointer aligned at least to alignment bytes.
        \\    allocFn: fn() void,
        \\};
        \\
    );
}
// Round trip for the error-set forms handled by the renderer: multi-line
// sets with blank-line groups and doc comments, a trailing line comment,
// a single-tag set collapsed to one line, and the empty set `error{}`.
test "zig fmt: error set declaration" {
    try testCanonical(
        \\const E = error{
        \\    A,
        \\    B,
        \\
        \\    C,
        \\};
        \\
        \\const Error = error{
        \\    /// no more memory
        \\    OutOfMemory,
        \\};
        \\
        \\const Error = error{
        \\    /// no more memory
        \\    OutOfMemory,
        \\
        \\    /// another
        \\    Another,
        \\
        \\    // end
        \\};
        \\
        \\const Error = error{OutOfMemory};
        \\const Error = error{};
        \\
    );
}
// Round trip: a union with an explicit enum tag type, `union(enum(u32))`,
// and explicitly assigned tag values must render unchanged.
test "zig fmt: union(enum(u32)) with assigned enum values" {
    try testCanonical(
        \\const MultipleChoice = union(enum(u32)) {
        \\    A = 20,
        \\    B = 40,
        \\    C = 60,
        \\    D = 1000,
        \\};
        \\
    );
}
// Round trip: a labeled suspend block with a payload capture (`s: suspend |p|`)
// must keep its label, payload, and labeled break intact.
test "zig fmt: labeled suspend" {
    try testCanonical(
        \\fn foo() void {
        \\    s: suspend |p| {
        \\        break :s;
        \\    }
        \\}
        \\
    );
}
test "zig fmt: comments before error set decl" {
try testCanonical(
\\const UnexpectedError = error {
\\const UnexpectedError = error{
\\ /// The Operating System returned an undocumented error code.
\\ Unexpected,
\\ // another
@ -92,7 +215,7 @@ test "zig fmt: same-line comment after field decl" {
test "zig fmt: array literal with 1 item on 1 line" {
try testCanonical(
\\var s = []const u64 {0} ** 25;
\\var s = []const u64{0} ** 25;
\\
);
}
@ -117,7 +240,7 @@ test "zig fmt: comments before global variables" {
);
}
test "zig fmt: comments before statements" {
test "zig fmt: comments in statements" {
try testCanonical(
\\test "std" {
\\ // statement comment
@ -147,22 +270,6 @@ test "zig fmt: comments before test decl" {
);
}
test "zig fmt: comments before variable declarations" {
try testCanonical(
\\const std = @import("std");
\\
\\pub fn main() !void {
\\ /// If this program is run without stdout attached, exit with an error.
\\ /// another comment
\\ var stdout_file = try std.io.getStdOut;
\\ // If this program is run without stdout attached, exit with an error.
\\ // another comment
\\ var stdout_file = try std.io.getStdOut;
\\}
\\
);
}
test "zig fmt: preserve spacing" {
try testCanonical(
\\const std = @import("std");
@ -423,10 +530,18 @@ test "zig fmt: functions" {
test "zig fmt: multiline string" {
try testCanonical(
\\const s =
\\ \\ something
\\ \\ something else
\\test "" {
\\ const s1 =
\\ \\one
\\ \\two)
\\ \\three
\\ ;
\\ const s2 =
\\ c\\one
\\ c\\two)
\\ c\\three
\\ ;
\\}
\\
);
}
@ -570,26 +685,14 @@ test "zig fmt: union declaration" {
);
}
test "zig fmt: error set declaration" {
try testCanonical(
\\const E = error {
\\ A,
\\ B,
\\
\\ C,
\\};
\\
);
}
test "zig fmt: arrays" {
try testCanonical(
\\test "test array" {
\\ const a: [2]u8 = [2]u8 {
\\ const a: [2]u8 = [2]u8{
\\ 1,
\\ 2,
\\ };
\\ const a: [2]u8 = []u8 {
\\ const a: [2]u8 = []u8{
\\ 1,
\\ 2,
\\ };
@ -601,15 +704,17 @@ test "zig fmt: arrays" {
test "zig fmt: container initializers" {
try testCanonical(
\\const a1 = []u8{};
\\const a2 = []u8 {
\\const a0 = []u8{};
\\const a1 = []u8{1};
\\const a2 = []u8{
\\ 1,
\\ 2,
\\ 3,
\\ 4,
\\};
\\const s1 = S{};
\\const s2 = S {
\\const s0 = S{};
\\const s1 = S{ .a = 1 };
\\const s2 = S{
\\ .a = 1,
\\ .b = 2,
\\};
@ -678,9 +783,6 @@ test "zig fmt: switch" {
\\ Float: f64,
\\ };
\\
\\ const u = Union {
\\ .Int = 0,
\\ };
\\ switch (u) {
\\ Union.Int => |int| {},
\\ Union.Float => |*float| unreachable,
@ -759,11 +861,6 @@ test "zig fmt: while" {
test "zig fmt: for" {
try testCanonical(
\\test "for" {
\\ const a = []u8 {
\\ 1,
\\ 2,
\\ 3,
\\ };
\\ for (a) |v| {
\\ continue;
\\ }
@ -940,12 +1037,12 @@ test "zig fmt: coroutines" {
\\ suspend;
\\ x += 1;
\\ suspend |p| {}
\\ const p = async simpleAsyncFn() catch unreachable;
\\ const p: promise->void = async simpleAsyncFn() catch unreachable;
\\ await p;
\\}
\\
\\test "coroutine suspend, resume, cancel" {
\\ const p = try async<std.debug.global_allocator> testAsyncSeq();
\\ const p: promise = try async<std.debug.global_allocator> testAsyncSeq();
\\ resume p;
\\ cancel p;
\\}
@ -994,15 +1091,6 @@ test "zig fmt: error return" {
);
}
test "zig fmt: struct literals with fields on each line" {
try testCanonical(
\\var self = BufSet {
\\ .hash_map = BufSetHashMap.init(a),
\\};
\\
);
}
const std = @import("std");
const mem = std.mem;
const warn = std.debug.warn;
@ -1028,15 +1116,15 @@ fn testParse(source: []const u8, allocator: &mem.Allocator) ![]u8 {
return buffer.toOwnedSlice();
}
fn testCanonical(source: []const u8) !void {
fn testTransform(source: []const u8, expected_source: []const u8) !void {
const needed_alloc_count = x: {
// Try it once with unlimited memory, make sure it works
var fixed_allocator = std.heap.FixedBufferAllocator.init(fixed_buffer_mem[0..]);
var failing_allocator = std.debug.FailingAllocator.init(&fixed_allocator.allocator, @maxValue(usize));
const result_source = try testParse(source, &failing_allocator.allocator);
if (!mem.eql(u8, result_source, source)) {
if (!mem.eql(u8, result_source, expected_source)) {
warn("\n====== expected this output: =========\n");
warn("{}", source);
warn("{}", expected_source);
warn("\n======== instead found this: =========\n");
warn("{}", result_source);
warn("\n======================================\n");
@ -1067,3 +1155,7 @@ fn testCanonical(source: []const u8) !void {
}
}
fn testCanonical(source: []const u8) !void {
return testTransform(source, source);
}

View File

@ -6,59 +6,60 @@ pub const Token = struct {
start: usize,
end: usize,
const KeywordId = struct {
const Keyword = struct {
bytes: []const u8,
id: Id,
};
const keywords = []KeywordId {
KeywordId{.bytes="align", .id = Id.Keyword_align},
KeywordId{.bytes="and", .id = Id.Keyword_and},
KeywordId{.bytes="asm", .id = Id.Keyword_asm},
KeywordId{.bytes="async", .id = Id.Keyword_async},
KeywordId{.bytes="await", .id = Id.Keyword_await},
KeywordId{.bytes="break", .id = Id.Keyword_break},
KeywordId{.bytes="catch", .id = Id.Keyword_catch},
KeywordId{.bytes="cancel", .id = Id.Keyword_cancel},
KeywordId{.bytes="comptime", .id = Id.Keyword_comptime},
KeywordId{.bytes="const", .id = Id.Keyword_const},
KeywordId{.bytes="continue", .id = Id.Keyword_continue},
KeywordId{.bytes="defer", .id = Id.Keyword_defer},
KeywordId{.bytes="else", .id = Id.Keyword_else},
KeywordId{.bytes="enum", .id = Id.Keyword_enum},
KeywordId{.bytes="errdefer", .id = Id.Keyword_errdefer},
KeywordId{.bytes="error", .id = Id.Keyword_error},
KeywordId{.bytes="export", .id = Id.Keyword_export},
KeywordId{.bytes="extern", .id = Id.Keyword_extern},
KeywordId{.bytes="false", .id = Id.Keyword_false},
KeywordId{.bytes="fn", .id = Id.Keyword_fn},
KeywordId{.bytes="for", .id = Id.Keyword_for},
KeywordId{.bytes="if", .id = Id.Keyword_if},
KeywordId{.bytes="inline", .id = Id.Keyword_inline},
KeywordId{.bytes="nakedcc", .id = Id.Keyword_nakedcc},
KeywordId{.bytes="noalias", .id = Id.Keyword_noalias},
KeywordId{.bytes="null", .id = Id.Keyword_null},
KeywordId{.bytes="or", .id = Id.Keyword_or},
KeywordId{.bytes="packed", .id = Id.Keyword_packed},
KeywordId{.bytes="pub", .id = Id.Keyword_pub},
KeywordId{.bytes="resume", .id = Id.Keyword_resume},
KeywordId{.bytes="return", .id = Id.Keyword_return},
KeywordId{.bytes="section", .id = Id.Keyword_section},
KeywordId{.bytes="stdcallcc", .id = Id.Keyword_stdcallcc},
KeywordId{.bytes="struct", .id = Id.Keyword_struct},
KeywordId{.bytes="suspend", .id = Id.Keyword_suspend},
KeywordId{.bytes="switch", .id = Id.Keyword_switch},
KeywordId{.bytes="test", .id = Id.Keyword_test},
KeywordId{.bytes="this", .id = Id.Keyword_this},
KeywordId{.bytes="true", .id = Id.Keyword_true},
KeywordId{.bytes="try", .id = Id.Keyword_try},
KeywordId{.bytes="undefined", .id = Id.Keyword_undefined},
KeywordId{.bytes="union", .id = Id.Keyword_union},
KeywordId{.bytes="unreachable", .id = Id.Keyword_unreachable},
KeywordId{.bytes="use", .id = Id.Keyword_use},
KeywordId{.bytes="var", .id = Id.Keyword_var},
KeywordId{.bytes="volatile", .id = Id.Keyword_volatile},
KeywordId{.bytes="while", .id = Id.Keyword_while},
const keywords = []Keyword {
Keyword{.bytes="align", .id = Id.Keyword_align},
Keyword{.bytes="and", .id = Id.Keyword_and},
Keyword{.bytes="asm", .id = Id.Keyword_asm},
Keyword{.bytes="async", .id = Id.Keyword_async},
Keyword{.bytes="await", .id = Id.Keyword_await},
Keyword{.bytes="break", .id = Id.Keyword_break},
Keyword{.bytes="catch", .id = Id.Keyword_catch},
Keyword{.bytes="cancel", .id = Id.Keyword_cancel},
Keyword{.bytes="comptime", .id = Id.Keyword_comptime},
Keyword{.bytes="const", .id = Id.Keyword_const},
Keyword{.bytes="continue", .id = Id.Keyword_continue},
Keyword{.bytes="defer", .id = Id.Keyword_defer},
Keyword{.bytes="else", .id = Id.Keyword_else},
Keyword{.bytes="enum", .id = Id.Keyword_enum},
Keyword{.bytes="errdefer", .id = Id.Keyword_errdefer},
Keyword{.bytes="error", .id = Id.Keyword_error},
Keyword{.bytes="export", .id = Id.Keyword_export},
Keyword{.bytes="extern", .id = Id.Keyword_extern},
Keyword{.bytes="false", .id = Id.Keyword_false},
Keyword{.bytes="fn", .id = Id.Keyword_fn},
Keyword{.bytes="for", .id = Id.Keyword_for},
Keyword{.bytes="if", .id = Id.Keyword_if},
Keyword{.bytes="inline", .id = Id.Keyword_inline},
Keyword{.bytes="nakedcc", .id = Id.Keyword_nakedcc},
Keyword{.bytes="noalias", .id = Id.Keyword_noalias},
Keyword{.bytes="null", .id = Id.Keyword_null},
Keyword{.bytes="or", .id = Id.Keyword_or},
Keyword{.bytes="packed", .id = Id.Keyword_packed},
Keyword{.bytes="promise", .id = Id.Keyword_promise},
Keyword{.bytes="pub", .id = Id.Keyword_pub},
Keyword{.bytes="resume", .id = Id.Keyword_resume},
Keyword{.bytes="return", .id = Id.Keyword_return},
Keyword{.bytes="section", .id = Id.Keyword_section},
Keyword{.bytes="stdcallcc", .id = Id.Keyword_stdcallcc},
Keyword{.bytes="struct", .id = Id.Keyword_struct},
Keyword{.bytes="suspend", .id = Id.Keyword_suspend},
Keyword{.bytes="switch", .id = Id.Keyword_switch},
Keyword{.bytes="test", .id = Id.Keyword_test},
Keyword{.bytes="this", .id = Id.Keyword_this},
Keyword{.bytes="true", .id = Id.Keyword_true},
Keyword{.bytes="try", .id = Id.Keyword_try},
Keyword{.bytes="undefined", .id = Id.Keyword_undefined},
Keyword{.bytes="union", .id = Id.Keyword_union},
Keyword{.bytes="unreachable", .id = Id.Keyword_unreachable},
Keyword{.bytes="use", .id = Id.Keyword_use},
Keyword{.bytes="var", .id = Id.Keyword_var},
Keyword{.bytes="volatile", .id = Id.Keyword_volatile},
Keyword{.bytes="while", .id = Id.Keyword_while},
};
fn getKeyword(bytes: []const u8) ?Id {
@ -166,6 +167,7 @@ pub const Token = struct {
Keyword_null,
Keyword_or,
Keyword_packed,
Keyword_promise,
Keyword_pub,
Keyword_resume,
Keyword_return,
@ -910,10 +912,10 @@ pub const Tokenizer = struct {
},
},
State.FloatFraction => switch (c) {
'p', 'P' => {
'p', 'P', 'e', 'E' => {
state = State.FloatExponentUnsigned;
},
'0'...'9', 'a'...'f', 'A'...'F' => {},
'0'...'9' => {},
else => break,
},
State.FloatExponentUnsigned => switch (c) {
@ -1106,6 +1108,15 @@ test "tokenizer" {
});
}
test "tokenizer - float literal" {
testTokenize("a = 4.94065645841246544177e-324;\n", []Token.Id {
Token.Id.Identifier,
Token.Id.Equal,
Token.Id.FloatLiteral,
Token.Id.Semicolon,
});
}
test "tokenizer - chars" {
testTokenize("'c'", []Token.Id {Token.Id.CharLiteral});
}

View File

@ -37,6 +37,7 @@ comptime {
_ = @import("cases/pub_enum/index.zig");
_ = @import("cases/ref_var_in_if_after_if_2nd_switch_prong.zig");
_ = @import("cases/reflection.zig");
_ = @import("cases/type_info.zig");
_ = @import("cases/sizeof_and_typeof.zig");
_ = @import("cases/slice.zig");
_ = @import("cases/struct.zig");
@ -53,4 +54,5 @@ comptime {
_ = @import("cases/var_args.zig");
_ = @import("cases/void.zig");
_ = @import("cases/while.zig");
_ = @import("cases/fn_in_struct_in_comptime.zig");
}

View File

@ -219,8 +219,9 @@ async fn printTrace(p: promise->error!void) void {
std.debug.assert(e == error.Fail);
if (@errorReturnTrace()) |trace| {
assert(trace.index == 1);
} else if (builtin.mode != builtin.Mode.ReleaseFast) {
@panic("expected return trace");
} else switch (builtin.mode) {
builtin.Mode.Debug, builtin.Mode.ReleaseSafe => @panic("expected return trace"),
builtin.Mode.ReleaseFast, builtin.Mode.ReleaseSmall => {},
}
};
}

View File

@ -882,3 +882,12 @@ test "enum with 1 field but explicit tag type should still have the tag type" {
};
comptime @import("std").debug.assert(@sizeOf(Enum) == @sizeOf(u8));
}
test "empty extern enum with members" {
const E = extern enum {
A,
B,
C,
};
assert(@sizeOf(E) == @sizeOf(c_int));
}

View File

@ -0,0 +1,17 @@
const assert = @import("std").debug.assert;
fn get_foo() fn(&u8)usize {
comptime {
return struct {
fn func(ptr: &u8) usize {
var u = @ptrToInt(ptr);
return u;
}
}.func;
}
}
test "define a function in an anonymous struct in comptime" {
const foo = get_foo();
assert(foo(@intToPtr(&u8, 12345)) == 12345);
}

View File

@ -335,6 +335,23 @@ test "big number shifting" {
}
}
test "big number multi-limb shift and mask" {
comptime {
var a = 0xefffffffa0000001eeeeeeefaaaaaaab;
assert(u32(a & 0xffffffff) == 0xaaaaaaab);
a >>= 32;
assert(u32(a & 0xffffffff) == 0xeeeeeeef);
a >>= 32;
assert(u32(a & 0xffffffff) == 0xa0000001);
a >>= 32;
assert(u32(a & 0xffffffff) == 0xefffffff);
a >>= 32;
assert(a == 0);
}
}
test "xor" {
test_xor();
comptime test_xor();

200
test/cases/type_info.zig Normal file
View File

@ -0,0 +1,200 @@
const assert = @import("std").debug.assert;
const mem = @import("std").mem;
const TypeInfo = @import("builtin").TypeInfo;
const TypeId = @import("builtin").TypeId;
test "type info: tag type, void info" {
comptime {
assert(@TagType(TypeInfo) == TypeId);
const void_info = @typeInfo(void);
assert(TypeId(void_info) == TypeId.Void);
assert(void_info.Void == {});
}
}
test "type info: integer, floating point type info" {
comptime {
const u8_info = @typeInfo(u8);
assert(TypeId(u8_info) == TypeId.Int);
assert(!u8_info.Int.is_signed);
assert(u8_info.Int.bits == 8);
const f64_info = @typeInfo(f64);
assert(TypeId(f64_info) == TypeId.Float);
assert(f64_info.Float.bits == 64);
}
}
test "type info: pointer type info" {
comptime {
const u32_ptr_info = @typeInfo(&u32);
assert(TypeId(u32_ptr_info) == TypeId.Pointer);
assert(u32_ptr_info.Pointer.is_const == false);
assert(u32_ptr_info.Pointer.is_volatile == false);
assert(u32_ptr_info.Pointer.alignment == 4);
assert(u32_ptr_info.Pointer.child == u32);
}
}
test "type info: slice type info" {
comptime {
const u32_slice_info = @typeInfo([]u32);
assert(TypeId(u32_slice_info) == TypeId.Slice);
assert(u32_slice_info.Slice.is_const == false);
assert(u32_slice_info.Slice.is_volatile == false);
assert(u32_slice_info.Slice.alignment == 4);
assert(u32_slice_info.Slice.child == u32);
}
}
test "type info: array type info" {
comptime {
const arr_info = @typeInfo([42]bool);
assert(TypeId(arr_info) == TypeId.Array);
assert(arr_info.Array.len == 42);
assert(arr_info.Array.child == bool);
}
}
test "type info: nullable type info" {
comptime {
const null_info = @typeInfo(?void);
assert(TypeId(null_info) == TypeId.Nullable);
assert(null_info.Nullable.child == void);
}
}
test "type info: promise info" {
comptime {
const null_promise_info = @typeInfo(promise);
assert(TypeId(null_promise_info) == TypeId.Promise);
assert(null_promise_info.Promise.child == @typeOf(undefined));
const promise_info = @typeInfo(promise->usize);
assert(TypeId(promise_info) == TypeId.Promise);
assert(promise_info.Promise.child == usize);
}
}
test "type info: error set, error union info" {
comptime {
const TestErrorSet = error {
First,
Second,
Third,
};
const error_set_info = @typeInfo(TestErrorSet);
assert(TypeId(error_set_info) == TypeId.ErrorSet);
assert(error_set_info.ErrorSet.errors.len == 3);
assert(mem.eql(u8, error_set_info.ErrorSet.errors[0].name, "First"));
assert(error_set_info.ErrorSet.errors[2].value == usize(TestErrorSet.Third));
const error_union_info = @typeInfo(TestErrorSet!usize);
assert(TypeId(error_union_info) == TypeId.ErrorUnion);
assert(error_union_info.ErrorUnion.error_set == TestErrorSet);
assert(error_union_info.ErrorUnion.payload == usize);
}
}
test "type info: enum info" {
comptime {
const Os = @import("builtin").Os;
const os_info = @typeInfo(Os);
assert(TypeId(os_info) == TypeId.Enum);
assert(os_info.Enum.layout == TypeInfo.ContainerLayout.Auto);
assert(os_info.Enum.fields.len == 32);
assert(mem.eql(u8, os_info.Enum.fields[1].name, "ananas"));
assert(os_info.Enum.fields[10].value == 10);
assert(os_info.Enum.tag_type == u5);
assert(os_info.Enum.defs.len == 0);
}
}
test "type info: union info" {
comptime {
const typeinfo_info = @typeInfo(TypeInfo);
assert(TypeId(typeinfo_info) == TypeId.Union);
assert(typeinfo_info.Union.layout == TypeInfo.ContainerLayout.Auto);
assert(typeinfo_info.Union.tag_type == TypeId);
assert(typeinfo_info.Union.fields.len == 26);
assert(typeinfo_info.Union.fields[4].enum_field != null);
assert((??typeinfo_info.Union.fields[4].enum_field).value == 4);
assert(typeinfo_info.Union.fields[4].field_type == @typeOf(@typeInfo(u8).Int));
assert(typeinfo_info.Union.defs.len == 21);
const TestNoTagUnion = union {
Foo: void,
Bar: u32,
};
const notag_union_info = @typeInfo(TestNoTagUnion);
assert(TypeId(notag_union_info) == TypeId.Union);
assert(notag_union_info.Union.tag_type == @typeOf(undefined));
assert(notag_union_info.Union.layout == TypeInfo.ContainerLayout.Auto);
assert(notag_union_info.Union.fields.len == 2);
assert(notag_union_info.Union.fields[0].enum_field == null);
assert(notag_union_info.Union.fields[1].field_type == u32);
const TestExternUnion = extern union {
foo: &c_void,
};
const extern_union_info = @typeInfo(TestExternUnion);
assert(extern_union_info.Union.layout == TypeInfo.ContainerLayout.Extern);
assert(extern_union_info.Union.tag_type == @typeOf(undefined));
assert(extern_union_info.Union.fields[0].enum_field == null);
assert(extern_union_info.Union.fields[0].field_type == &c_void);
}
}
test "type info: struct info" {
comptime {
const struct_info = @typeInfo(TestStruct);
assert(TypeId(struct_info) == TypeId.Struct);
assert(struct_info.Struct.layout == TypeInfo.ContainerLayout.Packed);
assert(struct_info.Struct.fields.len == 3);
assert(struct_info.Struct.fields[1].offset == null);
assert(struct_info.Struct.fields[2].field_type == &TestStruct);
assert(struct_info.Struct.defs.len == 2);
assert(struct_info.Struct.defs[0].is_pub);
assert(!struct_info.Struct.defs[0].data.Fn.is_extern);
assert(struct_info.Struct.defs[0].data.Fn.lib_name == null);
assert(struct_info.Struct.defs[0].data.Fn.return_type == void);
assert(struct_info.Struct.defs[0].data.Fn.fn_type == fn(&const TestStruct)void);
}
}
const TestStruct = packed struct {
const Self = this;
fieldA: usize,
fieldB: void,
fieldC: &Self,
pub fn foo(self: &const Self) void {}
};
test "type info: function type info" {
comptime {
const fn_info = @typeInfo(@typeOf(foo));
assert(TypeId(fn_info) == TypeId.Fn);
assert(fn_info.Fn.calling_convention == TypeInfo.CallingConvention.Unspecified);
assert(fn_info.Fn.is_generic);
assert(fn_info.Fn.args.len == 2);
assert(fn_info.Fn.is_var_args);
assert(fn_info.Fn.return_type == @typeOf(undefined));
assert(fn_info.Fn.async_allocator_type == @typeOf(undefined));
const test_instance: TestStruct = undefined;
const bound_fn_info = @typeInfo(@typeOf(test_instance.foo));
assert(TypeId(bound_fn_info) == TypeId.BoundFn);
assert(bound_fn_info.BoundFn.args[0].arg_type == &const TestStruct);
}
}
fn foo(comptime a: usize, b: bool, args: ...) usize {
return 0;
}

View File

@ -48,6 +48,16 @@ test "basic unions" {
assert(foo.float == 12.34);
}
test "comptime union field access" {
comptime {
var foo = Foo { .int = 0 };
assert(foo.int == 0);
foo = Foo { .float = 42.42 };
assert(foo.float == 42.42);
}
}
test "init union with runtime value" {
var foo: Foo = undefined;
@ -275,3 +285,16 @@ const PartialInst = union(enum) {
const PartialInstWithPayload = union(enum) {
Compiled: i32,
};
test "access a member of tagged union with conflicting enum tag name" {
const Bar = union(enum) {
A: A,
B: B,
const A = u8;
const B = void;
};
comptime assert(Bar.A == u8);
}

View File

@ -3210,6 +3210,18 @@ pub fn addCases(cases: &tests.CompileErrorContext) void {
,
".tmp_source.zig:5:42: error: zero-bit field 'val' in struct 'Empty' has no offset");
cases.add("invalid union field access in comptime",
\\const Foo = union {
\\ Bar: u8,
\\ Baz: void,
\\};
\\comptime {
\\ var foo = Foo {.Baz = {}};
\\ const bar_val = foo.Bar;
\\}
,
".tmp_source.zig:7:24: error: accessing union field 'Bar' while field 'Baz' is set");
cases.add("getting return type of generic function",
\\fn generic(a: var) void {}
\\comptime {
@ -3225,5 +3237,4 @@ pub fn addCases(cases: &tests.CompileErrorContext) void {
\\}
,
".tmp_source.zig:3:36: error: @ArgType could not resolve the type of arg 0 because 'fn(var)var' is generic");
}

View File

@ -152,7 +152,7 @@ pub fn addPkgTests(b: &build.Builder, test_filter: ?[]const u8, root_src: []cons
const step = b.step(b.fmt("test-{}", name), desc);
for (test_targets) |test_target| {
const is_native = (test_target.os == builtin.os and test_target.arch == builtin.arch);
for ([]Mode{Mode.Debug, Mode.ReleaseSafe, Mode.ReleaseFast}) |mode| {
for ([]Mode{Mode.Debug, Mode.ReleaseSafe, Mode.ReleaseFast, Mode.ReleaseSmall}) |mode| {
for ([]bool{false, true}) |link_libc| {
if (link_libc and !is_native) {
// don't assume we have a cross-compiling libc set up
@ -451,7 +451,7 @@ pub const CompareOutputContext = struct {
self.step.dependOn(&run_and_cmp_output.step);
},
Special.None => {
for ([]Mode{Mode.Debug, Mode.ReleaseSafe, Mode.ReleaseFast}) |mode| {
for ([]Mode{Mode.Debug, Mode.ReleaseSafe, Mode.ReleaseFast, Mode.ReleaseSmall}) |mode| {
const annotated_case_name = fmt.allocPrint(self.b.allocator, "{} {} ({})",
"compare-output", case.name, @tagName(mode)) catch unreachable;
if (self.test_filter) |filter| {
@ -705,7 +705,7 @@ pub const CompileErrorContext = struct {
pub fn addCase(self: &CompileErrorContext, case: &const TestCase) void {
const b = self.b;
for ([]Mode{Mode.Debug, Mode.ReleaseSafe, Mode.ReleaseFast}) |mode| {
for ([]Mode{Mode.Debug, Mode.ReleaseFast}) |mode| {
const annotated_case_name = fmt.allocPrint(self.b.allocator, "compile-error {} ({})",
case.name, @tagName(mode)) catch unreachable;
if (self.test_filter) |filter| {
@ -773,7 +773,7 @@ pub const BuildExamplesContext = struct {
pub fn addAllArgs(self: &BuildExamplesContext, root_src: []const u8, link_libc: bool) void {
const b = self.b;
for ([]Mode{Mode.Debug, Mode.ReleaseSafe, Mode.ReleaseFast}) |mode| {
for ([]Mode{Mode.Debug, Mode.ReleaseSafe, Mode.ReleaseFast, Mode.ReleaseSmall}) |mode| {
const annotated_case_name = fmt.allocPrint(self.b.allocator, "build {} ({})",
root_src, @tagName(mode)) catch unreachable;
if (self.test_filter) |filter| {

View File

@ -1,6 +1,27 @@
const tests = @import("tests.zig");
pub fn addCases(cases: &tests.TranslateCContext) void {
cases.add("double define struct",
\\typedef struct Bar Bar;
\\typedef struct Foo Foo;
\\
\\struct Foo {
\\ Foo *a;
\\};
\\
\\struct Bar {
\\ Foo *a;
\\};
,
\\pub const struct_Foo = extern struct {
\\ a: ?&Foo,
\\};
\\pub const Foo = struct_Foo;
\\pub const struct_Bar = extern struct {
\\ a: ?&Foo,
\\};
);
cases.addAllowWarnings("simple data types",
\\#include <stdint.h>
\\int foo(char a, unsigned char b, signed char c);
@ -53,6 +74,28 @@ pub fn addCases(cases: &tests.TranslateCContext) void {
\\pub const Foo = enum_Foo;
);
cases.add("enums",
\\enum Foo {
\\ FooA = 2,
\\ FooB = 5,
\\ Foo1,
\\};
,
\\pub const enum_Foo = extern enum {
\\ A = 2,
\\ B = 5,
\\ @"1" = 6,
\\};
,
\\pub const FooA = enum_Foo.A;
,
\\pub const FooB = enum_Foo.B;
,
\\pub const Foo1 = enum_Foo.@"1";
,
\\pub const Foo = enum_Foo;
);
cases.add("restrict -> noalias",
\\void foo(void *restrict bar, void *restrict);
,