diff --git a/doc/langref.html.in b/doc/langref.html.in
index fb3d05503..a28e0f5df 100644
--- a/doc/langref.html.in
+++ b/doc/langref.html.in
@@ -10097,7 +10097,7 @@ TopLevelComptime <- KEYWORD_comptime BlockExpr
TopLevelDecl
<- (KEYWORD_export / KEYWORD_extern STRINGLITERAL? / KEYWORD_inline)? FnProto (SEMICOLON / Block)
/ (KEYWORD_export / KEYWORD_extern STRINGLITERAL?)? KEYWORD_threadlocal? VarDecl
- / KEYWORD_use Expr SEMICOLON
+ / KEYWORD_usingnamespace Expr SEMICOLON
FnProto <- FnCC? KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign? LinkSection? EXCLAMATIONMARK? (KEYWORD_var / TypeExpr)
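The only change in this hunk is renaming the old `use` keyword to `usingnamespace` in the TopLevelDecl production. A minimal Zig snippet exercising that alternative might look like this (illustrative only, not part of the patch):

```zig
// A top-level decl matching the `KEYWORD_usingnamespace Expr SEMICOLON` alternative above.
pub usingnamespace @import("builtin");
```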
diff --git a/src/all_types.hpp b/src/all_types.hpp
index 1119d2bf4..f927fc291 100644
--- a/src/all_types.hpp
+++ b/src/all_types.hpp
@@ -589,6 +589,7 @@ enum NodeType {
NodeTypeIfErrorExpr,
NodeTypeIfOptional,
NodeTypeErrorSetDecl,
+ NodeTypeErrorSetField,
NodeTypeResume,
NodeTypeAwaitExpr,
NodeTypeSuspend,
@@ -612,16 +613,10 @@ enum FnInline {
};
struct AstNodeFnProto {
- VisibMod visib_mod;
Buf *name;
ZigList<AstNode *> params;
AstNode *return_type;
Token *return_var_token;
- bool is_var_args;
- bool is_extern;
- bool is_export;
- FnInline fn_inline;
- CallingConvention cc;
AstNode *fn_def_node;
// populated if this is an extern declaration
Buf *lib_name;
@@ -629,8 +624,16 @@ struct AstNodeFnProto {
AstNode *align_expr;
// populated if the "section(S)" is present
AstNode *section_expr;
+ Buf doc_comments;
+ FnInline fn_inline;
+ CallingConvention cc;
+
+ VisibMod visib_mod;
bool auto_err_set;
+ bool is_var_args;
+ bool is_extern;
+ bool is_export;
};
struct AstNodeFnDef {
@@ -642,6 +645,7 @@ struct AstNodeParamDecl {
Buf *name;
AstNode *type;
Token *var_token;
+ Buf doc_comments;
bool is_noalias;
bool is_comptime;
bool is_var_args;
@@ -684,6 +688,7 @@ struct AstNodeVariableDeclaration {
// populated if the "section(S)" is present
AstNode *section_expr;
Token *threadlocal_tok;
+ Buf doc_comments;
VisibMod visib_mod;
bool is_const;
@@ -957,25 +962,35 @@ enum ContainerLayout {
};
struct AstNodeContainerDecl {
- ContainerKind kind;
+ AstNode *init_arg_expr; // enum(T), struct(endianness), or union(T), or union(enum(T))
ZigList<AstNode *> fields;
ZigList<AstNode *> decls;
+
+ ContainerKind kind;
ContainerLayout layout;
- AstNode *init_arg_expr; // enum(T), struct(endianness), or union(T), or union(enum(T))
+
bool auto_enum, is_root; // union(enum)
};
+struct AstNodeErrorSetField {
+ Buf doc_comments;
+ AstNode *field_name;
+};
+
struct AstNodeErrorSetDecl {
+ // Each AstNode could be AstNodeErrorSetField or just AstNodeSymbolExpr to save memory
ZigList<AstNode *> decls;
};
struct AstNodeStructField {
- VisibMod visib_mod;
Buf *name;
AstNode *type;
AstNode *value;
// populated if the "align(A)" is present
AstNode *align_expr;
+ Buf doc_comments;
+
+ VisibMod visib_mod;
};
struct AstNodeStringLiteral {
@@ -1126,6 +1141,7 @@ struct AstNode {
AstNodeInferredArrayType inferred_array_type;
AstNodeErrorType error_type;
AstNodeErrorSetDecl err_set_decl;
+ AstNodeErrorSetField err_set_field;
AstNodeResumeExpr resume_expr;
AstNodeAwaitExpr await_expr;
AstNodeSuspend suspend;
diff --git a/src/analyze.cpp b/src/analyze.cpp
index cca239c27..c3206eac4 100644
--- a/src/analyze.cpp
+++ b/src/analyze.cpp
@@ -3572,6 +3572,7 @@ void scan_decls(CodeGen *g, ScopeDecls *decls_scope, AstNode *node) {
case NodeTypeSuspend:
case NodeTypeEnumLiteral:
case NodeTypeAnyFrameType:
+ case NodeTypeErrorSetField:
zig_unreachable();
}
}
diff --git a/src/ast_render.cpp b/src/ast_render.cpp
index 537a74d7b..18940c4b8 100644
--- a/src/ast_render.cpp
+++ b/src/ast_render.cpp
@@ -266,6 +266,8 @@ static const char *node_type_str(NodeType node_type) {
return "AnyFrameType";
case NodeTypeEnumLiteral:
return "EnumLiteral";
+ case NodeTypeErrorSetField:
+ return "ErrorSetField";
}
zig_unreachable();
}
@@ -1177,6 +1179,7 @@ static void render_node_extra(AstRender *ar, AstNode *node, bool grouped) {
case NodeTypeTestDecl:
case NodeTypeStructField:
case NodeTypeUsingNamespace:
+ case NodeTypeErrorSetField:
zig_panic("TODO more ast rendering");
}
}
diff --git a/src/ir.cpp b/src/ir.cpp
index 52b59ddca..2bfde74da 100644
--- a/src/ir.cpp
+++ b/src/ir.cpp
@@ -7945,7 +7945,15 @@ static IrInstruction *ir_gen_err_set_decl(IrBuilder *irb, Scope *parent_scope, A
ErrorTableEntry **errors = allocate<ErrorTableEntry *>(irb->codegen->errors_by_index.length + err_count);
for (uint32_t i = 0; i < err_count; i += 1) {
- AstNode *symbol_node = node->data.err_set_decl.decls.at(i);
+ AstNode *field_node = node->data.err_set_decl.decls.at(i);
+ AstNode *symbol_node;
+ if (field_node->type == NodeTypeSymbol) {
+ symbol_node = field_node;
+ } else if (field_node->type == NodeTypeErrorSetField) {
+ symbol_node = field_node->data.err_set_field.field_name;
+ } else {
+ zig_unreachable();
+ }
assert(symbol_node->type == NodeTypeSymbol);
Buf *err_name = symbol_node->data.symbol_expr.symbol;
ErrorTableEntry *err = allocate<ErrorTableEntry>(1);
@@ -8116,6 +8124,7 @@ static IrInstruction *ir_gen_node_raw(IrBuilder *irb, AstNode *node, Scope *scop
case NodeTypeSwitchProng:
case NodeTypeSwitchRange:
case NodeTypeStructField:
+ case NodeTypeErrorSetField:
case NodeTypeFnDef:
case NodeTypeTestDecl:
zig_unreachable();
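The ir.cpp hunks above mirror the new AST shape: an error set member is still a plain NodeTypeSymbol node unless a doc comment precedes it, in which case the parser wraps it in a NodeTypeErrorSetField whose field_name holds the symbol. A small Zig illustration of the two cases (the error set and its members are made-up names, not from this patch):

```zig
const FileError = error{
    /// Doc comment: this member becomes a NodeTypeErrorSetField node.
    NotFound,
    // No doc comment: this member stays a plain NodeTypeSymbol node.
    AccessDenied,
};
```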
diff --git a/src/parser.cpp b/src/parser.cpp
index 96071daa0..c4be11706 100644
--- a/src/parser.cpp
+++ b/src/parser.cpp
@@ -37,7 +37,7 @@ static AstNode *ast_parse_root(ParseContext *pc);
static AstNodeContainerDecl ast_parse_container_members(ParseContext *pc);
static AstNode *ast_parse_test_decl(ParseContext *pc);
static AstNode *ast_parse_top_level_comptime(ParseContext *pc);
-static AstNode *ast_parse_top_level_decl(ParseContext *pc, VisibMod visib_mod);
+static AstNode *ast_parse_top_level_decl(ParseContext *pc, VisibMod visib_mod, Buf *doc_comments);
static AstNode *ast_parse_fn_proto(ParseContext *pc);
static AstNode *ast_parse_var_decl(ParseContext *pc);
static AstNode *ast_parse_container_field(ParseContext *pc);
@@ -497,6 +497,23 @@ static AstNode *ast_parse_root(ParseContext *pc) {
return node;
}
+static Token *ast_parse_doc_comments(ParseContext *pc, Buf *buf) {
+    Token *first_doc_token = nullptr;
+    Token *doc_token = nullptr;
+    while ((doc_token = eat_token_if(pc, TokenIdDocComment))) {
+        if (first_doc_token == nullptr)
+            first_doc_token = doc_token;
+        if (buf->list.length == 0) {
+            buf_resize(buf, 0);
+        }
+        // chops off '///' and '\n'
+        buf_append_mem(buf, buf_ptr(pc->buf) + doc_token->start_pos + 3,
+                doc_token->end_pos - doc_token->start_pos - 4);
+    }
+    // return the first doc comment token so callers can use it for source location
+    return first_doc_token;
+}
+
// ContainerMembers
// <- TestDecl ContainerMembers
// / TopLevelComptime ContainerMembers
@@ -519,10 +532,13 @@ static AstNodeContainerDecl ast_parse_container_members(ParseContext *pc) {
continue;
}
+ Buf doc_comment_buf = BUF_INIT;
+ ast_parse_doc_comments(pc, &doc_comment_buf);
+
Token *visib_token = eat_token_if(pc, TokenIdKeywordPub);
VisibMod visib_mod = visib_token != nullptr ? VisibModPub : VisibModPrivate;
- AstNode *top_level_decl = ast_parse_top_level_decl(pc, visib_mod);
+ AstNode *top_level_decl = ast_parse_top_level_decl(pc, visib_mod, &doc_comment_buf);
if (top_level_decl != nullptr) {
res.decls.append(top_level_decl);
continue;
@@ -532,6 +548,7 @@ static AstNodeContainerDecl ast_parse_container_members(ParseContext *pc) {
if (container_field != nullptr) {
assert(container_field->type == NodeTypeStructField);
container_field->data.struct_field.visib_mod = visib_mod;
+ container_field->data.struct_field.doc_comments = doc_comment_buf;
res.fields.append(container_field);
if (eat_token_if(pc, TokenIdComma) != nullptr) {
continue;
@@ -581,7 +598,7 @@ static AstNode *ast_parse_top_level_comptime(ParseContext *pc) {
// <- (KEYWORD_export / KEYWORD_extern STRINGLITERAL? / (KEYWORD_inline / KEYWORD_noinline))? FnProto (SEMICOLON / Block)
// / (KEYWORD_export / KEYWORD_extern STRINGLITERAL?)? KEYWORD_threadlocal? VarDecl
// / KEYWORD_use Expr SEMICOLON
-static AstNode *ast_parse_top_level_decl(ParseContext *pc, VisibMod visib_mod) {
+static AstNode *ast_parse_top_level_decl(ParseContext *pc, VisibMod visib_mod, Buf *doc_comments) {
Token *first = eat_token_if(pc, TokenIdKeywordExport);
if (first == nullptr)
first = eat_token_if(pc, TokenIdKeywordExtern);
@@ -603,6 +620,7 @@ static AstNode *ast_parse_top_level_decl(ParseContext *pc, VisibMod visib_mod) {
var_decl->column = first->start_column;
var_decl->data.variable_declaration.threadlocal_tok = thread_local_kw;
var_decl->data.variable_declaration.visib_mod = visib_mod;
+ var_decl->data.variable_declaration.doc_comments = *doc_comments;
var_decl->data.variable_declaration.is_extern = first->id == TokenIdKeywordExtern;
var_decl->data.variable_declaration.is_export = first->id == TokenIdKeywordExport;
var_decl->data.variable_declaration.lib_name = token_buf(lib_name);
@@ -623,6 +641,7 @@ static AstNode *ast_parse_top_level_decl(ParseContext *pc, VisibMod visib_mod) {
fn_proto->line = first->start_line;
fn_proto->column = first->start_column;
fn_proto->data.fn_proto.visib_mod = visib_mod;
+ fn_proto->data.fn_proto.doc_comments = *doc_comments;
fn_proto->data.fn_proto.is_extern = first->id == TokenIdKeywordExtern;
fn_proto->data.fn_proto.is_export = first->id == TokenIdKeywordExport;
switch (first->id) {
@@ -657,6 +676,7 @@ static AstNode *ast_parse_top_level_decl(ParseContext *pc, VisibMod visib_mod) {
if (var_decl != nullptr) {
assert(var_decl->type == NodeTypeVariableDeclaration);
var_decl->data.variable_declaration.visib_mod = visib_mod;
+ var_decl->data.variable_declaration.doc_comments = *doc_comments;
var_decl->data.variable_declaration.threadlocal_tok = thread_local_kw;
return var_decl;
}
@@ -672,6 +692,7 @@ static AstNode *ast_parse_top_level_decl(ParseContext *pc, VisibMod visib_mod) {
assert(fn_proto->type == NodeTypeFnProto);
fn_proto->data.fn_proto.visib_mod = visib_mod;
+ fn_proto->data.fn_proto.doc_comments = *doc_comments;
AstNode *res = fn_proto;
if (body != nullptr) {
res = ast_create_node_copy_line_info(pc, NodeTypeFnDef, fn_proto);
@@ -1719,11 +1740,20 @@ static AstNode *ast_parse_error_set_decl(ParseContext *pc) {
}
ZigList<AstNode *> decls = ast_parse_list<AstNode>(pc, TokenIdComma, [](ParseContext *context) {
+ Buf doc_comment_buf = BUF_INIT;
+ Token *doc_token = ast_parse_doc_comments(context, &doc_comment_buf);
Token *ident = eat_token_if(context, TokenIdSymbol);
if (ident == nullptr)
return (AstNode*)nullptr;
- return token_symbol(context, ident);
+ AstNode *symbol_node = token_symbol(context, ident);
+ if (doc_token == nullptr)
+ return symbol_node;
+
+ AstNode *field_node = ast_create_node(context, NodeTypeErrorSetField, doc_token);
+ field_node->data.err_set_field.field_name = symbol_node;
+ field_node->data.err_set_field.doc_comments = doc_comment_buf;
+ return field_node;
});
expect_token(pc, TokenIdRBrace);
@@ -2057,6 +2087,9 @@ static Optional ast_parse_fn_cc(ParseContext *pc) {
// ParamDecl <- (KEYWORD_noalias / KEYWORD_comptime)? (IDENTIFIER COLON)? ParamType
static AstNode *ast_parse_param_decl(ParseContext *pc) {
+ Buf doc_comments = BUF_INIT;
+ ast_parse_doc_comments(pc, &doc_comments);
+
Token *first = eat_token_if(pc, TokenIdKeywordNoAlias);
if (first == nullptr)
first = eat_token_if(pc, TokenIdKeywordCompTime);
@@ -2089,6 +2122,7 @@ static AstNode *ast_parse_param_decl(ParseContext *pc) {
res->line = first->start_line;
res->column = first->start_column;
res->data.param_decl.name = token_buf(name);
+ res->data.param_decl.doc_comments = doc_comments;
res->data.param_decl.is_noalias = first->id == TokenIdKeywordNoAlias;
res->data.param_decl.is_comptime = first->id == TokenIdKeywordCompTime;
return res;
@@ -3029,6 +3063,9 @@ void ast_visit_node_children(AstNode *node, void (*visit)(AstNode **, void *cont
case NodeTypeErrorSetDecl:
visit_node_list(&node->data.err_set_decl.decls, visit, context);
break;
+ case NodeTypeErrorSetField:
+ visit_field(&node->data.err_set_field.field_name, visit, context);
+ break;
case NodeTypeResume:
visit_field(&node->data.resume_expr.expr, visit, context);
break;
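Taken together, the parser changes collect doc comments in every position that now carries a `Buf doc_comments`: top-level variable declarations and function prototypes, container fields, parameter declarations, and error set fields. A hedged sketch of Zig source that would populate each of those buffers (all identifiers here are invented for illustration):

```zig
/// Stored in AstNodeVariableDeclaration.doc_comments.
pub const max_retries: u32 = 3;

/// Stored in AstNodeFnProto.doc_comments.
pub fn retry(
    /// Stored in AstNodeParamDecl.doc_comments.
    attempt: u32,
) void {
    _ = attempt;
}

const Config = struct {
    /// Stored in AstNodeStructField.doc_comments.
    verbose: bool,
};

const LoadError = error{
    /// Stored in AstNodeErrorSetField.doc_comments.
    OutOfMemory,
};
```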
diff --git a/src/tokenizer.cpp b/src/tokenizer.cpp
index 71a24fe72..399597b7b 100644
--- a/src/tokenizer.cpp
+++ b/src/tokenizer.cpp
@@ -196,6 +196,8 @@ enum TokenizeState {
TokenizeStateSawStar,
TokenizeStateSawStarPercent,
TokenizeStateSawSlash,
+ TokenizeStateSawSlash2,
+ TokenizeStateSawSlash3,
TokenizeStateSawBackslash,
TokenizeStateSawPercent,
TokenizeStateSawPlus,
@@ -206,6 +208,7 @@ enum TokenizeState {
TokenizeStateSawCaret,
TokenizeStateSawBar,
TokenizeStateSawBarBar,
+ TokenizeStateDocComment,
TokenizeStateLineComment,
TokenizeStateLineString,
TokenizeStateLineStringEnd,
@@ -910,8 +913,7 @@ void tokenize(Buf *buf, Tokenization *out) {
case TokenizeStateSawSlash:
switch (c) {
case '/':
- cancel_token(&t);
- t.state = TokenizeStateLineComment;
+ t.state = TokenizeStateSawSlash2;
break;
case '=':
set_token_id(&t, t.cur_tok, TokenIdDivEq);
@@ -925,6 +927,38 @@ void tokenize(Buf *buf, Tokenization *out) {
continue;
}
break;
+ case TokenizeStateSawSlash2:
+ switch (c) {
+ case '/':
+ t.state = TokenizeStateSawSlash3;
+ break;
+ case '\n':
+ cancel_token(&t);
+ t.state = TokenizeStateStart;
+ break;
+ default:
+ cancel_token(&t);
+ t.state = TokenizeStateLineComment;
+ break;
+ }
+ break;
+ case TokenizeStateSawSlash3:
+ switch (c) {
+ case '/':
+ cancel_token(&t);
+ t.state = TokenizeStateLineComment;
+ break;
+ case '\n':
+ set_token_id(&t, t.cur_tok, TokenIdDocComment);
+ end_token(&t);
+ t.state = TokenizeStateStart;
+ break;
+ default:
+ set_token_id(&t, t.cur_tok, TokenIdDocComment);
+ t.state = TokenizeStateDocComment;
+ break;
+ }
+ break;
case TokenizeStateSawBackslash:
switch (c) {
case '\\':
@@ -1004,6 +1038,17 @@ void tokenize(Buf *buf, Tokenization *out) {
break;
}
break;
+ case TokenizeStateDocComment:
+ switch (c) {
+ case '\n':
+ end_token(&t);
+ t.state = TokenizeStateStart;
+ break;
+ default:
+ // do nothing
+ break;
+ }
+ break;
case TokenizeStateSymbolFirstC:
switch (c) {
case '"':
@@ -1466,6 +1511,7 @@ void tokenize(Buf *buf, Tokenization *out) {
case TokenizeStateLineStringEnd:
case TokenizeStateSawBarBar:
case TokenizeStateLBracket:
+ case TokenizeStateDocComment:
end_token(&t);
break;
case TokenizeStateSawDotDot:
@@ -1478,6 +1524,8 @@ void tokenize(Buf *buf, Tokenization *out) {
tokenize_error(&t, "unexpected EOF");
break;
case TokenizeStateLineComment:
+ case TokenizeStateSawSlash2:
+ case TokenizeStateSawSlash3:
break;
}
if (t.state != TokenizeStateError) {
@@ -1524,6 +1572,7 @@ const char * token_name(TokenId id) {
case TokenIdComma: return ",";
case TokenIdDash: return "-";
case TokenIdDivEq: return "/=";
+ case TokenIdDocComment: return "DocComment";
case TokenIdDot: return ".";
case TokenIdEllipsis2: return "..";
case TokenIdEllipsis3: return "...";
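The net effect of the new tokenizer states is a three-way split on leading slashes: `//` begins an ordinary line comment and produces no token, exactly `///` produces a TokenIdDocComment token whose text the parser later accumulates, and `////` (or more) falls back to an ordinary comment. A short Zig sample annotated with the expected behavior (my reading of the state machine above, not output from the patch):

```zig
// Two slashes: the token is cancelled, nothing is emitted.
/// Three slashes: emitted as a TokenIdDocComment token.
//// Four or more slashes: an ordinary comment again, no token.
const answer: u32 = 42;
```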
diff --git a/src/tokenizer.hpp b/src/tokenizer.hpp
index a3d1a6000..e33a82f31 100644
--- a/src/tokenizer.hpp
+++ b/src/tokenizer.hpp
@@ -42,6 +42,7 @@ enum TokenId {
TokenIdComma,
TokenIdDash,
TokenIdDivEq,
+ TokenIdDocComment,
TokenIdDot,
TokenIdEllipsis2,
TokenIdEllipsis3,