implemented container doc comments in stage 1

parent b92f42d1f4
commit e509d21f39
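A container doc comment (`//!`) documents the container that encloses it, most commonly the file/module itself, whereas a regular doc comment (`///`) documents the declaration that follows it. This commit teaches the stage1 (C++) compiler to tokenize `//!` lines as a new `TokenIdContainerDocComment`, accumulate their text while parsing container members, and store the result in a new `doc_comments` field on `AstNodeContainerDecl`.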
--- a/src/all_types.hpp
+++ b/src/all_types.hpp
@@ -968,6 +968,7 @@ struct AstNodeContainerDecl {
     AstNode *init_arg_expr; // enum(T), struct(endianness), or union(T), or union(enum(T))
     ZigList<AstNode *> fields;
     ZigList<AstNode *> decls;
+    Buf doc_comments;
 
     ContainerKind kind;
     ContainerLayout layout;
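For orientation, here is a minimal compilable sketch of the shape this hunk adds, with `std::string` standing in for stage1's `Buf` and hypothetical `Fake*` names in place of the real types:

```cpp
#include <string>
#include <vector>

struct FakeAstNode; // stand-in for stage1's AstNode

// Trimmed analogue of AstNodeContainerDecl after this hunk: alongside its
// fields and declarations, a container node carries one flat buffer with
// the accumulated text of its `//!` doc comments.
struct FakeContainerDecl {
    std::vector<FakeAstNode *> fields; // stands in for ZigList<AstNode *>
    std::vector<FakeAstNode *> decls;
    std::string doc_comments;          // stands in for `Buf doc_comments;`
};

int main() {
    FakeContainerDecl root;
    root.doc_comments = " Documents the container itself.\n";
    return root.doc_comments.empty() ? 1 : 0;
}
```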
--- a/src/parser.cpp
+++ b/src/parser.cpp
@@ -493,6 +493,7 @@ static AstNode *ast_parse_root(ParseContext *pc) {
     node->data.container_decl.layout = ContainerLayoutAuto;
     node->data.container_decl.kind = ContainerKindStruct;
     node->data.container_decl.is_root = true;
+    node->data.container_decl.doc_comments = members.doc_comments;
 
     return node;
 }
@@ -514,6 +515,21 @@ static Token *ast_parse_doc_comments(ParseContext *pc, Buf *buf) {
     return first_doc_token;
 }
 
+static void ast_parse_container_doc_comments(ParseContext *pc, Buf *buf) {
+    if (buf_len(buf) != 0 && peek_token(pc)->id == TokenIdContainerDocComment) {
+        buf_append_char(buf, '\n');
+    }
+    Token *doc_token = nullptr;
+    while ((doc_token = eat_token_if(pc, TokenIdContainerDocComment))) {
+        if (buf->list.length == 0) {
+            buf_resize(buf, 0);
+        }
+        // chops off '//!' but leaves '\n'
+        buf_append_mem(buf, buf_ptr(pc->buf) + doc_token->start_pos + 3,
+                doc_token->end_pos - doc_token->start_pos - 3);
+    }
+}
+
 // ContainerMembers
 //     <- TestDecl ContainerMembers
 //      / TopLevelComptime ContainerMembers
@@ -523,7 +539,11 @@ static Token *ast_parse_doc_comments(ParseContext *pc, Buf *buf) {
 //      /
 static AstNodeContainerDecl ast_parse_container_members(ParseContext *pc) {
     AstNodeContainerDecl res = {};
+    Buf tld_doc_comment_buf = BUF_INIT;
+    buf_resize(&tld_doc_comment_buf, 0);
     for (;;) {
+        ast_parse_container_doc_comments(pc, &tld_doc_comment_buf);
+
         AstNode *test_decl = ast_parse_test_decl(pc);
         if (test_decl != nullptr) {
             res.decls.append(test_decl);
@@ -566,7 +586,7 @@ static AstNodeContainerDecl ast_parse_container_members(ParseContext *pc) {
 
         break;
     }
-
+    res.doc_comments = tld_doc_comment_buf;
     return res;
 }
 
@@ -2797,6 +2817,7 @@ static AstNode *ast_parse_container_decl_auto(ParseContext *pc) {
 
     res->data.container_decl.fields = members.fields;
     res->data.container_decl.decls = members.decls;
+    res->data.container_decl.doc_comments = members.doc_comments;
     return res;
 }
 
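As a standalone illustration of what `ast_parse_container_doc_comments` does: strip the three bytes `//!` from each container-doc token, keep each line's trailing `\n`, and insert a separating `\n` when a new run of `//!` comments begins after the buffer already has content. Here is a hedged sketch against a plain token list; the token layout and names (`FakeToken`, `collect_container_docs`) are invented for the example, not stage1 API:

```cpp
#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

// Hypothetical token record for the sketch; stage1's real Token carries more.
struct FakeToken {
    bool is_container_doc; // true for a `//! ...` line
    size_t start_pos;      // byte offset of the leading '/'
    size_t end_pos;        // one past the trailing '\n'
};

// Mirrors the parser's loop: if the buffer already has text and a new run of
// container doc comments starts, add a '\n' separator; then append each
// token's text with the leading "//!" chopped off but the '\n' kept.
std::string collect_container_docs(const std::string &src,
                                   const std::vector<FakeToken> &toks) {
    std::string buf;
    for (size_t i = 0; i < toks.size(); i++) {
        const FakeToken &t = toks[i];
        if (!t.is_container_doc)
            continue;
        bool starts_new_run = (i == 0) || !toks[i - 1].is_container_doc;
        if (!buf.empty() && starts_new_run)
            buf += '\n';
        buf.append(src, t.start_pos + 3, t.end_pos - t.start_pos - 3);
    }
    return buf;
}

int main() {
    std::string src = "//! line one\n//! line two\n";
    std::vector<FakeToken> toks = {
        {true, 0, 13},  // "//! line one\n"
        {true, 13, 26}, // "//! line two\n"
    };
    std::cout << collect_container_docs(src, toks); // " line one\n line two\n"
}
```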
--- a/src/tokenizer.cpp
+++ b/src/tokenizer.cpp
@@ -198,6 +198,7 @@ enum TokenizeState {
     TokenizeStateSawSlash,
     TokenizeStateSawSlash2,
     TokenizeStateSawSlash3,
+    TokenizeStateSawSlashBang,
     TokenizeStateSawBackslash,
     TokenizeStateSawPercent,
     TokenizeStateSawPlus,
@@ -209,6 +210,7 @@ enum TokenizeState {
     TokenizeStateSawBar,
     TokenizeStateSawBarBar,
     TokenizeStateDocComment,
+    TokenizeStateContainerDocComment,
     TokenizeStateLineComment,
     TokenizeStateLineString,
     TokenizeStateLineStringEnd,
@@ -938,6 +940,9 @@ void tokenize(Buf *buf, Tokenization *out) {
                     case '/':
                         t.state = TokenizeStateSawSlash3;
                         break;
+                    case '!':
+                        t.state = TokenizeStateSawSlashBang;
+                        break;
                     case '\n':
                         cancel_token(&t);
                         t.state = TokenizeStateStart;
@@ -965,6 +970,19 @@ void tokenize(Buf *buf, Tokenization *out) {
                         break;
                 }
                 break;
+            case TokenizeStateSawSlashBang:
+                switch (c) {
+                    case '\n':
+                        set_token_id(&t, t.cur_tok, TokenIdContainerDocComment);
+                        end_token(&t);
+                        t.state = TokenizeStateStart;
+                        break;
+                    default:
+                        set_token_id(&t, t.cur_tok, TokenIdContainerDocComment);
+                        t.state = TokenizeStateContainerDocComment;
+                        break;
+                }
+                break;
             case TokenizeStateSawBackslash:
                 switch (c) {
                     case '\\':
@@ -1055,6 +1073,17 @@ void tokenize(Buf *buf, Tokenization *out) {
                         break;
                 }
                 break;
+            case TokenizeStateContainerDocComment:
+                switch (c) {
+                    case '\n':
+                        end_token(&t);
+                        t.state = TokenizeStateStart;
+                        break;
+                    default:
+                        // do nothing
+                        break;
+                }
+                break;
             case TokenizeStateSymbolFirstC:
                 switch (c) {
                     case '"':
@@ -1545,6 +1574,7 @@ void tokenize(Buf *buf, Tokenization *out) {
         case TokenizeStateSawBarBar:
         case TokenizeStateLBracket:
         case TokenizeStateDocComment:
+        case TokenizeStateContainerDocComment:
            end_token(&t);
            break;
        case TokenizeStateSawDotDot:
@@ -1559,6 +1589,7 @@ void tokenize(Buf *buf, Tokenization *out) {
        case TokenizeStateLineComment:
        case TokenizeStateSawSlash2:
        case TokenizeStateSawSlash3:
+       case TokenizeStateSawSlashBang:
            break;
    }
    if (t.state != TokenizeStateError) {
@@ -1606,6 +1637,7 @@ const char * token_name(TokenId id) {
        case TokenIdDash: return "-";
        case TokenIdDivEq: return "/=";
        case TokenIdDocComment: return "DocComment";
+       case TokenIdContainerDocComment: return "ContainerDocComment";
        case TokenIdDot: return ".";
        case TokenIdDotStar: return ".*";
        case TokenIdEllipsis2: return "..";
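The tokenizer side reduces to a three-way split after `//`: a third `/` heads toward a `///` doc comment, a `!` toward a `//!` container doc comment (the new `TokenizeStateSawSlashBang` and `TokenizeStateContainerDocComment` states), and anything else is a plain line comment. A minimal self-contained scanner in the same spirit — not stage1's actual state machine, and with invented names — looks like this:

```cpp
#include <cstddef>
#include <iostream>
#include <string>
#include <utility>
#include <vector>

enum class CommentKind { Line, Doc, ContainerDoc };

// After "//", a third '/' marks a `///` doc comment and a '!' marks a `//!`
// container doc comment; anything else is an ordinary line comment. The
// trailing '\n' stays inside the token, matching the parser comment
// "chops off '//!' but leaves '\n'". (In Zig, `////...` counts as a plain
// comment again; this sketch skips that detail.)
std::vector<std::pair<CommentKind, std::string>> scan(const std::string &src) {
    std::vector<std::pair<CommentKind, std::string>> out;
    size_t i = 0;
    while (i < src.size()) {
        if (src[i] == '/' && i + 1 < src.size() && src[i + 1] == '/') {
            size_t start = i;
            i += 2;
            CommentKind kind = CommentKind::Line;
            if (i < src.size() && src[i] == '/') { kind = CommentKind::Doc; i++; }
            else if (i < src.size() && src[i] == '!') { kind = CommentKind::ContainerDoc; i++; }
            while (i < src.size() && src[i] != '\n') i++;
            if (i < src.size()) i++; // keep the '\n' in the token text
            out.emplace_back(kind, src.substr(start, i - start));
        } else {
            i++; // not a comment start; real stage1 tokenizes this instead
        }
    }
    return out;
}

int main() {
    const char *names[] = {"Line", "Doc", "ContainerDoc"};
    std::string src =
        "//! documents the enclosing container (the file)\n"
        "/// documents the next declaration\n"
        "// plain comment\n";
    for (const auto &c : scan(src))
        std::cout << names[static_cast<int>(c.first)] << ": " << c.second;
}
```

Running it prints one classified comment per line; the `ContainerDoc` tokens are exactly the stream that `ast_parse_container_doc_comments` consumes on the parser side.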
--- a/src/tokenizer.hpp
+++ b/src/tokenizer.hpp
@@ -43,6 +43,7 @@ enum TokenId {
     TokenIdDash,
     TokenIdDivEq,
     TokenIdDocComment,
+    TokenIdContainerDocComment,
     TokenIdDot,
     TokenIdDotStar,
     TokenIdEllipsis2,