implemented container doc comments in stage 1

This commit is contained in:
Vexu 2019-11-15 14:12:14 +02:00
parent b92f42d1f4
commit e509d21f39
No known key found for this signature in database
GPG Key ID: 5AEABFCAFF5CD8D6
4 changed files with 56 additions and 1 deletions

View File

@ -968,6 +968,7 @@ struct AstNodeContainerDecl {
AstNode *init_arg_expr; // enum(T), struct(endianness), or union(T), or union(enum(T)) AstNode *init_arg_expr; // enum(T), struct(endianness), or union(T), or union(enum(T))
ZigList<AstNode *> fields; ZigList<AstNode *> fields;
ZigList<AstNode *> decls; ZigList<AstNode *> decls;
Buf doc_comments;
ContainerKind kind; ContainerKind kind;
ContainerLayout layout; ContainerLayout layout;

View File

@ -493,6 +493,7 @@ static AstNode *ast_parse_root(ParseContext *pc) {
node->data.container_decl.layout = ContainerLayoutAuto; node->data.container_decl.layout = ContainerLayoutAuto;
node->data.container_decl.kind = ContainerKindStruct; node->data.container_decl.kind = ContainerKindStruct;
node->data.container_decl.is_root = true; node->data.container_decl.is_root = true;
node->data.container_decl.doc_comments = members.doc_comments;
return node; return node;
} }
@ -514,6 +515,21 @@ static Token *ast_parse_doc_comments(ParseContext *pc, Buf *buf) {
return first_doc_token; return first_doc_token;
} }
// Consumes a run of consecutive container doc comment tokens ("//!" lines)
// from the token stream and appends their text to `buf`.
// Multiple calls accumulate into the same buffer: if `buf` already holds
// text and another container doc comment follows, a '\n' separator is
// appended first so distinct comment groups stay on separate lines.
static void ast_parse_container_doc_comments(ParseContext *pc, Buf *buf) {
    if (buf_len(buf) != 0 && peek_token(pc)->id == TokenIdContainerDocComment) {
        buf_append_char(buf, '\n');
    }
    Token *doc_token = nullptr;
    // eat_token_if returns nullptr once the next token is not a container
    // doc comment, ending the loop.
    while ((doc_token = eat_token_if(pc, TokenIdContainerDocComment))) {
        if (buf->list.length == 0) {
            // NOTE(review): presumably this forces a lazily-initialized
            // (BUF_INIT) buffer into a valid empty state before the first
            // append — confirm against Buf's implementation.
            buf_resize(buf, 0);
        }
        // chops off '//!' but leaves '\n'
        // (the token's source span starts at the "//!" marker; skip those
        // 3 bytes and copy the rest, including the trailing newline when
        // the tokenizer included it in the token)
        buf_append_mem(buf, buf_ptr(pc->buf) + doc_token->start_pos + 3,
                doc_token->end_pos - doc_token->start_pos - 3);
    }
}
// ContainerMembers // ContainerMembers
// <- TestDecl ContainerMembers // <- TestDecl ContainerMembers
// / TopLevelComptime ContainerMembers // / TopLevelComptime ContainerMembers
@ -523,7 +539,11 @@ static Token *ast_parse_doc_comments(ParseContext *pc, Buf *buf) {
// / // /
static AstNodeContainerDecl ast_parse_container_members(ParseContext *pc) { static AstNodeContainerDecl ast_parse_container_members(ParseContext *pc) {
AstNodeContainerDecl res = {}; AstNodeContainerDecl res = {};
Buf tld_doc_comment_buf = BUF_INIT;
buf_resize(&tld_doc_comment_buf, 0);
for (;;) { for (;;) {
ast_parse_container_doc_comments(pc, &tld_doc_comment_buf);
AstNode *test_decl = ast_parse_test_decl(pc); AstNode *test_decl = ast_parse_test_decl(pc);
if (test_decl != nullptr) { if (test_decl != nullptr) {
res.decls.append(test_decl); res.decls.append(test_decl);
@ -566,7 +586,7 @@ static AstNodeContainerDecl ast_parse_container_members(ParseContext *pc) {
break; break;
} }
res.doc_comments = tld_doc_comment_buf;
return res; return res;
} }
@ -2797,6 +2817,7 @@ static AstNode *ast_parse_container_decl_auto(ParseContext *pc) {
res->data.container_decl.fields = members.fields; res->data.container_decl.fields = members.fields;
res->data.container_decl.decls = members.decls; res->data.container_decl.decls = members.decls;
res->data.container_decl.doc_comments = members.doc_comments;
return res; return res;
} }

View File

@ -198,6 +198,7 @@ enum TokenizeState {
TokenizeStateSawSlash, TokenizeStateSawSlash,
TokenizeStateSawSlash2, TokenizeStateSawSlash2,
TokenizeStateSawSlash3, TokenizeStateSawSlash3,
TokenizeStateSawSlashBang,
TokenizeStateSawBackslash, TokenizeStateSawBackslash,
TokenizeStateSawPercent, TokenizeStateSawPercent,
TokenizeStateSawPlus, TokenizeStateSawPlus,
@ -209,6 +210,7 @@ enum TokenizeState {
TokenizeStateSawBar, TokenizeStateSawBar,
TokenizeStateSawBarBar, TokenizeStateSawBarBar,
TokenizeStateDocComment, TokenizeStateDocComment,
TokenizeStateContainerDocComment,
TokenizeStateLineComment, TokenizeStateLineComment,
TokenizeStateLineString, TokenizeStateLineString,
TokenizeStateLineStringEnd, TokenizeStateLineStringEnd,
@ -938,6 +940,9 @@ void tokenize(Buf *buf, Tokenization *out) {
case '/': case '/':
t.state = TokenizeStateSawSlash3; t.state = TokenizeStateSawSlash3;
break; break;
case '!':
t.state = TokenizeStateSawSlashBang;
break;
case '\n': case '\n':
cancel_token(&t); cancel_token(&t);
t.state = TokenizeStateStart; t.state = TokenizeStateStart;
@ -965,6 +970,19 @@ void tokenize(Buf *buf, Tokenization *out) {
break; break;
} }
break; break;
case TokenizeStateSawSlashBang:
switch (c) {
case '\n':
set_token_id(&t, t.cur_tok, TokenIdContainerDocComment);
end_token(&t);
t.state = TokenizeStateStart;
break;
default:
set_token_id(&t, t.cur_tok, TokenIdContainerDocComment);
t.state = TokenizeStateContainerDocComment;
break;
}
break;
case TokenizeStateSawBackslash: case TokenizeStateSawBackslash:
switch (c) { switch (c) {
case '\\': case '\\':
@ -1055,6 +1073,17 @@ void tokenize(Buf *buf, Tokenization *out) {
break; break;
} }
break; break;
case TokenizeStateContainerDocComment:
switch (c) {
case '\n':
end_token(&t);
t.state = TokenizeStateStart;
break;
default:
// do nothing
break;
}
break;
case TokenizeStateSymbolFirstC: case TokenizeStateSymbolFirstC:
switch (c) { switch (c) {
case '"': case '"':
@ -1545,6 +1574,7 @@ void tokenize(Buf *buf, Tokenization *out) {
case TokenizeStateSawBarBar: case TokenizeStateSawBarBar:
case TokenizeStateLBracket: case TokenizeStateLBracket:
case TokenizeStateDocComment: case TokenizeStateDocComment:
case TokenizeStateContainerDocComment:
end_token(&t); end_token(&t);
break; break;
case TokenizeStateSawDotDot: case TokenizeStateSawDotDot:
@ -1559,6 +1589,7 @@ void tokenize(Buf *buf, Tokenization *out) {
case TokenizeStateLineComment: case TokenizeStateLineComment:
case TokenizeStateSawSlash2: case TokenizeStateSawSlash2:
case TokenizeStateSawSlash3: case TokenizeStateSawSlash3:
case TokenizeStateSawSlashBang:
break; break;
} }
if (t.state != TokenizeStateError) { if (t.state != TokenizeStateError) {
@ -1606,6 +1637,7 @@ const char * token_name(TokenId id) {
case TokenIdDash: return "-"; case TokenIdDash: return "-";
case TokenIdDivEq: return "/="; case TokenIdDivEq: return "/=";
case TokenIdDocComment: return "DocComment"; case TokenIdDocComment: return "DocComment";
case TokenIdContainerDocComment: return "ContainerDocComment";
case TokenIdDot: return "."; case TokenIdDot: return ".";
case TokenIdDotStar: return ".*"; case TokenIdDotStar: return ".*";
case TokenIdEllipsis2: return ".."; case TokenIdEllipsis2: return "..";

View File

@ -43,6 +43,7 @@ enum TokenId {
TokenIdDash, TokenIdDash,
TokenIdDivEq, TokenIdDivEq,
TokenIdDocComment, TokenIdDocComment,
TokenIdContainerDocComment,
TokenIdDot, TokenIdDot,
TokenIdDotStar, TokenIdDotStar,
TokenIdEllipsis2, TokenIdEllipsis2,