zig fmt: support line comments and doc comments
line comments can go anywhere a list of something is allowed
parent 39befc35a8
commit fd2cd38bdb
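As a rough sketch of what the formatter accepts after this change (modeled on the new test cases further down in the diff, not taken verbatim from the commit), `//` line comments may now appear between the items of any list — container fields, switch prongs, statements — while `///` doc comments become their own DocComment node attached to the declaration that follows:

    // Illustrative sketch only; names below are made up for the example.

    /// Doc comments ("///") now produce a DocComment node that is
    /// attached to the declaration that follows them.
    const UnexpectedError = error {
        // line comments may appear between the items of a list...
        Unexpected,
        // ...and at the end of the list
        Another,
    };

    fn describe(x: u8) []const u8 {
        return switch (x) {
            // line comments may also appear between switch prongs
            0 => "zero",
            // and before the last prong
            else => "nonzero",
        };
    }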
@@ -6,7 +6,7 @@ const mem = std.mem;
 
 pub const Node = struct {
     id: Id,
-    before_comments: ?&LineComment,
+    doc_comments: ?&DocComment,
     same_line_comment: ?&Token,
 
     pub const Id = enum {
@@ -59,6 +59,7 @@ pub const Node = struct {
 
         // Misc
         LineComment,
+        DocComment,
         SwitchCase,
         SwitchElse,
         Else,
@@ -718,7 +719,8 @@ pub const Node = struct {
         base: Node,
         switch_token: Token,
         expr: &Node,
-        cases: ArrayList(&SwitchCase),
+        /// these can be SwitchCase nodes or LineComment nodes
+        cases: ArrayList(&Node),
         rbrace: Token,
 
         pub fn iterate(self: &Switch, index: usize) ?&Node {
@@ -727,7 +729,7 @@ pub const Node = struct {
             if (i < 1) return self.expr;
             i -= 1;
 
-            if (i < self.cases.len) return &self.cases.at(i).base;
+            if (i < self.cases.len) return self.cases.at(i);
             i -= self.cases.len;
 
             return null;
@@ -1715,17 +1717,34 @@ pub const Node = struct {
 
     pub const LineComment = struct {
         base: Node,
-        lines: ArrayList(Token),
+        token: Token,
 
         pub fn iterate(self: &LineComment, index: usize) ?&Node {
            return null;
        }
 
        pub fn firstToken(self: &LineComment) Token {
-            return self.lines.at(0);
+            return self.token;
        }
 
        pub fn lastToken(self: &LineComment) Token {
+            return self.token;
+        }
+    };
+
+    pub const DocComment = struct {
+        base: Node,
+        lines: ArrayList(Token),
+
+        pub fn iterate(self: &DocComment, index: usize) ?&Node {
+            return null;
+        }
+
+        pub fn firstToken(self: &DocComment) Token {
+            return self.lines.at(0);
+        }
+
+        pub fn lastToken(self: &DocComment) Token {
            return self.lines.at(self.lines.len - 1);
        }
    };
@@ -55,7 +55,7 @@ pub const Parser = struct {
         visib_token: ?Token,
         extern_export_inline_token: ?Token,
         lib_name: ?&ast.Node,
-        comments: ?&ast.Node.LineComment,
+        comments: ?&ast.Node.DocComment,
     };
 
     const VarDeclCtx = struct {
@@ -65,19 +65,19 @@ pub const Parser = struct {
         extern_export_token: ?Token,
         lib_name: ?&ast.Node,
         list: &ArrayList(&ast.Node),
-        comments: ?&ast.Node.LineComment,
+        comments: ?&ast.Node.DocComment,
     };
 
     const TopLevelExternOrFieldCtx = struct {
         visib_token: Token,
         container_decl: &ast.Node.ContainerDecl,
-        comments: ?&ast.Node.LineComment,
+        comments: ?&ast.Node.DocComment,
     };
 
     const ExternTypeCtx = struct {
         opt_ctx: OptionalCtx,
         extern_token: Token,
-        comments: ?&ast.Node.LineComment,
+        comments: ?&ast.Node.DocComment,
     };
 
     const ContainerKindCtx = struct {
@@ -186,7 +186,7 @@ pub const Parser = struct {
 
     const AddCommentsCtx = struct {
         node_ptr: &&ast.Node,
-        comments: ?&ast.Node.LineComment,
+        comments: ?&ast.Node.DocComment,
     };
 
     const State = union(enum) {
@@ -244,8 +244,8 @@ pub const Parser = struct {
         FieldListCommaOrEnd: &ast.Node.ContainerDecl,
         IdentifierListItemOrEnd: ListSave(&ast.Node),
         IdentifierListCommaOrEnd: ListSave(&ast.Node),
-        SwitchCaseOrEnd: ListSave(&ast.Node.SwitchCase),
-        SwitchCaseCommaOrEnd: ListSave(&ast.Node.SwitchCase),
+        SwitchCaseOrEnd: ListSave(&ast.Node),
+        SwitchCaseCommaOrEnd: ListSave(&ast.Node),
         SwitchCaseFirstItem: &ArrayList(&ast.Node),
         SwitchCaseItem: &ArrayList(&ast.Node),
         SwitchCaseItemCommaOrEnd: &ArrayList(&ast.Node),
@@ -349,6 +349,10 @@ pub const Parser = struct {
 
             switch (state) {
                 State.TopLevel => {
+                    while (try self.eatLineComment(arena)) |line_comment| {
+                        try root_node.decls.append(&line_comment.base);
+                    }
+
                     const comments = try self.eatComments(arena);
                     const token = self.getNextToken();
                     switch (token.id) {
@@ -358,7 +362,7 @@ pub const Parser = struct {
                     const block = try arena.construct(ast.Node.Block {
                         .base = ast.Node {
                             .id = ast.Node.Id.Block,
-                            .before_comments = null,
+                            .doc_comments = null,
                             .same_line_comment = null,
                         },
                         .label = null,
@@ -369,7 +373,7 @@ pub const Parser = struct {
                     const test_node = try arena.construct(ast.Node.TestDecl {
                         .base = ast.Node {
                             .id = ast.Node.Id.TestDecl,
-                            .before_comments = comments,
+                            .doc_comments = comments,
                             .same_line_comment = null,
                         },
                         .test_token = token,
@@ -551,7 +555,7 @@ pub const Parser = struct {
                     const fn_proto = try arena.construct(ast.Node.FnProto {
                         .base = ast.Node {
                             .id = ast.Node.Id.FnProto,
-                            .before_comments = ctx.comments,
+                            .doc_comments = ctx.comments,
                             .same_line_comment = null,
                         },
                         .visib_token = ctx.visib_token,
@@ -620,7 +624,7 @@ pub const Parser = struct {
                     const node = try arena.construct(ast.Node.StructField {
                         .base = ast.Node {
                             .id = ast.Node.Id.StructField,
-                            .before_comments = null,
+                            .doc_comments = null,
                             .same_line_comment = null,
                         },
                         .visib_token = ctx.visib_token,
@@ -706,6 +710,10 @@ pub const Parser = struct {
                     continue;
                 },
                 State.ContainerDecl => |container_decl| {
+                    while (try self.eatLineComment(arena)) |line_comment| {
+                        try container_decl.fields_and_decls.append(&line_comment.base);
+                    }
+
                     const comments = try self.eatComments(arena);
                     const token = self.getNextToken();
                     switch (token.id) {
@@ -715,7 +723,7 @@ pub const Parser = struct {
                     const node = try arena.construct(ast.Node.StructField {
                         .base = ast.Node {
                             .id = ast.Node.Id.StructField,
-                            .before_comments = comments,
+                            .doc_comments = comments,
                             .same_line_comment = null,
                         },
                         .visib_token = null,
@@ -826,7 +834,7 @@ pub const Parser = struct {
                     const var_decl = try arena.construct(ast.Node.VarDecl {
                         .base = ast.Node {
                             .id = ast.Node.Id.VarDecl,
-                            .before_comments = ctx.comments,
+                            .doc_comments = ctx.comments,
                             .same_line_comment = null,
                         },
                         .visib_token = ctx.visib_token,
@@ -1222,6 +1230,14 @@ pub const Parser = struct {
                 else => {
                     self.putBackToken(token);
                     stack.append(State { .Block = block }) catch unreachable;
+
+                    var any_comments = false;
+                    while (try self.eatLineComment(arena)) |line_comment| {
+                        try block.statements.append(&line_comment.base);
+                        any_comments = true;
+                    }
+                    if (any_comments) continue;
+
                     try stack.append(State { .Statement = block });
                     continue;
                 },
@@ -1258,7 +1274,7 @@ pub const Parser = struct {
                     const node = try arena.construct(ast.Node.Defer {
                         .base = ast.Node {
                             .id = ast.Node.Id.Defer,
-                            .before_comments = comments,
+                            .doc_comments = comments,
                             .same_line_comment = null,
                         },
                         .defer_token = token,
@@ -1342,7 +1358,7 @@ pub const Parser = struct {
 
                 State.AddComments => |add_comments_ctx| {
                     const node = *add_comments_ctx.node_ptr;
-                    node.before_comments = add_comments_ctx.comments;
+                    node.doc_comments = add_comments_ctx.comments;
                     continue;
                 },
 
@@ -1466,7 +1482,7 @@ pub const Parser = struct {
                     const node = try arena.construct(ast.Node.FieldInitializer {
                         .base = ast.Node {
                             .id = ast.Node.Id.FieldInitializer,
-                            .before_comments = null,
+                            .doc_comments = null,
                             .same_line_comment = null,
                         },
                         .period_token = undefined,
@@ -1512,6 +1528,10 @@ pub const Parser = struct {
                     continue;
                 },
                 State.IdentifierListItemOrEnd => |list_state| {
+                    while (try self.eatLineComment(arena)) |line_comment| {
+                        try list_state.list.append(&line_comment.base);
+                    }
+
                     if (self.eatToken(Token.Id.RBrace)) |rbrace| {
                         *list_state.ptr = rbrace;
                         continue;
@@ -1538,6 +1558,10 @@ pub const Parser = struct {
                     }
                 },
                 State.SwitchCaseOrEnd => |list_state| {
+                    while (try self.eatLineComment(arena)) |line_comment| {
+                        try list_state.list.append(&line_comment.base);
+                    }
+
                     if (self.eatToken(Token.Id.RBrace)) |rbrace| {
                         *list_state.ptr = rbrace;
                         continue;
@@ -1547,14 +1571,14 @@ pub const Parser = struct {
                     const node = try arena.construct(ast.Node.SwitchCase {
                         .base = ast.Node {
                             .id = ast.Node.Id.SwitchCase,
-                            .before_comments = comments,
+                            .doc_comments = comments,
                             .same_line_comment = null,
                         },
                         .items = ArrayList(&ast.Node).init(arena),
                         .payload = null,
                         .expr = undefined,
                     });
-                    try list_state.list.append(node);
+                    try list_state.list.append(&node.base);
                     try stack.append(State { .SwitchCaseCommaOrEnd = list_state });
                     try stack.append(State { .AssignmentExpressionBegin = OptionalCtx { .Required = &node.expr } });
                     try stack.append(State { .PointerPayload = OptionalCtx { .Optional = &node.payload } });
@@ -1569,8 +1593,8 @@ pub const Parser = struct {
                         continue;
                     }
 
-                    const switch_case = list_state.list.toSlice()[list_state.list.len - 1];
-                    try self.lookForSameLineComment(arena, &switch_case.base);
+                    const node = list_state.list.toSlice()[list_state.list.len - 1];
+                    try self.lookForSameLineComment(arena, node);
                     try stack.append(State { .SwitchCaseOrEnd = list_state });
                     continue;
                 },
@@ -1660,7 +1684,7 @@ pub const Parser = struct {
                     const fn_proto = try arena.construct(ast.Node.FnProto {
                         .base = ast.Node {
                             .id = ast.Node.Id.FnProto,
-                            .before_comments = ctx.comments,
+                            .doc_comments = ctx.comments,
                             .same_line_comment = null,
                         },
                         .visib_token = null,
@@ -2632,7 +2656,7 @@ pub const Parser = struct {
                     const fn_proto = try arena.construct(ast.Node.FnProto {
                         .base = ast.Node {
                             .id = ast.Node.Id.FnProto,
-                            .before_comments = null,
+                            .doc_comments = null,
                             .same_line_comment = null,
                         },
                         .visib_token = null,
@@ -2656,7 +2680,7 @@ pub const Parser = struct {
                     const fn_proto = try arena.construct(ast.Node.FnProto {
                         .base = ast.Node {
                             .id = ast.Node.Id.FnProto,
-                            .before_comments = null,
+                            .doc_comments = null,
                             .same_line_comment = null,
                         },
                         .visib_token = null,
@@ -2749,7 +2773,7 @@ pub const Parser = struct {
                     const node = try arena.construct(ast.Node.ErrorSetDecl {
                         .base = ast.Node {
                             .id = ast.Node.Id.ErrorSetDecl,
-                            .before_comments = null,
+                            .doc_comments = null,
                             .same_line_comment = null,
                         },
                         .error_token = ctx.error_token,
@@ -2829,18 +2853,18 @@ pub const Parser = struct {
         }
     }
 
-    fn eatComments(self: &Parser, arena: &mem.Allocator) !?&ast.Node.LineComment {
-        var result: ?&ast.Node.LineComment = null;
+    fn eatComments(self: &Parser, arena: &mem.Allocator) !?&ast.Node.DocComment {
+        var result: ?&ast.Node.DocComment = null;
         while (true) {
-            if (self.eatToken(Token.Id.LineComment)) |line_comment| {
+            if (self.eatToken(Token.Id.DocComment)) |line_comment| {
                 const node = blk: {
                     if (result) |comment_node| {
                         break :blk comment_node;
                     } else {
-                        const comment_node = try arena.construct(ast.Node.LineComment {
+                        const comment_node = try arena.construct(ast.Node.DocComment {
                             .base = ast.Node {
-                                .id = ast.Node.Id.LineComment,
-                                .before_comments = null,
+                                .id = ast.Node.Id.DocComment,
+                                .doc_comments = null,
                                 .same_line_comment = null,
                             },
                             .lines = ArrayList(Token).init(arena),
@@ -2857,6 +2881,18 @@ pub const Parser = struct {
         return result;
     }
 
+    fn eatLineComment(self: &Parser, arena: &mem.Allocator) !?&ast.Node.LineComment {
+        const token = self.eatToken(Token.Id.LineComment) ?? return null;
+        return try arena.construct(ast.Node.LineComment {
+            .base = ast.Node {
+                .id = ast.Node.Id.LineComment,
+                .doc_comments = null,
+                .same_line_comment = null,
+            },
+            .token = token,
+        });
+    }
+
     fn requireSemiColon(node: &const ast.Node) bool {
         var n = node;
         while (true) {
@@ -2874,6 +2910,7 @@ pub const Parser = struct {
             ast.Node.Id.SwitchCase,
             ast.Node.Id.SwitchElse,
             ast.Node.Id.FieldInitializer,
+            ast.Node.Id.DocComment,
             ast.Node.Id.LineComment,
             ast.Node.Id.TestDecl => return false,
             ast.Node.Id.While => {
@@ -2933,7 +2970,7 @@ pub const Parser = struct {
         const node_last_token = node.lastToken();
 
         const line_comment_token = self.getNextToken();
-        if (line_comment_token.id != Token.Id.LineComment) {
+        if (line_comment_token.id != Token.Id.DocComment and line_comment_token.id != Token.Id.LineComment) {
            self.putBackToken(line_comment_token);
            return;
        }
@@ -3038,18 +3075,21 @@ pub const Parser = struct {
                     return true;
                 },
                 Token.Id.Keyword_switch => {
-                    const node = try self.createToCtxNode(arena, ctx, ast.Node.Switch,
-                        ast.Node.Switch {
-                            .base = undefined,
+                    const node = try arena.construct(ast.Node.Switch {
+                        .base = ast.Node {
+                            .id = ast.Node.Id.Switch,
+                            .doc_comments = null,
+                            .same_line_comment = null,
+                        },
                         .switch_token = *token,
                         .expr = undefined,
-                        .cases = ArrayList(&ast.Node.SwitchCase).init(arena),
+                        .cases = ArrayList(&ast.Node).init(arena),
                         .rbrace = undefined,
-                        }
-                    );
+                    });
+                    ctx.store(&node.base);
 
                     stack.append(State {
-                        .SwitchCaseOrEnd = ListSave(&ast.Node.SwitchCase) {
+                        .SwitchCaseOrEnd = ListSave(&ast.Node) {
                             .list = &node.cases,
                             .ptr = &node.rbrace,
                         },
@@ -3208,7 +3248,7 @@ pub const Parser = struct {
             const id = ast.Node.typeToId(T);
             break :blk ast.Node {
                 .id = id,
-                .before_comments = null,
+                .doc_comments = null,
                 .same_line_comment = null,
            };
        };
@@ -3454,6 +3494,10 @@ pub const Parser = struct {
                        }
                        try stack.append(RenderState { .Expression = decl });
                    },
+                    ast.Node.Id.LineComment => {
+                        const line_comment_node = @fieldParentPtr(ast.Node.LineComment, "base", decl);
+                        try stream.write(self.tokenizer.getTokenSlice(line_comment_node.token));
+                    },
                    else => unreachable,
                }
            },
@@ -3987,7 +4031,9 @@ pub const Parser = struct {
                 while (i != 0) {
                     i -= 1;
                     const node = decls[i];
+                    if (node.id != ast.Node.Id.LineComment) {
                         try stack.append(RenderState { .Text = "," });
+                    }
                     try stack.append(RenderState { .Expression = node });
                     try stack.append(RenderState { .PrintComments = node });
                     try stack.append(RenderState.PrintIndent);
@@ -4100,7 +4146,11 @@ pub const Parser = struct {
                     try stack.append(RenderState { .Text = self.tokenizer.getTokenSlice(visib_token) });
                 }
             },
-            ast.Node.Id.LineComment => @panic("TODO render line comment in an expression"),
+            ast.Node.Id.LineComment => {
+                const line_comment_node = @fieldParentPtr(ast.Node.LineComment, "base", base);
+                try stream.write(self.tokenizer.getTokenSlice(line_comment_node.token));
+            },
+            ast.Node.Id.DocComment => unreachable, // doc comments are attached to nodes
             ast.Node.Id.Switch => {
                 const switch_node = @fieldParentPtr(ast.Node.Switch, "base", base);
                 try stream.print("{} (", self.tokenizer.getTokenSlice(switch_node.switch_token));
@@ -4115,7 +4165,7 @@ pub const Parser = struct {
                 while (i != 0) {
                     i -= 1;
                     const node = cases[i];
-                    try stack.append(RenderState { .Expression = &node.base});
+                    try stack.append(RenderState { .Expression = node});
                     try stack.append(RenderState.PrintIndent);
                     try stack.append(RenderState {
                         .Text = blk: {
@@ -4487,7 +4537,7 @@ pub const Parser = struct {
     }
 
     fn renderComments(self: &Parser, stream: var, node: &ast.Node, indent: usize) !void {
-        const comment = node.before_comments ?? return;
+        const comment = node.doc_comments ?? return;
         for (comment.lines.toSliceConst()) |line_token| {
            try stream.print("{}\n", self.tokenizer.getTokenSlice(line_token));
            try stream.writeByteNTimes(' ', indent);
@@ -3,9 +3,12 @@ test "zig fmt: comments before error set decl" {
         \\const UnexpectedError = error {
         \\    /// The Operating System returned an undocumented error code.
         \\    Unexpected,
-        \\
         \\    // another
         \\    Another,
+        \\
+        \\    // in between
+        \\
+        \\    // at end
         \\};
         \\
     );
@@ -18,8 +21,10 @@ test "zig fmt: comments before switch prong" {
         \\        error.PathAlreadyExists => continue,
         \\
         \\        // comment 1
+        \\
         \\        // comment 2
         \\        else => return err,
+        \\        // at end
         \\    }
         \\}
         \\
@@ -47,6 +52,17 @@ test "zig fmt: comments before var decl in struct" {
         \\        permitted: u32,
         \\        inheritable: u32,
         \\    };
+        \\
+        \\    // in between
+        \\
+        \\    /// All of these are mandated as little endian
+        \\    /// when on disk.
+        \\    const Data = struct {
+        \\        permitted: u32,
+        \\        inheritable: u32,
+        \\    };
+        \\
+        \\    // at end
         \\};
         \\
     );
@@ -106,6 +122,10 @@ test "zig fmt: comments before statements" {
         \\test "std" {
         \\    // statement comment
         \\    _ = @import("foo/bar.zig");
+        \\
+        \\    // middle
+        \\
+        \\    // end
         \\}
         \\
     );
@@ -113,17 +133,27 @@ test "zig fmt: comments before statements" {
 
 test "zig fmt: comments before test decl" {
     try testCanonical(
-        \\// top level comment
+        \\/// top level doc comment
         \\test "hi" {}
         \\
+        \\// top level normal comment
+        \\test "hi" {}
+        \\
+        \\// middle
+        \\
+        \\// end
+        \\
     );
 }
 
-test "zig fmt: get stdout or fail" {
+test "zig fmt: comments before variable declarations" {
     try testCanonical(
         \\const std = @import("std");
         \\
         \\pub fn main() !void {
+        \\    /// If this program is run without stdout attached, exit with an error.
+        \\    /// another comment
+        \\    var stdout_file = try std.io.getStdOut;
         \\    // If this program is run without stdout attached, exit with an error.
         \\    // another comment
         \\    var stdout_file = try std.io.getStdOut;
@@ -137,6 +137,7 @@ pub const Token = struct {
         IntegerLiteral,
         FloatLiteral,
         LineComment,
+        DocComment,
         Keyword_align,
         Keyword_and,
         Keyword_asm,
@@ -257,6 +258,7 @@ pub const Tokenizer = struct {
         Asterisk,
         AsteriskPercent,
         Slash,
+        LineCommentStart,
         LineComment,
         Zero,
         IntegerLiteral,
@@ -822,8 +824,7 @@ pub const Tokenizer = struct {
 
                 State.Slash => switch (c) {
                     '/' => {
-                        result.id = Token.Id.LineComment;
-                        state = State.LineComment;
+                        state = State.LineCommentStart;
                     },
                     '=' => {
                         result.id = Token.Id.SlashEqual;
@@ -835,6 +836,17 @@ pub const Tokenizer = struct {
                         break;
                     },
                 },
+                State.LineCommentStart => switch (c) {
+                    '/' => {
+                        result.id = Token.Id.DocComment;
+                        state = State.LineComment;
+                    },
+                    '\n' => {
+                        result.id = Token.Id.LineComment;
+                        break;
+                    },
+                    else => self.checkLiteralCharacter(),
+                },
                 State.LineComment => switch (c) {
                     '\n' => break,
                     else => self.checkLiteralCharacter(),
@@ -920,6 +932,7 @@ pub const Tokenizer = struct {
                     result.id = id;
                 }
             },
+            State.LineCommentStart,
             State.LineComment => {
                 result.id = Token.Id.Eof;
             },