stage2 parser: don't append doc comments to the list
The DocComment AST node now only points to the first doc comment token. API users are expected to iterate over the following tokens directly. After this commit there are no more linked lists in use in the self-hosted AST API. Performance impact is negligible. Memory usage is slightly reduced.
parent 1dac9e71b5
commit 295bca9b5f
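
For illustration, the iteration contract described above (and documented on the new first_line field in the diff below) amounts to the following token scan, mirroring the render.zig change further down; the helper name forEachDocCommentLine is made up for this sketch:

fn forEachDocCommentLine(tree: *ast.Tree, comment: *ast.Node.DocComment) void {
    // Doc comment lines are tokens of the same kind as the first line
    // (.DocComment or .ContainerDocComment), possibly interleaved with
    // line comments; any other token ends the block.
    const kind = tree.tokens[comment.first_line].id;
    var tok_i = comment.first_line;
    while (true) : (tok_i += 1) {
        const tok_id = tree.tokens[tok_i].id;
        if (tok_id == kind) {
            // process doc comment token tok_i here
        } else if (tok_id == .LineComment) {
            continue; // interleaved line comments are ignored
        } else {
            break; // first other token ends the doc comment block
        }
    }
}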
@@ -1,7 +1,6 @@
 const std = @import("../std.zig");
 const assert = std.debug.assert;
 const testing = std.testing;
-const LinkedList = std.SinglyLinkedList;
 const mem = std.mem;
 const Token = std.zig.Token;
 
@@ -3013,9 +3012,10 @@ pub const Node = struct {
     pub const DocComment = struct {
         base: Node = Node{ .id = .DocComment },
-        lines: LineList,
-
-        pub const LineList = LinkedList(TokenIndex);
+        /// Points to the first doc comment token. API users are expected to iterate over the
+        /// tokens array, looking for more doc comments, ignoring line comments, and stopping
+        /// at the first other token.
+        first_line: TokenIndex,
 
         pub fn iterate(self: *const DocComment) Node.Iterator {
             return .{ .parent_node = &self.base, .index = 0 };
@@ -3026,14 +3026,13 @@
         }
 
         pub fn firstToken(self: *const DocComment) TokenIndex {
-            return self.lines.first.?.data;
+            return self.first_line;
         }
 
+        /// Returns the first doc comment line. Be careful, this may not be the desired behavior,
+        /// which would require the tokens array.
         pub fn lastToken(self: *const DocComment) TokenIndex {
-            var node = self.lines.first.?;
-            while (true) {
-                node = node.next orelse return node.data;
-            }
+            return self.first_line;
         }
     };
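
Note the caveat documented on lastToken above: it now returns the first line. A caller that genuinely needs the last doc comment token has to scan the tokens array itself, roughly as follows (hypothetical helper, not part of this commit):

fn lastDocCommentToken(tree: *ast.Tree, comment: *ast.Node.DocComment) ast.TokenIndex {
    const kind = tree.tokens[comment.first_line].id;
    var last = comment.first_line;
    var tok_i = comment.first_line + 1;
    while (true) : (tok_i += 1) {
        const tok_id = tree.tokens[tok_i].id;
        if (tok_id == kind) {
            last = tok_i; // a further doc comment line of the same kind
        } else if (tok_id == .LineComment) {
            continue; // line comments may be interleaved
        } else {
            return last; // first other token ends the block
        }
    }
}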
@@ -58,6 +58,8 @@ const Parser = struct {
     arena: std.heap.ArenaAllocator,
     gpa: *Allocator,
     source: []const u8,
+    /// TODO: Optimization idea: have this be several arrays of the token fields rather
+    /// than an array of structs.
     tokens: []const Token,
     tok_i: TokenIndex,
     errors: std.ArrayListUnmanaged(AstError),
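
The TODO added here suggests a struct-of-arrays layout for the token list. Roughly, the idea is one slice per Token field instead of one slice of Token structs (sketch only, not part of this commit; it assumes the Token id/start/end fields used elsewhere in this diff):

const TokenSoA = struct {
    ids: []const Token.Id, // token kind, the field most passes check
    starts: []const usize, // byte offset where each token begins
    ends: []const usize, // byte offset where each token ends
};

A pass that only inspects token ids would then walk one dense array instead of loading whole Token structs.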
@@ -367,21 +369,14 @@
 
     /// Eat a multiline container doc comment
     fn parseContainerDocComments(p: *Parser) !?*Node {
-        var lines = Node.DocComment.LineList{};
-        var lines_it: *?*Node.DocComment.LineList.Node = &lines.first;
-
-        while (p.eatToken(.ContainerDocComment)) |line| {
-            lines_it = try p.llpush(TokenIndex, lines_it, line);
-        }
-
-        if (lines.first == null) return null;
-
-        const node = try p.arena.allocator.create(Node.DocComment);
-        node.* = .{
-            .lines = lines,
-        };
-        return &node.base;
+        if (p.eatToken(.ContainerDocComment)) |first_line| {
+            while (p.eatToken(.ContainerDocComment)) |_| {}
+            const node = try p.arena.allocator.create(Node.DocComment);
+            node.* = .{ .first_line = first_line };
+            return &node.base;
+        }
+        return null;
     }
 
     /// TestDecl <- KEYWORD_test STRINGLITERALSINGLE Block
     fn parseTestDecl(p: *Parser) !?*Node {
@@ -3210,21 +3205,14 @@
 
     /// Eat a multiline doc comment
     fn parseDocComment(p: *Parser) !?*Node.DocComment {
-        var lines = Node.DocComment.LineList{};
-        var lines_it = &lines.first;
-
-        while (p.eatToken(.DocComment)) |line| {
-            lines_it = try p.llpush(TokenIndex, lines_it, line);
-        }
-
-        if (lines.first == null) return null;
-
-        const node = try p.arena.allocator.create(Node.DocComment);
-        node.* = .{
-            .lines = lines,
-        };
-        return node;
+        if (p.eatToken(.DocComment)) |first_line| {
+            while (p.eatToken(.DocComment)) |_| {}
+            const node = try p.arena.allocator.create(Node.DocComment);
+            node.* = .{ .first_line = first_line };
+            return node;
+        }
+        return null;
     }
 
     fn tokensOnSameLine(p: *Parser, token1: TokenIndex, token2: TokenIndex) bool {
         return std.mem.indexOfScalar(u8, p.source[p.tokens[token1].end..p.tokens[token2].start], '\n') == null;
@@ -3234,11 +3222,8 @@
     fn parseAppendedDocComment(p: *Parser, after_token: TokenIndex) !?*Node.DocComment {
         const comment_token = p.eatToken(.DocComment) orelse return null;
         if (p.tokensOnSameLine(after_token, comment_token)) {
-            var lines = Node.DocComment.LineList{};
-            _ = try p.llpush(TokenIndex, &lines.first, comment_token);
-
             const node = try p.arena.allocator.create(Node.DocComment);
-            node.* = .{ .lines = lines };
+            node.* = .{ .first_line = comment_token };
             return node;
         }
         p.putBackToken(comment_token);
@@ -327,11 +327,18 @@ fn renderContainerDecl(allocator: *mem.Allocator, stream: var, tree: *ast.Tree,
 
         .DocComment => {
             const comment = @fieldParentPtr(ast.Node.DocComment, "base", decl);
-            var it = comment.lines.first;
-            while (it) |node| : (it = node.next) {
-                try renderToken(tree, stream, node.data, indent, start_col, .Newline);
-                if (node.next != null) {
-                    try stream.writeByteNTimes(' ', indent);
+            const kind = tree.tokens[comment.first_line].id;
+            try renderToken(tree, stream, comment.first_line, indent, start_col, .Newline);
+            var tok_i = comment.first_line + 1;
+            while (true) : (tok_i += 1) {
+                const tok_id = tree.tokens[tok_i].id;
+                if (tok_id == kind) {
+                    try stream.writeByteNTimes(' ', indent);
+                    try renderToken(tree, stream, tok_i, indent, start_col, .Newline);
+                } else if (tok_id == .LineComment) {
+                    continue;
+                } else {
+                    break;
                 }
             }
         },
@@ -2428,18 +2435,33 @@ fn renderDocComments(
     start_col: *usize,
 ) (@TypeOf(stream).Error || Error)!void {
     const comment = node.doc_comments orelse return;
-    var it = comment.lines.first;
-    const first_token = node.firstToken();
-    while (it) |line_token_index_node| : (it = line_token_index_node.next) {
-        const line_token_index = line_token_index_node.data;
-        if (line_token_index < first_token) {
-            try renderToken(tree, stream, line_token_index, indent, start_col, Space.Newline);
-            try stream.writeByteNTimes(' ', indent);
-        } else {
-            try renderToken(tree, stream, line_token_index, indent, start_col, Space.NoComment);
-            try stream.writeAll("\n");
-            try stream.writeByteNTimes(' ', indent);
-        }
-    }
+    return renderDocCommentsToken(tree, stream, comment, node.firstToken(), indent, start_col);
 }
 
+fn renderDocCommentsToken(
+    tree: *ast.Tree,
+    stream: var,
+    comment: *ast.Node.DocComment,
+    first_token: ast.TokenIndex,
+    indent: usize,
+    start_col: *usize,
+) (@TypeOf(stream).Error || Error)!void {
+    var tok_i = comment.first_line;
+    while (true) : (tok_i += 1) {
+        switch (tree.tokens[tok_i].id) {
+            .DocComment, .ContainerDocComment => {
+                if (comment.first_line < first_token) {
+                    try renderToken(tree, stream, tok_i, indent, start_col, Space.Newline);
+                    try stream.writeByteNTimes(' ', indent);
+                } else {
+                    try renderToken(tree, stream, tok_i, indent, start_col, Space.NoComment);
+                    try stream.writeAll("\n");
+                    try stream.writeByteNTimes(' ', indent);
+                }
+            },
+            .LineComment => continue,
+            else => break,
+        }
+    }
+}