stage2 parser: don't append doc comments to the list

The DocComment AST node now points only to the first doc comment token.
API users are expected to iterate over the following tokens directly.

After this commit there are no more linked lists in use in the
self-hosted AST API.

Performance impact is negligible; memory usage is slightly reduced.
Andrew Kelley 2020-05-22 00:28:07 -04:00
parent 1dac9e71b5
commit 295bca9b5f
3 changed files with 60 additions and 54 deletions

File 1 of 3 (the AST Node.DocComment definition):

@@ -1,7 +1,6 @@
 const std = @import("../std.zig");
 const assert = std.debug.assert;
 const testing = std.testing;
-const LinkedList = std.SinglyLinkedList;
 const mem = std.mem;
 const Token = std.zig.Token;
@@ -3013,9 +3012,10 @@ pub const Node = struct {
     pub const DocComment = struct {
         base: Node = Node{ .id = .DocComment },
-        lines: LineList,
-
-        pub const LineList = LinkedList(TokenIndex);
+        /// Points to the first doc comment token. API users are expected to iterate over the
+        /// tokens array, looking for more doc comments, ignoring line comments, and stopping
+        /// at the first other token.
+        first_line: TokenIndex,
 
         pub fn iterate(self: *const DocComment) Node.Iterator {
             return .{ .parent_node = &self.base, .index = 0 };
@@ -3026,14 +3026,13 @@ pub const Node = struct {
         }
 
         pub fn firstToken(self: *const DocComment) TokenIndex {
-            return self.lines.first.?.data;
+            return self.first_line;
         }
 
+        /// Returns the first doc comment line. Be careful, this may not be the desired behavior,
+        /// which would require the tokens array.
         pub fn lastToken(self: *const DocComment) TokenIndex {
-            var node = self.lines.first.?;
-            while (true) {
-                node = node.next orelse return node.data;
-            }
+            return self.first_line;
         }
     };

File 2 of 3 (the parser):

@@ -58,6 +58,8 @@ const Parser = struct {
     arena: std.heap.ArenaAllocator,
     gpa: *Allocator,
     source: []const u8,
+    /// TODO: Optimization idea: have this be several arrays of the token fields rather
+    /// than an array of structs.
     tokens: []const Token,
     tok_i: TokenIndex,
     errors: std.ArrayListUnmanaged(AstError),
@@ -367,20 +369,13 @@ const Parser = struct {
     /// Eat a multiline container doc comment
     fn parseContainerDocComments(p: *Parser) !?*Node {
-        var lines = Node.DocComment.LineList{};
-        var lines_it: *?*Node.DocComment.LineList.Node = &lines.first;
-
-        while (p.eatToken(.ContainerDocComment)) |line| {
-            lines_it = try p.llpush(TokenIndex, lines_it, line);
-        }
-
-        if (lines.first == null) return null;
-
-        const node = try p.arena.allocator.create(Node.DocComment);
-        node.* = .{
-            .lines = lines,
-        };
-        return &node.base;
+        if (p.eatToken(.ContainerDocComment)) |first_line| {
+            while (p.eatToken(.ContainerDocComment)) |_| {}
+            const node = try p.arena.allocator.create(Node.DocComment);
+            node.* = .{ .first_line = first_line };
+            return &node.base;
+        }
+        return null;
     }
 
     /// TestDecl <- KEYWORD_test STRINGLITERALSINGLE Block
@@ -3210,20 +3205,13 @@ const Parser = struct {
     /// Eat a multiline doc comment
     fn parseDocComment(p: *Parser) !?*Node.DocComment {
-        var lines = Node.DocComment.LineList{};
-        var lines_it = &lines.first;
-
-        while (p.eatToken(.DocComment)) |line| {
-            lines_it = try p.llpush(TokenIndex, lines_it, line);
-        }
-
-        if (lines.first == null) return null;
-
-        const node = try p.arena.allocator.create(Node.DocComment);
-        node.* = .{
-            .lines = lines,
-        };
-        return node;
+        if (p.eatToken(.DocComment)) |first_line| {
+            while (p.eatToken(.DocComment)) |_| {}
+            const node = try p.arena.allocator.create(Node.DocComment);
+            node.* = .{ .first_line = first_line };
+            return node;
+        }
+        return null;
     }
 
     fn tokensOnSameLine(p: *Parser, token1: TokenIndex, token2: TokenIndex) bool {
@@ -3234,11 +3222,8 @@ const Parser = struct {
     fn parseAppendedDocComment(p: *Parser, after_token: TokenIndex) !?*Node.DocComment {
         const comment_token = p.eatToken(.DocComment) orelse return null;
         if (p.tokensOnSameLine(after_token, comment_token)) {
-            var lines = Node.DocComment.LineList{};
-            _ = try p.llpush(TokenIndex, &lines.first, comment_token);
-
             const node = try p.arena.allocator.create(Node.DocComment);
-            node.* = .{ .lines = lines };
+            node.* = .{ .first_line = comment_token };
             return node;
         }
         p.putBackToken(comment_token);

File 3 of 3 (the renderer):

@@ -327,11 +327,18 @@ fn renderContainerDecl(allocator: *mem.Allocator, stream: var, tree: *ast.Tree,
         .DocComment => {
             const comment = @fieldParentPtr(ast.Node.DocComment, "base", decl);
-            var it = comment.lines.first;
-            while (it) |node| : (it = node.next) {
-                try renderToken(tree, stream, node.data, indent, start_col, .Newline);
-                if (node.next != null) {
+            const kind = tree.tokens[comment.first_line].id;
+            try renderToken(tree, stream, comment.first_line, indent, start_col, .Newline);
+            var tok_i = comment.first_line + 1;
+            while (true) : (tok_i += 1) {
+                const tok_id = tree.tokens[tok_i].id;
+                if (tok_id == kind) {
                     try stream.writeByteNTimes(' ', indent);
+                    try renderToken(tree, stream, tok_i, indent, start_col, .Newline);
+                } else if (tok_id == .LineComment) {
+                    continue;
+                } else {
+                    break;
                 }
             }
         },
@@ -2428,17 +2435,32 @@ fn renderDocComments(
     start_col: *usize,
 ) (@TypeOf(stream).Error || Error)!void {
     const comment = node.doc_comments orelse return;
-    var it = comment.lines.first;
-    const first_token = node.firstToken();
-    while (it) |line_token_index_node| : (it = line_token_index_node.next) {
-        const line_token_index = line_token_index_node.data;
-        if (line_token_index < first_token) {
-            try renderToken(tree, stream, line_token_index, indent, start_col, Space.Newline);
-            try stream.writeByteNTimes(' ', indent);
-        } else {
-            try renderToken(tree, stream, line_token_index, indent, start_col, Space.NoComment);
-            try stream.writeAll("\n");
-            try stream.writeByteNTimes(' ', indent);
-        }
+    return renderDocCommentsToken(tree, stream, comment, node.firstToken(), indent, start_col);
+}
+
+fn renderDocCommentsToken(
+    tree: *ast.Tree,
+    stream: var,
+    comment: *ast.Node.DocComment,
+    first_token: ast.TokenIndex,
+    indent: usize,
+    start_col: *usize,
+) (@TypeOf(stream).Error || Error)!void {
+    var tok_i = comment.first_line;
+    while (true) : (tok_i += 1) {
+        switch (tree.tokens[tok_i].id) {
+            .DocComment, .ContainerDocComment => {
+                if (comment.first_line < first_token) {
+                    try renderToken(tree, stream, tok_i, indent, start_col, Space.Newline);
+                    try stream.writeByteNTimes(' ', indent);
+                } else {
+                    try renderToken(tree, stream, tok_i, indent, start_col, Space.NoComment);
+                    try stream.writeAll("\n");
+                    try stream.writeByteNTimes(' ', indent);
+                }
+            },
+            .LineComment => continue,
+            else => break,
+        }
     }
 }