stage2 parsing: rework block statements AST memory layout
Block statements now directly follow the Block AST node in memory rather than being stored in a singly linked list. This had negligible impact on performance (throughput: 72.3 MiB/s => 72.7 MiB/s), but it greatly improves the API, since the statements are laid out as a flat array in memory.
parent 3c5d581ce3
commit d57d9448aa
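The diff below removes the per-statement linked-list nodes and instead allocates the Block header and its statement pointers in a single block of memory, exposing them as a slice. The following is a hypothetical, self-contained sketch of that trailing-array pattern, written against the same era's std API the diff itself uses; the `List`/`Item` names and the test are invented for illustration and are not part of this commit or the Zig standard library.

const std = @import("std");
const mem = std.mem;

const Item = struct { value: u32 };

/// The item pointers directly follow this struct in memory,
/// mirroring the layout the commit gives to Block statements.
const List = struct {
    items_len: usize,

    /// One allocation holds the header plus `items_len` item pointers.
    fn alloc(allocator: *mem.Allocator, items_len: usize) !*List {
        const bytes = try allocator.alignedAlloc(u8, @alignOf(List), sizeInBytes(items_len));
        const self = @ptrCast(*List, bytes.ptr);
        self.* = .{ .items_len = items_len };
        return self;
    }

    fn free(self: *List, allocator: *mem.Allocator) void {
        const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.items_len)];
        allocator.free(bytes);
    }

    /// Consumers get a flat slice instead of walking `next` pointers.
    fn items(self: *List) []*Item {
        const start = @ptrCast([*]u8, self) + @sizeOf(List);
        return @ptrCast([*]*Item, start)[0..self.items_len];
    }

    fn sizeInBytes(items_len: usize) usize {
        return @sizeOf(List) + @sizeOf(*Item) * items_len;
    }
};

test "trailing array layout" {
    const allocator = std.heap.page_allocator;
    var a = Item{ .value = 1 };
    var b = Item{ .value = 2 };

    const list = try List.alloc(allocator, 2);
    defer list.free(allocator);
    mem.copy(*Item, list.items(), &[_]*Item{ &a, &b });

    for (list.items()) |item| std.debug.assert(item.value != 0);
}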
@@ -804,6 +804,7 @@ pub const Node = struct {
         }
     };
 
+    /// The fields and decls Node pointers directly follow this struct in memory.
     pub const ContainerDecl = struct {
         base: Node = Node{ .id = .ContainerDecl },
         kind_token: TokenIndex,
@@ -1188,23 +1189,37 @@ pub const Node = struct {
         }
     };
 
+    /// The statements of the block follow Block directly in memory.
     pub const Block = struct {
         base: Node = Node{ .id = .Block },
-        label: ?TokenIndex,
+        statements_len: NodeIndex,
         lbrace: TokenIndex,
-        statements: StatementList,
         rbrace: TokenIndex,
+        label: ?TokenIndex,
 
-        pub const StatementList = LinkedList(*Node);
+        /// After this the caller must initialize the statements list.
+        pub fn alloc(allocator: *mem.Allocator, statements_len: NodeIndex) !*Block {
+            const bytes = try allocator.alignedAlloc(u8, @alignOf(Block), sizeInBytes(statements_len));
+            return @ptrCast(*Block, bytes.ptr);
+        }
+
+        pub fn free(self: *Block, allocator: *mem.Allocator) void {
+            const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.statements_len)];
+            allocator.free(bytes);
+        }
 
         pub fn iterate(self: *const Block) Node.Iterator {
-            return .{ .parent_node = &self.base, .index = 0, .node = self.statements.first };
+            return .{ .parent_node = &self.base, .index = 0, .node = null };
         }
 
         pub fn iterateNext(self: *const Block, it: *Node.Iterator) ?*Node {
-            const child = it.node orelse return null;
-            it.node = child.next;
-            return child.data;
+            var i = it.index;
+            it.index += 1;
+
+            if (i < self.statements_len) return self.statementsConst()[i];
+            i -= self.statements_len;
+
+            return null;
         }
 
         pub fn firstToken(self: *const Block) TokenIndex {
@@ -1218,6 +1233,20 @@ pub const Node = struct {
         pub fn lastToken(self: *const Block) TokenIndex {
             return self.rbrace;
         }
+
+        pub fn statements(self: *Block) []*Node {
+            const decls_start = @ptrCast([*]u8, self) + @sizeOf(Block);
+            return @ptrCast([*]*Node, decls_start)[0..self.statements_len];
+        }
+
+        pub fn statementsConst(self: *const Block) []const *Node {
+            const decls_start = @ptrCast([*]const u8, self) + @sizeOf(Block);
+            return @ptrCast([*]const *Node, decls_start)[0..self.statements_len];
+        }
+
+        fn sizeInBytes(statements_len: NodeIndex) usize {
+            return @sizeOf(Block) + @sizeOf(*Node) * @as(usize, statements_len);
+        }
     };
 
     pub const Defer = struct {

@@ -1178,8 +1178,9 @@ const Parser = struct {
     fn parseBlock(p: *Parser) !?*Node {
         const lbrace = p.eatToken(.LBrace) orelse return null;
 
-        var statements = Node.Block.StatementList{};
-        var statements_it = &statements.first;
+        var statements = std.ArrayList(*Node).init(p.gpa);
+        defer statements.deinit();
 
         while (true) {
             const statement = (p.parseStatement() catch |err| switch (err) {
                 error.OutOfMemory => return error.OutOfMemory,
@@ -1189,18 +1190,19 @@ const Parser = struct {
                     continue;
                 },
             }) orelse break;
-            statements_it = try p.llpush(*Node, statements_it, statement);
+            try statements.append(statement);
         }
 
         const rbrace = try p.expectToken(.RBrace);
 
-        const block_node = try p.arena.allocator.create(Node.Block);
+        const block_node = try Node.Block.alloc(&p.arena.allocator, statements.items.len);
         block_node.* = .{
             .label = null,
             .lbrace = lbrace,
-            .statements = statements,
+            .statements_len = statements.items.len,
             .rbrace = rbrace,
         };
+        std.mem.copy(*Node, block_node.statements(), statements.items);
 
         return &block_node.base;
     }

@@ -358,21 +358,20 @@ fn renderExpression(
                 try renderToken(tree, stream, tree.nextToken(label), indent, start_col, Space.Space);
             }
 
-            if (block.statements.first == null) {
+            if (block.statements_len == 0) {
                 try renderToken(tree, stream, block.lbrace, indent + indent_delta, start_col, Space.None);
                 return renderToken(tree, stream, block.rbrace, indent, start_col, space);
             } else {
                 const block_indent = indent + indent_delta;
                 try renderToken(tree, stream, block.lbrace, block_indent, start_col, Space.Newline);
 
-                var it = block.statements.first;
-                while (it) |statement_node| : (it = statement_node.next) {
-                    const statement = statement_node.data;
+                const block_statements = block.statements();
+                for (block_statements) |statement, i| {
                     try stream.writeByteNTimes(' ', block_indent);
                     try renderStatement(allocator, stream, tree, block_indent, start_col, statement);
 
-                    if (statement_node.next) |next_statement| {
-                        try renderExtraNewline(tree, stream, start_col, next_statement.data);
+                    if (i + 1 < block_statements.len) {
+                        try renderExtraNewline(tree, stream, start_col, block_statements[i + 1]);
                     }
                 }