stage2 parsing: rework block statements AST memory layout
Block statements now directly follow the Block AST node in memory rather than being stored in a singly linked list. This had negligible impact on performance (throughput: 72.3 MiB/s => 72.7 MiB/s); however, it greatly improves the API, since the statements are laid out as a flat array in memory.
This commit is contained in:
parent
3c5d581ce3
commit
d57d9448aa
@ -804,6 +804,7 @@ pub const Node = struct {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
/// The fields and decls Node pointers directly follow this struct in memory.
|
||||||
pub const ContainerDecl = struct {
|
pub const ContainerDecl = struct {
|
||||||
base: Node = Node{ .id = .ContainerDecl },
|
base: Node = Node{ .id = .ContainerDecl },
|
||||||
kind_token: TokenIndex,
|
kind_token: TokenIndex,
|
||||||
@ -1188,23 +1189,37 @@ pub const Node = struct {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
/// The statements of the block follow Block directly in memory.
|
||||||
pub const Block = struct {
|
pub const Block = struct {
|
||||||
base: Node = Node{ .id = .Block },
|
base: Node = Node{ .id = .Block },
|
||||||
label: ?TokenIndex,
|
statements_len: NodeIndex,
|
||||||
lbrace: TokenIndex,
|
lbrace: TokenIndex,
|
||||||
statements: StatementList,
|
|
||||||
rbrace: TokenIndex,
|
rbrace: TokenIndex,
|
||||||
|
label: ?TokenIndex,
|
||||||
|
|
||||||
pub const StatementList = LinkedList(*Node);
|
/// After this the caller must initialize the statements list.
|
||||||
|
pub fn alloc(allocator: *mem.Allocator, statements_len: NodeIndex) !*Block {
|
||||||
|
const bytes = try allocator.alignedAlloc(u8, @alignOf(Block), sizeInBytes(statements_len));
|
||||||
|
return @ptrCast(*Block, bytes.ptr);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn free(self: *Block, allocator: *mem.Allocator) void {
|
||||||
|
const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.statements_len)];
|
||||||
|
allocator.free(bytes);
|
||||||
|
}
|
||||||
|
|
||||||
pub fn iterate(self: *const Block) Node.Iterator {
|
pub fn iterate(self: *const Block) Node.Iterator {
|
||||||
return .{ .parent_node = &self.base, .index = 0, .node = self.statements.first };
|
return .{ .parent_node = &self.base, .index = 0, .node = null };
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn iterateNext(self: *const Block, it: *Node.Iterator) ?*Node {
|
pub fn iterateNext(self: *const Block, it: *Node.Iterator) ?*Node {
|
||||||
const child = it.node orelse return null;
|
var i = it.index;
|
||||||
it.node = child.next;
|
it.index += 1;
|
||||||
return child.data;
|
|
||||||
|
if (i < self.statements_len) return self.statementsConst()[i];
|
||||||
|
i -= self.statements_len;
|
||||||
|
|
||||||
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn firstToken(self: *const Block) TokenIndex {
|
pub fn firstToken(self: *const Block) TokenIndex {
|
||||||
@ -1218,6 +1233,20 @@ pub const Node = struct {
|
|||||||
pub fn lastToken(self: *const Block) TokenIndex {
|
pub fn lastToken(self: *const Block) TokenIndex {
|
||||||
return self.rbrace;
|
return self.rbrace;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn statements(self: *Block) []*Node {
|
||||||
|
const decls_start = @ptrCast([*]u8, self) + @sizeOf(Block);
|
||||||
|
return @ptrCast([*]*Node, decls_start)[0..self.statements_len];
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn statementsConst(self: *const Block) []const *Node {
|
||||||
|
const decls_start = @ptrCast([*]const u8, self) + @sizeOf(Block);
|
||||||
|
return @ptrCast([*]const *Node, decls_start)[0..self.statements_len];
|
||||||
|
}
|
||||||
|
|
||||||
|
fn sizeInBytes(statements_len: NodeIndex) usize {
|
||||||
|
return @sizeOf(Block) + @sizeOf(*Node) * @as(usize, statements_len);
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
pub const Defer = struct {
|
pub const Defer = struct {
|
||||||
|
@ -1178,8 +1178,9 @@ const Parser = struct {
|
|||||||
fn parseBlock(p: *Parser) !?*Node {
|
fn parseBlock(p: *Parser) !?*Node {
|
||||||
const lbrace = p.eatToken(.LBrace) orelse return null;
|
const lbrace = p.eatToken(.LBrace) orelse return null;
|
||||||
|
|
||||||
var statements = Node.Block.StatementList{};
|
var statements = std.ArrayList(*Node).init(p.gpa);
|
||||||
var statements_it = &statements.first;
|
defer statements.deinit();
|
||||||
|
|
||||||
while (true) {
|
while (true) {
|
||||||
const statement = (p.parseStatement() catch |err| switch (err) {
|
const statement = (p.parseStatement() catch |err| switch (err) {
|
||||||
error.OutOfMemory => return error.OutOfMemory,
|
error.OutOfMemory => return error.OutOfMemory,
|
||||||
@ -1189,18 +1190,19 @@ const Parser = struct {
|
|||||||
continue;
|
continue;
|
||||||
},
|
},
|
||||||
}) orelse break;
|
}) orelse break;
|
||||||
statements_it = try p.llpush(*Node, statements_it, statement);
|
try statements.append(statement);
|
||||||
}
|
}
|
||||||
|
|
||||||
const rbrace = try p.expectToken(.RBrace);
|
const rbrace = try p.expectToken(.RBrace);
|
||||||
|
|
||||||
const block_node = try p.arena.allocator.create(Node.Block);
|
const block_node = try Node.Block.alloc(&p.arena.allocator, statements.items.len);
|
||||||
block_node.* = .{
|
block_node.* = .{
|
||||||
.label = null,
|
.label = null,
|
||||||
.lbrace = lbrace,
|
.lbrace = lbrace,
|
||||||
.statements = statements,
|
.statements_len = statements.items.len,
|
||||||
.rbrace = rbrace,
|
.rbrace = rbrace,
|
||||||
};
|
};
|
||||||
|
std.mem.copy(*Node, block_node.statements(), statements.items);
|
||||||
|
|
||||||
return &block_node.base;
|
return &block_node.base;
|
||||||
}
|
}
|
||||||
|
@ -358,21 +358,20 @@ fn renderExpression(
|
|||||||
try renderToken(tree, stream, tree.nextToken(label), indent, start_col, Space.Space);
|
try renderToken(tree, stream, tree.nextToken(label), indent, start_col, Space.Space);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (block.statements.first == null) {
|
if (block.statements_len == 0) {
|
||||||
try renderToken(tree, stream, block.lbrace, indent + indent_delta, start_col, Space.None);
|
try renderToken(tree, stream, block.lbrace, indent + indent_delta, start_col, Space.None);
|
||||||
return renderToken(tree, stream, block.rbrace, indent, start_col, space);
|
return renderToken(tree, stream, block.rbrace, indent, start_col, space);
|
||||||
} else {
|
} else {
|
||||||
const block_indent = indent + indent_delta;
|
const block_indent = indent + indent_delta;
|
||||||
try renderToken(tree, stream, block.lbrace, block_indent, start_col, Space.Newline);
|
try renderToken(tree, stream, block.lbrace, block_indent, start_col, Space.Newline);
|
||||||
|
|
||||||
var it = block.statements.first;
|
const block_statements = block.statements();
|
||||||
while (it) |statement_node| : (it = statement_node.next) {
|
for (block_statements) |statement, i| {
|
||||||
const statement = statement_node.data;
|
|
||||||
try stream.writeByteNTimes(' ', block_indent);
|
try stream.writeByteNTimes(' ', block_indent);
|
||||||
try renderStatement(allocator, stream, tree, block_indent, start_col, statement);
|
try renderStatement(allocator, stream, tree, block_indent, start_col, statement);
|
||||||
|
|
||||||
if (statement_node.next) |next_statement| {
|
if (i + 1 < block_statements.len) {
|
||||||
try renderExtraNewline(tree, stream, start_col, next_statement.data);
|
try renderExtraNewline(tree, stream, start_col, block_statements[i + 1]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Loading…
x
Reference in New Issue
Block a user