std.zig.tokenizer: Tokens no longer contain line and column fields.

* Instead, this information is obtained by asking the tokenizer.
* getTokenLocation takes a start_index, so a relative location can be obtained (see the sketch below).
master
Jimmi Holst Christensen 2018-04-04 10:27:38 +02:00
parent 09cf823619
commit 020724cfa0
2 changed files with 25 additions and 42 deletions
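
To make the change described in the commit message concrete, here is a minimal usage sketch of the new two-argument getTokenLocation. It is hypothetical, not part of this commit: it assumes the Tokenizer is importable as std.zig.tokenizer in this era of the standard library, and the source string and tokens are made up for illustration.

    const std = @import("std");
    const Tokenizer = std.zig.tokenizer.Tokenizer; // import path assumed
    const assert = std.debug.assert;

    test "absolute and relative token locations" {
        var tokenizer = Tokenizer.init("a;\n\nb;");
        _ = tokenizer.next();          // identifier `a`
        const semi = tokenizer.next(); // `;`
        const b = tokenizer.next();    // identifier `b`

        // start_index = 0 yields an absolute location, as parseError uses it.
        const abs = tokenizer.getTokenLocation(0, b);
        assert(abs.line == 2 and abs.column == 0);

        // start_index = end of the previous token yields a relative location:
        // loc.line is the number of newlines between the two tokens, which the
        // renderer uses to decide between "\n" and "\n\n".
        const rel = tokenizer.getTokenLocation(semi.end, b);
        assert(rel.line == 2);
    }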

@@ -1571,12 +1571,12 @@ pub const Parser = struct {
     }
 
     fn parseError(self: &Parser, token: &const Token, comptime fmt: []const u8, args: ...) (error{ParseError}) {
-        const loc = self.tokenizer.getTokenLocation(token);
-        warn("{}:{}:{}: error: " ++ fmt ++ "\n", self.source_file_name, token.line + 1, token.column + 1, args);
+        const loc = self.tokenizer.getTokenLocation(0, token);
+        warn("{}:{}:{}: error: " ++ fmt ++ "\n", self.source_file_name, loc.line + 1, loc.column + 1, args);
         warn("{}\n", self.tokenizer.buffer[loc.line_start..loc.line_end]);
         {
             var i: usize = 0;
-            while (i < token.column) : (i += 1) {
+            while (i < loc.column) : (i += 1) {
                 warn(" ");
             }
         }
@@ -1679,9 +1679,8 @@ pub const Parser = struct {
             try stack.append(RenderState {
                 .Text = blk: {
                     const prev_node = root_node.decls.at(i - 1);
-                    const prev_line_index = prev_node.lastToken().line;
-                    const this_line_index = decl.firstToken().line;
-                    if (this_line_index - prev_line_index >= 2) {
+                    const loc = self.tokenizer.getTokenLocation(prev_node.lastToken().end, decl.firstToken());
+                    if (loc.line >= 2) {
                         break :blk "\n\n";
                     }
                     break :blk "\n";
@@ -1858,10 +1857,9 @@ pub const Parser = struct {
             try stack.append(RenderState {
                 .Text = blk: {
                     if (i != 0) {
-                        const prev_statement_node = block.statements.items[i - 1];
-                        const prev_line_index = prev_statement_node.lastToken().line;
-                        const this_line_index = statement_node.firstToken().line;
-                        if (this_line_index - prev_line_index >= 2) {
+                        const prev_node = block.statements.items[i - 1];
+                        const loc = self.tokenizer.getTokenLocation(prev_node.lastToken().end, statement_node.firstToken());
+                        if (loc.line >= 2) {
                             break :blk "\n\n";
                         }
                     }
@@ -2083,9 +2081,8 @@ pub const Parser = struct {
                 .Text = blk: {
                     if (i != 0) {
                         const prev_node = fields_and_decls[i - 1];
-                        const prev_line_index = prev_node.lastToken().line;
-                        const this_line_index = node.firstToken().line;
-                        if (this_line_index - prev_line_index >= 2) {
+                        const loc = self.tokenizer.getTokenLocation(prev_node.lastToken().end, node.firstToken());
+                        if (loc.line >= 2) {
                             break :blk "\n\n";
                         }
                     }
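
All three renderer hunks above follow the same pattern: ask the tokenizer for a location relative to the end of the previous node's last token, and keep a blank line when loc.line >= 2. A hypothetical helper, not part of this commit, that captures the pattern in the same era's syntax:

    // Hypothetical helper (not in the commit): choose the separator to print
    // between two consecutive nodes, preserving at most one blank line.
    fn blankLineSeparator(tokenizer: &Tokenizer, prev_last: &const Token, next_first: &const Token) []const u8 {
        const loc = tokenizer.getTokenLocation(prev_last.end, next_first);
        if (loc.line >= 2) {
            return "\n\n";
        }
        return "\n";
    }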


@@ -5,8 +5,6 @@ pub const Token = struct {
     id: Id,
     start: usize,
     end: usize,
-    line: usize,
-    column: usize,
 
     const KeywordId = struct {
         bytes: []const u8,
@@ -180,28 +178,34 @@ pub const Token = struct {
 pub const Tokenizer = struct {
     buffer: []const u8,
     index: usize,
-    line: usize,
-    column: usize,
     pending_invalid_token: ?Token,
 
-    pub const LineLocation = struct {
+    pub const Location = struct {
+        line: usize,
+        column: usize,
         line_start: usize,
         line_end: usize,
     };
 
-    pub fn getTokenLocation(self: &Tokenizer, token: &const Token) LineLocation {
-        var loc = LineLocation {
-            .line_start = 0,
+    pub fn getTokenLocation(self: &Tokenizer, start_index: usize, token: &const Token) Location {
+        var loc = Location {
+            .line = 0,
+            .column = 0,
+            .line_start = start_index,
             .line_end = self.buffer.len,
         };
-        for (self.buffer) |c, i| {
-            if (i == token.start) {
-                loc.line_end = i;
+        for (self.buffer[start_index..]) |c, i| {
+            if (i + start_index == token.start) {
+                loc.line_end = i + start_index;
                 while (loc.line_end < self.buffer.len and self.buffer[loc.line_end] != '\n') : (loc.line_end += 1) {}
                 return loc;
             }
             if (c == '\n') {
+                loc.line += 1;
+                loc.column = 0;
                 loc.line_start = i + 1;
+            } else {
+                loc.column += 1;
             }
         }
         return loc;
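
The rewritten loop scans self.buffer[start_index..] and counts newlines only from there, so the returned Location is relative to start_index rather than absolute. A hypothetical trace (values made up for illustration): with buffer "x;\n\ny;", start_index = 2 (the end of the first `;`), and the token `y` starting at index 4:

    // i = 0, c = '\n' -> loc.line = 1, loc.column = 0, loc.line_start = 3
    // i = 1, c = '\n' -> loc.line = 2, loc.column = 0, loc.line_start = 4
    // i = 2, i + start_index == token.start -> line_end scans ahead to 6
    // result: Location{ .line = 2, .column = 0, .line_start = 4, .line_end = 6 }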
@@ -216,8 +220,6 @@ pub const Tokenizer = struct {
         return Tokenizer {
             .buffer = buffer,
             .index = 0,
-            .line = 0,
-            .column = 0,
             .pending_invalid_token = null,
         };
     }
@@ -277,8 +279,6 @@ pub const Tokenizer = struct {
             .id = Token.Id.Eof,
             .start = self.index,
             .end = undefined,
-            .line = self.line,
-            .column = self.column,
         };
         while (self.index < self.buffer.len) : (self.index += 1) {
             const c = self.buffer[self.index];
@@ -286,12 +286,9 @@ pub const Tokenizer = struct {
                 State.Start => switch (c) {
                     ' ' => {
                         result.start = self.index + 1;
-                        result.column += 1;
                     },
                     '\n' => {
                         result.start = self.index + 1;
-                        result.line += 1;
-                        result.column = 0;
                     },
                     'c' => {
                         state = State.C;
@@ -977,15 +974,6 @@ pub const Tokenizer = struct {
             }
         }
 
-        for (self.buffer[start_index..self.index]) |c| {
-            if (c == '\n') {
-                self.line += 1;
-                self.column = 0;
-            } else {
-                self.column += 1;
-            }
-        }
-
         if (result.id == Token.Id.Eof) {
             if (self.pending_invalid_token) |token| {
                 self.pending_invalid_token = null;
@@ -1009,8 +997,6 @@ pub const Tokenizer = struct {
             .id = Token.Id.Invalid,
             .start = self.index,
             .end = self.index + invalid_length,
-            .line = self.line,
-            .column = self.column,
         };
     }