std.zig.tokenizer: Tokens no longer contain line and column fields.

* Instead, this information is obtained by asking the tokenizer.
* getTokenLocation takes a start_index, so a relative location can be obtained (see the usage sketch below the commit metadata).
master
Jimmi Holst Christensen 2018-04-04 10:27:38 +02:00
parent 09cf823619
commit 020724cfa0
2 changed files with 25 additions and 42 deletions
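
Usage sketch (not part of this commit): the call below mirrors the new parseError code in the diff, passing 0 as start_index to get an absolute line/column for a token. Tokenizer, Token, and warn are assumed to be the std.zig.tokenizer types and std.debug.warn of this era; reportToken itself is a hypothetical helper.

    // Hypothetical helper, not from the commit. Assumes Tokenizer/Token from
    // std.zig.tokenizer and warn = std.debug.warn are in scope.
    fn reportToken(tokenizer: &Tokenizer, file_name: []const u8, token: &const Token) void {
        // start_index = 0 scans from the beginning of the buffer, so the
        // returned Location is absolute (line and column are 0-based).
        const loc = tokenizer.getTokenLocation(0, token);
        warn("{}:{}:{}\n", file_name, loc.line + 1, loc.column + 1);
    }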


@@ -1571,12 +1571,12 @@ pub const Parser = struct {
     }
 
     fn parseError(self: &Parser, token: &const Token, comptime fmt: []const u8, args: ...) (error{ParseError}) {
-        const loc = self.tokenizer.getTokenLocation(token);
-        warn("{}:{}:{}: error: " ++ fmt ++ "\n", self.source_file_name, token.line + 1, token.column + 1, args);
+        const loc = self.tokenizer.getTokenLocation(0, token);
+        warn("{}:{}:{}: error: " ++ fmt ++ "\n", self.source_file_name, loc.line + 1, loc.column + 1, args);
         warn("{}\n", self.tokenizer.buffer[loc.line_start..loc.line_end]);
         {
             var i: usize = 0;
-            while (i < token.column) : (i += 1) {
+            while (i < loc.column) : (i += 1) {
                 warn(" ");
             }
         }
@@ -1679,9 +1679,8 @@ pub const Parser = struct {
                 try stack.append(RenderState {
                     .Text = blk: {
                         const prev_node = root_node.decls.at(i - 1);
-                        const prev_line_index = prev_node.lastToken().line;
-                        const this_line_index = decl.firstToken().line;
-                        if (this_line_index - prev_line_index >= 2) {
+                        const loc = self.tokenizer.getTokenLocation(prev_node.lastToken().end, decl.firstToken());
+                        if (loc.line >= 2) {
                             break :blk "\n\n";
                         }
                         break :blk "\n";
@@ -1858,10 +1857,9 @@ pub const Parser = struct {
                 try stack.append(RenderState {
                     .Text = blk: {
                         if (i != 0) {
-                            const prev_statement_node = block.statements.items[i - 1];
-                            const prev_line_index = prev_statement_node.lastToken().line;
-                            const this_line_index = statement_node.firstToken().line;
-                            if (this_line_index - prev_line_index >= 2) {
+                            const prev_node = block.statements.items[i - 1];
+                            const loc = self.tokenizer.getTokenLocation(prev_node.lastToken().end, statement_node.firstToken());
+                            if (loc.line >= 2) {
                                 break :blk "\n\n";
                             }
                         }
@@ -2083,9 +2081,8 @@ pub const Parser = struct {
                     .Text = blk: {
                         if (i != 0) {
                             const prev_node = fields_and_decls[i - 1];
-                            const prev_line_index = prev_node.lastToken().line;
-                            const this_line_index = node.firstToken().line;
-                            if (this_line_index - prev_line_index >= 2) {
+                            const loc = self.tokenizer.getTokenLocation(prev_node.lastToken().end, node.firstToken());
+                            if (loc.line >= 2) {
                                 break :blk "\n\n";
                             }
                         }
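
Note (not part of this commit): all three render hunks above use the same relative form of the new API. Passing the previous token's end as start_index makes Location.line count only the newlines between the two tokens, so loc.line >= 2 means at least one blank line separates them. A minimal sketch with a hypothetical helper name, assuming Tokenizer and Token from std.zig.tokenizer are in scope:

    // Hypothetical helper, not from the commit.
    fn separatedByBlankLine(tokenizer: &Tokenizer, prev: &const Token, next: &const Token) bool {
        // Scanning starts at prev.end, so loc.line counts the newlines
        // between the end of prev and the start of next.
        const loc = tokenizer.getTokenLocation(prev.end, next);
        return loc.line >= 2;
    }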


@@ -5,8 +5,6 @@ pub const Token = struct {
     id: Id,
     start: usize,
     end: usize,
-    line: usize,
-    column: usize,
 
     const KeywordId = struct {
         bytes: []const u8,
@@ -180,28 +178,34 @@ pub const Token = struct {
 pub const Tokenizer = struct {
     buffer: []const u8,
     index: usize,
-    line: usize,
-    column: usize,
     pending_invalid_token: ?Token,
 
-    pub const LineLocation = struct {
+    pub const Location = struct {
+        line: usize,
+        column: usize,
         line_start: usize,
         line_end: usize,
     };
 
-    pub fn getTokenLocation(self: &Tokenizer, token: &const Token) LineLocation {
-        var loc = LineLocation {
-            .line_start = 0,
+    pub fn getTokenLocation(self: &Tokenizer, start_index: usize, token: &const Token) Location {
+        var loc = Location {
+            .line = 0,
+            .column = 0,
+            .line_start = start_index,
             .line_end = self.buffer.len,
         };
-        for (self.buffer) |c, i| {
-            if (i == token.start) {
-                loc.line_end = i;
+        for (self.buffer[start_index..]) |c, i| {
+            if (i + start_index == token.start) {
+                loc.line_end = i + start_index;
                 while (loc.line_end < self.buffer.len and self.buffer[loc.line_end] != '\n') : (loc.line_end += 1) {}
                 return loc;
             }
             if (c == '\n') {
+                loc.line += 1;
+                loc.column = 0;
                 loc.line_start = i + 1;
+            } else {
+                loc.column += 1;
             }
         }
         return loc;
@@ -216,8 +220,6 @@ pub const Tokenizer = struct {
         return Tokenizer {
             .buffer = buffer,
             .index = 0,
-            .line = 0,
-            .column = 0,
             .pending_invalid_token = null,
         };
     }
@@ -277,8 +279,6 @@ pub const Tokenizer = struct {
             .id = Token.Id.Eof,
             .start = self.index,
             .end = undefined,
-            .line = self.line,
-            .column = self.column,
         };
         while (self.index < self.buffer.len) : (self.index += 1) {
             const c = self.buffer[self.index];
@@ -286,12 +286,9 @@ pub const Tokenizer = struct {
                 State.Start => switch (c) {
                     ' ' => {
                         result.start = self.index + 1;
-                        result.column += 1;
                     },
                     '\n' => {
                         result.start = self.index + 1;
-                        result.line += 1;
-                        result.column = 0;
                     },
                     'c' => {
                         state = State.C;
@@ -977,15 +974,6 @@ pub const Tokenizer = struct {
                 }
             }
 
-        for (self.buffer[start_index..self.index]) |c| {
-            if (c == '\n') {
-                self.line += 1;
-                self.column = 0;
-            } else {
-                self.column += 1;
-            }
-        }
-
         if (result.id == Token.Id.Eof) {
             if (self.pending_invalid_token) |token| {
                 self.pending_invalid_token = null;
@@ -1009,8 +997,6 @@ pub const Tokenizer = struct {
             .id = Token.Id.Invalid,
             .start = self.index,
             .end = self.index + invalid_length,
-            .line = self.line,
-            .column = self.column,
         };
     }