const builtin = @import("builtin");
const io = @import("std").io;
const os = @import("std").os;
const heap = @import("std").heap;
const warn = @import("std").debug.warn;
const assert = @import("std").debug.assert;
const mem = @import("std").mem;
const ArrayList = @import("std").ArrayList;

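// A single token: its tag plus the byte offsets of the slice it occupies in the source buffer.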
const Token = struct {
    id: Id,
    start: usize,
    end: usize,

    const KeywordId = struct {
        bytes: []const u8,
        id: Id,
    };

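    // Table mapping keyword spellings to their token ids; getKeyword scans it linearly.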
    const keywords = []KeywordId {
        KeywordId{.bytes="align", .id = Id.Keyword_align},
        KeywordId{.bytes="and", .id = Id.Keyword_and},
        KeywordId{.bytes="asm", .id = Id.Keyword_asm},
        KeywordId{.bytes="break", .id = Id.Keyword_break},
        KeywordId{.bytes="coldcc", .id = Id.Keyword_coldcc},
        KeywordId{.bytes="comptime", .id = Id.Keyword_comptime},
        KeywordId{.bytes="const", .id = Id.Keyword_const},
        KeywordId{.bytes="continue", .id = Id.Keyword_continue},
        KeywordId{.bytes="defer", .id = Id.Keyword_defer},
        KeywordId{.bytes="else", .id = Id.Keyword_else},
        KeywordId{.bytes="enum", .id = Id.Keyword_enum},
        KeywordId{.bytes="error", .id = Id.Keyword_error},
        KeywordId{.bytes="export", .id = Id.Keyword_export},
        KeywordId{.bytes="extern", .id = Id.Keyword_extern},
        KeywordId{.bytes="false", .id = Id.Keyword_false},
        KeywordId{.bytes="fn", .id = Id.Keyword_fn},
        KeywordId{.bytes="for", .id = Id.Keyword_for},
        KeywordId{.bytes="goto", .id = Id.Keyword_goto},
        KeywordId{.bytes="if", .id = Id.Keyword_if},
        KeywordId{.bytes="inline", .id = Id.Keyword_inline},
        KeywordId{.bytes="nakedcc", .id = Id.Keyword_nakedcc},
        KeywordId{.bytes="noalias", .id = Id.Keyword_noalias},
        KeywordId{.bytes="null", .id = Id.Keyword_null},
        KeywordId{.bytes="or", .id = Id.Keyword_or},
        KeywordId{.bytes="packed", .id = Id.Keyword_packed},
        KeywordId{.bytes="pub", .id = Id.Keyword_pub},
        KeywordId{.bytes="return", .id = Id.Keyword_return},
        KeywordId{.bytes="stdcallcc", .id = Id.Keyword_stdcallcc},
        KeywordId{.bytes="struct", .id = Id.Keyword_struct},
        KeywordId{.bytes="switch", .id = Id.Keyword_switch},
        KeywordId{.bytes="test", .id = Id.Keyword_test},
        KeywordId{.bytes="this", .id = Id.Keyword_this},
        KeywordId{.bytes="true", .id = Id.Keyword_true},
        KeywordId{.bytes="undefined", .id = Id.Keyword_undefined},
        KeywordId{.bytes="union", .id = Id.Keyword_union},
        KeywordId{.bytes="unreachable", .id = Id.Keyword_unreachable},
        KeywordId{.bytes="use", .id = Id.Keyword_use},
        KeywordId{.bytes="var", .id = Id.Keyword_var},
        KeywordId{.bytes="volatile", .id = Id.Keyword_volatile},
        KeywordId{.bytes="while", .id = Id.Keyword_while},
    };

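    // Returns the keyword id for `bytes`, or null if it is an ordinary identifier.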
    fn getKeyword(bytes: []const u8) -> ?Id {
        for (keywords) |kw| {
            if (mem.eql(u8, kw.bytes, bytes)) {
                return kw.id;
            }
        }
        return null;
    }

    const StrLitKind = enum {Normal, C};

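    // Token tag. String literals carry their kind (normal or c"...") as a payload.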
    const Id = union(enum) {
        Invalid,
        Identifier,
        StringLiteral: StrLitKind,
        Eof,
        Builtin,
        Equal,
        LParen,
        RParen,
        Semicolon,
        Percent,
        LBrace,
        RBrace,
        Period,
        Ellipsis2,
        Ellipsis3,
        Minus,
        Arrow,
        Colon,
        Slash,
        Comma,
        Ampersand,
        AmpersandEqual,
        NumberLiteral,
        Keyword_align,
        Keyword_and,
        Keyword_asm,
        Keyword_break,
        Keyword_coldcc,
        Keyword_comptime,
        Keyword_const,
        Keyword_continue,
        Keyword_defer,
        Keyword_else,
        Keyword_enum,
        Keyword_error,
        Keyword_export,
        Keyword_extern,
        Keyword_false,
        Keyword_fn,
        Keyword_for,
        Keyword_goto,
        Keyword_if,
        Keyword_inline,
        Keyword_nakedcc,
        Keyword_noalias,
        Keyword_null,
        Keyword_or,
        Keyword_packed,
        Keyword_pub,
        Keyword_return,
        Keyword_stdcallcc,
        Keyword_struct,
        Keyword_switch,
        Keyword_test,
        Keyword_this,
        Keyword_true,
        Keyword_undefined,
        Keyword_union,
        Keyword_unreachable,
        Keyword_use,
        Keyword_var,
        Keyword_volatile,
        Keyword_while,
    };
};

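// Hand-written tokenizer: a state machine over `buffer`, driven one byte at a time by next().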
const Tokenizer = struct {
    buffer: []const u8,
    index: usize,

    pub const Location = struct {
        line: usize,
        column: usize,
        line_start: usize,
        line_end: usize,
    };

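    // Recomputes line/column information for a token by rescanning the buffer from the start.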
    pub fn getTokenLocation(self: &Tokenizer, token: &const Token) -> Location {
        var loc = Location {
            .line = 0,
            .column = 0,
            .line_start = 0,
            .line_end = 0,
        };
        for (self.buffer) |c, i| {
            if (i == token.start) {
                loc.line_end = i;
                while (loc.line_end < self.buffer.len and self.buffer[loc.line_end] != '\n') : (loc.line_end += 1) {}
                return loc;
            }
            if (c == '\n') {
                loc.line += 1;
                loc.column = 0;
                loc.line_start = i;
            } else {
                loc.column += 1;
            }
        }
        return loc;
    }

    pub fn dump(self: &Tokenizer, token: &const Token) {
        warn("{} \"{}\"\n", @tagName(token.id), self.buffer[token.start..token.end]);
    }

    pub fn init(buffer: []const u8) -> Tokenizer {
        return Tokenizer {
            .buffer = buffer,
            .index = 0,
        };
    }

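    // States of the tokenizing state machine used by next().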
    const State = enum {
        Start,
        Identifier,
        Builtin,
        C,
        StringLiteral,
        StringLiteralBackslash,
        Minus,
        Slash,
        LineComment,
        Zero,
        NumberLiteral,
        NumberDot,
        FloatFraction,
        FloatExponentUnsigned,
        FloatExponentNumber,
        Ampersand,
        Period,
        Period2,
    };

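    // Returns the next token, advancing `index`. Reaching the end of the buffer yields an Eof token.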
    pub fn next(self: &Tokenizer) -> Token {
        var state = State.Start;
        var result = Token {
            .id = Token.Id.Eof,
            .start = self.index,
            .end = undefined,
        };
        while (self.index < self.buffer.len) : (self.index += 1) {
            const c = self.buffer[self.index];
            switch (state) {
                State.Start => switch (c) {
                    ' ', '\n' => {
                        result.start = self.index + 1;
                    },
                    'c' => {
                        state = State.C;
                        result.id = Token.Id.Identifier;
                    },
                    '"' => {
                        state = State.StringLiteral;
                        result.id = Token.Id { .StringLiteral = Token.StrLitKind.Normal };
                    },
                    'a'...'b', 'd'...'z', 'A'...'Z', '_' => {
                        state = State.Identifier;
                        result.id = Token.Id.Identifier;
                    },
                    '@' => {
                        state = State.Builtin;
                        result.id = Token.Id.Builtin;
                    },
                    '=' => {
                        result.id = Token.Id.Equal;
                        self.index += 1;
                        break;
                    },
                    '(' => {
                        result.id = Token.Id.LParen;
                        self.index += 1;
                        break;
                    },
                    ')' => {
                        result.id = Token.Id.RParen;
                        self.index += 1;
                        break;
                    },
                    ';' => {
                        result.id = Token.Id.Semicolon;
                        self.index += 1;
                        break;
                    },
                    ',' => {
                        result.id = Token.Id.Comma;
                        self.index += 1;
                        break;
                    },
                    ':' => {
                        result.id = Token.Id.Colon;
                        self.index += 1;
                        break;
                    },
                    '%' => {
                        result.id = Token.Id.Percent;
                        self.index += 1;
                        break;
                    },
                    '{' => {
                        result.id = Token.Id.LBrace;
                        self.index += 1;
                        break;
                    },
                    '}' => {
                        result.id = Token.Id.RBrace;
                        self.index += 1;
                        break;
                    },
                    '.' => {
                        state = State.Period;
                    },
                    '-' => {
                        state = State.Minus;
                    },
                    '/' => {
                        state = State.Slash;
                    },
                    '&' => {
                        state = State.Ampersand;
                    },
                    '0' => {
                        state = State.Zero;
                        result.id = Token.Id.NumberLiteral;
                    },
                    '1'...'9' => {
                        state = State.NumberLiteral;
                        result.id = Token.Id.NumberLiteral;
                    },
                    else => {
                        result.id = Token.Id.Invalid;
                        self.index += 1;
                        break;
                    },
                },
                State.Ampersand => switch (c) {
                    '=' => {
                        result.id = Token.Id.AmpersandEqual;
                        self.index += 1;
                        break;
                    },
                    else => {
                        result.id = Token.Id.Ampersand;
                        break;
                    },
                },
                State.Identifier => switch (c) {
                    'a'...'z', 'A'...'Z', '_', '0'...'9' => {},
                    else => {
                        if (Token.getKeyword(self.buffer[result.start..self.index])) |id| {
                            result.id = id;
                        }
                        break;
                    },
                },
                State.Builtin => switch (c) {
                    'a'...'z', 'A'...'Z', '_', '0'...'9' => {},
                    else => break,
                },
                State.C => switch (c) {
                    '\\' => @panic("TODO"),
                    '"' => {
                        state = State.StringLiteral;
                        result.id = Token.Id { .StringLiteral = Token.StrLitKind.C };
                    },
                    'a'...'z', 'A'...'Z', '_', '0'...'9' => {
                        state = State.Identifier;
                    },
                    else => break,
                },
                State.StringLiteral => switch (c) {
                    '\\' => {
                        state = State.StringLiteralBackslash;
                    },
                    '"' => {
                        self.index += 1;
                        break;
                    },
                    '\n' => break, // Look for this error later.
                    else => {},
                },
                State.StringLiteralBackslash => switch (c) {
                    '\n' => break, // Look for this error later.
                    else => {
                        state = State.StringLiteral;
                    },
                },
                State.Minus => switch (c) {
                    '>' => {
                        result.id = Token.Id.Arrow;
                        self.index += 1;
                        break;
                    },
                    else => {
                        result.id = Token.Id.Minus;
                        break;
                    },
                },
                State.Period => switch (c) {
                    '.' => {
                        state = State.Period2;
                    },
                    else => {
                        result.id = Token.Id.Period;
                        break;
                    },
                },
                State.Period2 => switch (c) {
                    '.' => {
                        result.id = Token.Id.Ellipsis3;
                        self.index += 1;
                        break;
                    },
                    else => {
                        result.id = Token.Id.Ellipsis2;
                        break;
                    },
                },
                State.Slash => switch (c) {
                    '/' => {
                        result.id = undefined;
                        state = State.LineComment;
                    },
                    else => {
                        result.id = Token.Id.Slash;
                        break;
                    },
                },
                State.LineComment => switch (c) {
                    '\n' => {
                        state = State.Start;
                        result = Token {
                            .id = Token.Id.Eof,
                            .start = self.index + 1,
                            .end = undefined,
                        };
                    },
                    else => {},
                },
                State.Zero => switch (c) {
                    'b', 'o', 'x' => {
                        state = State.NumberLiteral;
                    },
                    else => {
                        // reinterpret as a normal number
                        self.index -= 1;
                        state = State.NumberLiteral;
                    },
                },
                State.NumberLiteral => switch (c) {
                    '.' => {
                        state = State.NumberDot;
                    },
                    'p', 'P', 'e', 'E' => {
                        state = State.FloatExponentUnsigned;
                    },
                    '0'...'9', 'a'...'f', 'A'...'F' => {},
                    else => break,
                },
                State.NumberDot => switch (c) {
                    '.' => {
                        self.index -= 1;
                        state = State.Start;
                        break;
                    },
                    else => {
                        self.index -= 1;
                        state = State.FloatFraction;
                    },
                },
                State.FloatFraction => switch (c) {
                    'p', 'P', 'e', 'E' => {
                        state = State.FloatExponentUnsigned;
                    },
                    '0'...'9', 'a'...'f', 'A'...'F' => {},
                    else => break,
                },
                State.FloatExponentUnsigned => switch (c) {
                    '+', '-' => {
                        state = State.FloatExponentNumber;
                    },
                    else => {
                        // reinterpret as a normal exponent number
                        self.index -= 1;
                        state = State.FloatExponentNumber;
                    }
                },
                State.FloatExponentNumber => switch (c) {
                    '0'...'9', 'a'...'f', 'A'...'F' => {},
                    else => break,
                },
            }
        }
        result.end = self.index;
        // TODO check state when returning EOF
        return result;
    }
};

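// Small two-valued enums used as named flags on AST nodes.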
const Comptime = enum { No, Yes };
const NoAlias = enum { No, Yes };
const Extern = enum { No, Yes };
const VarArgs = enum { No, Yes };
const Mutability = enum { Const, Var };
const Volatile = enum { No, Yes };

const Inline = enum {
    Auto,
    Always,
    Never,
};

const Visibility = enum {
    Private,
    Pub,
    Export,
};

const CallingConvention = enum {
    Auto,
    C,
    Cold,
    Naked,
    Stdcall,
};

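// Base struct embedded in every concrete AST node; @fieldParentPtr recovers the concrete
// node from the base pointer, and iterate() walks a node's children by index.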
const AstNode = struct {
    id: Id,

    const Id = enum {
        Root,
        VarDecl,
        Identifier,
        FnProto,
        ParamDecl,
        AddrOfExpr,
    };

    fn iterate(base: &AstNode, index: usize) -> ?&AstNode {
        return switch (base.id) {
            Id.Root => @fieldParentPtr(AstNodeRoot, "base", base).iterate(index),
            Id.VarDecl => @fieldParentPtr(AstNodeVarDecl, "base", base).iterate(index),
            Id.Identifier => @fieldParentPtr(AstNodeIdentifier, "base", base).iterate(index),
            Id.FnProto => @fieldParentPtr(AstNodeFnProto, "base", base).iterate(index),
            Id.ParamDecl => @fieldParentPtr(AstNodeParamDecl, "base", base).iterate(index),
            Id.AddrOfExpr => @fieldParentPtr(AstNodeAddrOfExpr, "base", base).iterate(index),
        };
    }
};

const AstNodeRoot = struct {
    base: AstNode,
    decls: ArrayList(&AstNode),

    fn iterate(self: &AstNodeRoot, index: usize) -> ?&AstNode {
        if (index < self.decls.len) {
            return self.decls.items[index];
        }
        return null;
    }
};

const AstNodeVarDecl = struct {
    base: AstNode,
    visib: Visibility,
    name_token: Token,
    eq_token: Token,
    mut: Mutability,
    is_comptime: Comptime,
    is_extern: Extern,
    lib_name: ?&AstNode,
    type_node: ?&AstNode,
    align_node: ?&AstNode,
    init_node: ?&AstNode,

    fn iterate(self: &AstNodeVarDecl, index: usize) -> ?&AstNode {
        var i = index;

        if (self.type_node) |type_node| {
            if (i < 1) return type_node;
            i -= 1;
        }

        if (self.align_node) |align_node| {
            if (i < 1) return align_node;
            i -= 1;
        }

        if (self.init_node) |init_node| {
            if (i < 1) return init_node;
            i -= 1;
        }

        return null;
    }
};

const AstNodeIdentifier = struct {
    base: AstNode,
    name_token: Token,

    fn iterate(self: &AstNodeIdentifier, index: usize) -> ?&AstNode {
        return null;
    }
};

const AstNodeFnProto = struct {
    base: AstNode,
    visib: Visibility,
    fn_token: Token,
    name_token: ?Token,
    params: ArrayList(&AstNode),
    return_type: ?&AstNode,
    var_args: VarArgs,
    is_extern: Extern,
    is_inline: Inline,
    cc: CallingConvention,
    fn_def_node: ?&AstNode,
    lib_name: ?&AstNode, // populated if this is an extern declaration
    align_expr: ?&AstNode, // populated if align(A) is present

    fn iterate(self: &AstNodeFnProto, index: usize) -> ?&AstNode {
        var i = index;

        if (i < self.params.len) return self.params.items[i];
        i -= self.params.len;

        if (self.return_type) |return_type| {
            if (i < 1) return return_type;
            i -= 1;
        }

        if (self.fn_def_node) |fn_def_node| {
            if (i < 1) return fn_def_node;
            i -= 1;
        }

        if (self.lib_name) |lib_name| {
            if (i < 1) return lib_name;
            i -= 1;
        }

        if (self.align_expr) |align_expr| {
            if (i < 1) return align_expr;
            i -= 1;
        }

        return null;
    }
};

const AstNodeParamDecl = struct {
    base: AstNode,
    comptime_token: ?Token,
    noalias_token: ?Token,
    name_token: ?Token,
    type_node: &AstNode,
    var_args_token: ?Token,

    fn iterate(self: &AstNodeParamDecl, index: usize) -> ?&AstNode {
        var i = index;

        if (i < 1) return self.type_node;
        i -= 1;

        return null;
    }
};

const AstNodeAddrOfExpr = struct {
    base: AstNode,
    align_expr: ?&AstNode,
    op_token: Token,
    bit_offset_start_token: ?Token,
    bit_offset_end_token: ?Token,
    const_token: ?Token,
    volatile_token: ?Token,
    op_expr: &AstNode,

    fn iterate(self: &AstNodeAddrOfExpr, index: usize) -> ?&AstNode {
        var i = index;

        if (self.align_expr) |align_expr| {
            if (i < 1) return align_expr;
            i -= 1;
        }

        if (i < 1) return self.op_expr;
        i -= 1;

        return null;
    }
};

error ParseError;

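// Recursive-descent-style parser driven by an explicit state stack instead of recursion.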
const Parser = struct {
    tokenizer: &Tokenizer,
    allocator: &mem.Allocator,
    put_back_tokens: [2]Token,
    put_back_count: usize,
    source_file_name: []const u8,

    fn init(tokenizer: &Tokenizer, allocator: &mem.Allocator, source_file_name: []const u8) -> Parser {
        return Parser {
            .tokenizer = tokenizer,
            .allocator = allocator,
            .put_back_tokens = undefined,
            .put_back_count = 0,
            .source_file_name = source_file_name,
        };
    }

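    // Parser states. Pointer payloads tell a state where to store the node it produces;
    // the *Expression states chain together following operator precedence.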
    const State = union(enum) {
        TopLevel,
        TopLevelModifier: Visibility,
        TopLevelExtern: Visibility,
        Expression: &&AstNode,
        GroupedExpression: &&AstNode,
        UnwrapExpression: &&AstNode,
        BoolOrExpression: &&AstNode,
        BoolAndExpression: &&AstNode,
        ComparisonExpression: &&AstNode,
        BinaryOrExpression: &&AstNode,
        BinaryXorExpression: &&AstNode,
        BinaryAndExpression: &&AstNode,
        BitShiftExpression: &&AstNode,
        AdditionExpression: &&AstNode,
        MultiplyExpression: &&AstNode,
        BraceSuffixExpression: &&AstNode,
        PrefixOpExpression: &&AstNode,
        SuffixOpExpression: &&AstNode,
        PrimaryExpression: &&AstNode,
        TypeExpr: &&AstNode,
        VarDecl: &AstNodeVarDecl,
        VarDeclAlign: &AstNodeVarDecl,
        VarDeclEq: &AstNodeVarDecl,
        ExpectToken: @TagType(Token.Id),
        FnProto: &AstNodeFnProto,
        FnProtoAlign: &AstNodeFnProto,
        ParamDecl: &AstNodeFnProto,
        ParamDeclComma,
    };

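    // Entry point: repeatedly pops a state off the stack and consumes tokens until the
    // root node is complete or a parse error is reported.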
    pub fn parse(self: &Parser) -> %&AstNode {
        var stack = ArrayList(State).init(self.allocator);
        defer stack.deinit();

        %return stack.append(State.TopLevel);

        const root_node = %return self.createRoot();
        // TODO %defer self.freeAst();

        while (true) {
            // This gives us 1 free append that can't fail
            const state = stack.pop();

            switch (state) {
                State.TopLevel => {
                    const token = self.getNextToken();
                    switch (token.id) {
                        Token.Id.Keyword_pub => {
                            stack.append(State {.TopLevelModifier = Visibility.Pub }) %% unreachable;
                            continue;
                        },
                        Token.Id.Keyword_export => {
                            stack.append(State {.TopLevelModifier = Visibility.Export }) %% unreachable;
                            continue;
                        },
                        Token.Id.Keyword_const => {
                            stack.append(State.TopLevel) %% unreachable;
                            const var_decl_node = {
                                const var_decl_node = %return self.createVarDecl(Visibility.Private, Mutability.Const, Comptime.No, Extern.No);
                                %defer self.allocator.destroy(var_decl_node);
                                %return root_node.decls.append(&var_decl_node.base);
                                var_decl_node
                            };
                            %return stack.append(State { .VarDecl = var_decl_node });
                            continue;
                        },
                        Token.Id.Keyword_var => {
                            stack.append(State.TopLevel) %% unreachable;
                            const var_decl_node = {
                                const var_decl_node = %return self.createVarDecl(Visibility.Private, Mutability.Var, Comptime.No, Extern.No);
                                %defer self.allocator.destroy(var_decl_node);
                                %return root_node.decls.append(&var_decl_node.base);
                                var_decl_node
                            };
                            %return stack.append(State { .VarDecl = var_decl_node });
                            continue;
                        },
                        Token.Id.Eof => return &root_node.base,
                        Token.Id.Keyword_extern => {
                            stack.append(State { .TopLevelExtern = Visibility.Private }) %% unreachable;
                            continue;
                        },
                        else => return self.parseError(token, "expected top level declaration, found {}", @tagName(token.id)),
                    }
                },
                State.TopLevelModifier => |visib| {
                    const token = self.getNextToken();
                    switch (token.id) {
                        Token.Id.Keyword_const => {
                            stack.append(State.TopLevel) %% unreachable;
                            const var_decl_node = {
                                const var_decl_node = %return self.createVarDecl(visib, Mutability.Const, Comptime.No, Extern.No);
                                %defer self.allocator.destroy(var_decl_node);
                                %return root_node.decls.append(&var_decl_node.base);
                                var_decl_node
                            };
                            %return stack.append(State { .VarDecl = var_decl_node });
                            continue;
                        },
                        Token.Id.Keyword_var => {
                            stack.append(State.TopLevel) %% unreachable;
                            const var_decl_node = {
                                const var_decl_node = %return self.createVarDecl(visib, Mutability.Var, Comptime.No, Extern.No);
                                %defer self.allocator.destroy(var_decl_node);
                                %return root_node.decls.append(&var_decl_node.base);
                                var_decl_node
                            };
                            %return stack.append(State { .VarDecl = var_decl_node });
                            continue;
                        },
                        Token.Id.Keyword_extern => {
                            stack.append(State { .TopLevelExtern = visib }) %% unreachable;
                            continue;
                        },
                        else => return self.parseError(token, "expected top level declaration, found {}", @tagName(token.id)),
                    }
                },
                State.TopLevelExtern => |visib| {
                    const token = self.getNextToken();
                    switch (token.id) {
                        Token.Id.Keyword_var => {
                            stack.append(State.TopLevel) %% unreachable;
                            const var_decl_node = {
                                const var_decl_node = %return self.createVarDecl(visib, Mutability.Var, Comptime.No, Extern.Yes);
                                %defer self.allocator.destroy(var_decl_node);
                                %return root_node.decls.append(&var_decl_node.base);
                                var_decl_node
                            };
                            %return stack.append(State { .VarDecl = var_decl_node });
                            continue;
                        },
                        Token.Id.Keyword_fn => {
                            stack.append(State.TopLevel) %% unreachable;
                            %return stack.append(State { .ExpectToken = Token.Id.Semicolon });
                            const fn_proto_node = %return self.createAttachFnProto(&root_node.decls, token,
                                Extern.Yes, CallingConvention.Auto, visib, Inline.Auto);
                            %return stack.append(State { .FnProto = fn_proto_node });
                            continue;
                        },
                        Token.Id.StringLiteral => {
                            @panic("TODO extern with string literal");
                        },
                        Token.Id.Keyword_coldcc, Token.Id.Keyword_nakedcc, Token.Id.Keyword_stdcallcc => {
                            stack.append(State.TopLevel) %% unreachable;
                            %return stack.append(State { .ExpectToken = Token.Id.Semicolon });
                            const cc = switch (token.id) {
                                Token.Id.Keyword_coldcc => CallingConvention.Cold,
                                Token.Id.Keyword_nakedcc => CallingConvention.Naked,
                                Token.Id.Keyword_stdcallcc => CallingConvention.Stdcall,
                                else => unreachable,
                            };
                            const fn_token = %return self.eatToken(Token.Id.Keyword_fn);
                            const fn_proto_node = %return self.createAttachFnProto(&root_node.decls, fn_token,
                                Extern.Yes, cc, visib, Inline.Auto);
                            %return stack.append(State { .FnProto = fn_proto_node });
                            continue;
                        },
                        else => return self.parseError(token, "expected variable declaration or function, found {}", @tagName(token.id)),
                    }
                },
                State.VarDecl => |var_decl| {
                    var_decl.name_token = %return self.eatToken(Token.Id.Identifier);
                    stack.append(State { .VarDeclAlign = var_decl }) %% unreachable;

                    const next_token = self.getNextToken();
                    if (next_token.id == Token.Id.Colon) {
                        %return stack.append(State { .TypeExpr = removeNullCast(&var_decl.type_node) });
                        continue;
                    }

                    self.putBackToken(next_token);
                    continue;
                },
                State.VarDeclAlign => |var_decl| {
                    stack.append(State { .VarDeclEq = var_decl }) %% unreachable;

                    const next_token = self.getNextToken();
                    if (next_token.id == Token.Id.Keyword_align) {
                        %return stack.append(State { .GroupedExpression = removeNullCast(&var_decl.align_node) });
                        continue;
                    }

                    self.putBackToken(next_token);
                    continue;
                },
                State.VarDeclEq => |var_decl| {
                    var_decl.eq_token = %return self.eatToken(Token.Id.Equal);
                    stack.append(State { .ExpectToken = Token.Id.Semicolon }) %% unreachable;
                    %return stack.append(State {
                        .Expression = removeNullCast(&var_decl.init_node),
                    });
                    continue;
                },
                State.ExpectToken => |token_id| {
                    _ = %return self.eatToken(token_id);
                    continue;
                },
                State.Expression => |result_ptr| {
                    stack.append(State {.UnwrapExpression = result_ptr}) %% unreachable;
                    continue;
                },

                State.UnwrapExpression => |result_ptr| {
                    stack.append(State {.BoolOrExpression = result_ptr}) %% unreachable;
                    continue;
                },

                State.BoolOrExpression => |result_ptr| {
                    stack.append(State {.BoolAndExpression = result_ptr}) %% unreachable;
                    continue;
                },

                State.BoolAndExpression => |result_ptr| {
                    stack.append(State {.ComparisonExpression = result_ptr}) %% unreachable;
                    continue;
                },

                State.ComparisonExpression => |result_ptr| {
                    stack.append(State {.BinaryOrExpression = result_ptr}) %% unreachable;
                    continue;
                },

                State.BinaryOrExpression => |result_ptr| {
                    stack.append(State {.BinaryXorExpression = result_ptr}) %% unreachable;
                    continue;
                },

                State.BinaryXorExpression => |result_ptr| {
                    stack.append(State {.BinaryAndExpression = result_ptr}) %% unreachable;
                    continue;
                },

                State.BinaryAndExpression => |result_ptr| {
                    stack.append(State {.BitShiftExpression = result_ptr}) %% unreachable;
                    continue;
                },

                State.BitShiftExpression => |result_ptr| {
                    stack.append(State {.AdditionExpression = result_ptr}) %% unreachable;
                    continue;
                },

                State.AdditionExpression => |result_ptr| {
                    stack.append(State {.MultiplyExpression = result_ptr}) %% unreachable;
                    continue;
                },

                State.MultiplyExpression => |result_ptr| {
                    stack.append(State {.BraceSuffixExpression = result_ptr}) %% unreachable;
                    continue;
                },

                State.BraceSuffixExpression => |result_ptr| {
                    stack.append(State {.PrefixOpExpression = result_ptr}) %% unreachable;
                    continue;
                },

                State.PrefixOpExpression => |result_ptr| {
                    const first_token = self.getNextToken();
                    if (first_token.id == Token.Id.Ampersand) {
                        const addr_of_expr = %return self.createAttachAddrOfExpr(result_ptr, first_token);
                        var token = self.getNextToken();
                        if (token.id == Token.Id.Keyword_align) {
                            @panic("TODO align");
                        }
                        if (token.id == Token.Id.Keyword_const) {
                            addr_of_expr.const_token = token;
                            token = self.getNextToken();
                        }
                        if (token.id == Token.Id.Keyword_volatile) {
                            addr_of_expr.volatile_token = token;
                            token = self.getNextToken();
                        }
                        self.putBackToken(token);
                        stack.append(State { .PrefixOpExpression = &addr_of_expr.op_expr }) %% unreachable;
                        continue;
                    }

                    self.putBackToken(first_token);
                    stack.append(State { .SuffixOpExpression = result_ptr }) %% unreachable;
                    continue;
                },

                State.SuffixOpExpression => |result_ptr| {
                    stack.append(State { .PrimaryExpression = result_ptr }) %% unreachable;
                    continue;
                },

                State.PrimaryExpression => |result_ptr| {
                    const token = self.getNextToken();
                    switch (token.id) {
                        Token.Id.Identifier => {
                            const identifier = %return self.createIdentifier(token);
                            *result_ptr = &identifier.base;
                            continue;
                        },
                        else => return self.parseError(token, "expected primary expression, found {}", @tagName(token.id)),
                    }
                },

                State.TypeExpr => |result_ptr| {
                    const token = self.getNextToken();
                    if (token.id == Token.Id.Keyword_var) {
                        @panic("TODO param with type var");
                    }
                    self.putBackToken(token);

                    stack.append(State { .PrefixOpExpression = result_ptr }) %% unreachable;
                    continue;
                },

                State.FnProto => |fn_proto| {
                    stack.append(State { .FnProtoAlign = fn_proto }) %% unreachable;
                    %return stack.append(State { .ParamDecl = fn_proto });
                    %return stack.append(State { .ExpectToken = Token.Id.LParen });

                    const next_token = self.getNextToken();
                    if (next_token.id == Token.Id.Identifier) {
                        fn_proto.name_token = next_token;
                        continue;
                    }
                    self.putBackToken(next_token);
                    continue;
                },

                State.FnProtoAlign => |fn_proto| {
                    const token = self.getNextToken();
                    if (token.id == Token.Id.Keyword_align) {
                        @panic("TODO fn proto align");
                    }
                    if (token.id == Token.Id.Arrow) {
                        stack.append(State { .TypeExpr = removeNullCast(&fn_proto.return_type) }) %% unreachable;
                        continue;
                    } else {
                        self.putBackToken(token);
                        continue;
                    }
                },

                State.ParamDecl => |fn_proto| {
                    var token = self.getNextToken();
                    if (token.id == Token.Id.RParen) {
                        continue;
                    }
                    const param_decl = %return self.createAttachParamDecl(&fn_proto.params);
                    if (token.id == Token.Id.Keyword_comptime) {
                        param_decl.comptime_token = token;
                        token = self.getNextToken();
                    } else if (token.id == Token.Id.Keyword_noalias) {
                        param_decl.noalias_token = token;
                        token = self.getNextToken();
                    };
                    if (token.id == Token.Id.Identifier) {
                        const next_token = self.getNextToken();
                        if (next_token.id == Token.Id.Colon) {
                            param_decl.name_token = token;
                            token = self.getNextToken();
                        } else {
                            self.putBackToken(next_token);
                        }
                    }
                    if (token.id == Token.Id.Ellipsis3) {
                        param_decl.var_args_token = token;
                    } else {
                        self.putBackToken(token);
                    }

                    stack.append(State { .ParamDecl = fn_proto }) %% unreachable;
                    %return stack.append(State.ParamDeclComma);
                    %return stack.append(State { .TypeExpr = &param_decl.type_node });
                    continue;
                },

                State.ParamDeclComma => {
                    const token = self.getNextToken();
                    switch (token.id) {
                        Token.Id.RParen => {
                            _ = stack.pop(); // pop off the ParamDecl
                            continue;
                        },
                        Token.Id.Comma => continue,
                        else => return self.parseError(token, "expected ',' or ')', found {}", @tagName(token.id)),
                    }
                },

                State.GroupedExpression => @panic("TODO"),
            }
            unreachable;
        }
    }

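    // Node constructors. The createAttach* variants also record the new node in its parent
    // (a list or a result pointer) before returning it.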
    fn createRoot(self: &Parser) -> %&AstNodeRoot {
        const node = %return self.allocator.create(AstNodeRoot);
        %defer self.allocator.destroy(node);

        *node = AstNodeRoot {
            .base = AstNode {.id = AstNode.Id.Root},
            .decls = ArrayList(&AstNode).init(self.allocator),
        };
        return node;
    }

    fn createVarDecl(self: &Parser, visib: Visibility, mut: Mutability, is_comptime: Comptime,
        is_extern: Extern) -> %&AstNodeVarDecl
    {
        const node = %return self.allocator.create(AstNodeVarDecl);
        %defer self.allocator.destroy(node);

        *node = AstNodeVarDecl {
            .base = AstNode {.id = AstNode.Id.VarDecl},
            .visib = visib,
            .mut = mut,
            .is_comptime = is_comptime,
            .is_extern = is_extern,
            .type_node = null,
            .align_node = null,
            .init_node = null,
            .lib_name = null,
            // initialized later
            .name_token = undefined,
            .eq_token = undefined,
        };
        return node;
    }

    fn createIdentifier(self: &Parser, name_token: &const Token) -> %&AstNodeIdentifier {
        const node = %return self.allocator.create(AstNodeIdentifier);
        %defer self.allocator.destroy(node);

        *node = AstNodeIdentifier {
            .base = AstNode {.id = AstNode.Id.Identifier},
            .name_token = *name_token,
        };
        return node;
    }

    fn createFnProto(self: &Parser, fn_token: &const Token, is_extern: Extern,
        cc: CallingConvention, visib: Visibility, is_inline: Inline) -> %&AstNodeFnProto
    {
        const node = %return self.allocator.create(AstNodeFnProto);
        %defer self.allocator.destroy(node);

        *node = AstNodeFnProto {
            .base = AstNode {.id = AstNode.Id.FnProto},
            .visib = visib,
            .name_token = null,
            .fn_token = *fn_token,
            .params = ArrayList(&AstNode).init(self.allocator),
            .return_type = null,
            .var_args = VarArgs.No,
            .is_extern = is_extern,
            .is_inline = is_inline,
            .cc = cc,
            .fn_def_node = null,
            .lib_name = null,
            .align_expr = null,
        };
        return node;
    }

    fn createParamDecl(self: &Parser) -> %&AstNodeParamDecl {
        const node = %return self.allocator.create(AstNodeParamDecl);
        %defer self.allocator.destroy(node);

        *node = AstNodeParamDecl {
            .base = AstNode {.id = AstNode.Id.ParamDecl},
            .comptime_token = null,
            .noalias_token = null,
            .name_token = null,
            .type_node = undefined,
            .var_args_token = null,
        };
        return node;
    }

    fn createAddrOfExpr(self: &Parser, op_token: &const Token) -> %&AstNodeAddrOfExpr {
        const node = %return self.allocator.create(AstNodeAddrOfExpr);
        %defer self.allocator.destroy(node);

        *node = AstNodeAddrOfExpr {
            .base = AstNode {.id = AstNode.Id.AddrOfExpr},
            .align_expr = null,
            .op_token = *op_token,
            .bit_offset_start_token = null,
            .bit_offset_end_token = null,
            .const_token = null,
            .volatile_token = null,
            .op_expr = undefined,
        };
        return node;
    }

    fn createAttachAddrOfExpr(self: &Parser, result_ptr: &&AstNode, op_token: &const Token) -> %&AstNodeAddrOfExpr {
        const node = %return self.createAddrOfExpr(op_token);
        %defer self.allocator.destroy(node);
        *result_ptr = &node.base;
        return node;
    }

    fn createAttachParamDecl(self: &Parser, list: &ArrayList(&AstNode)) -> %&AstNodeParamDecl {
        const node = %return self.createParamDecl();
        %defer self.allocator.destroy(node);
        %return list.append(&node.base);
        return node;
    }

    fn createAttachFnProto(self: &Parser, list: &ArrayList(&AstNode), fn_token: &const Token,
        is_extern: Extern, cc: CallingConvention, visib: Visibility, is_inline: Inline) -> %&AstNodeFnProto
    {
        const node = %return self.createFnProto(fn_token, is_extern, cc, visib, is_inline);
        %defer self.allocator.destroy(node);
        %return list.append(&node.base);
        return node;
    }

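    // Prints a compiler-style "file:line:col: error:" message with the offending source line
    // and an underline of '~' characters, then returns error.ParseError.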
    fn parseError(self: &Parser, token: &const Token, comptime fmt: []const u8, args: ...) -> error {
        const loc = self.tokenizer.getTokenLocation(token);
        warn("{}:{}:{}: error: " ++ fmt ++ "\n", self.source_file_name, loc.line + 1, loc.column + 1, args);
        warn("{}\n", self.tokenizer.buffer[loc.line_start..loc.line_end]);
        {
            var i: usize = 0;
            while (i < loc.column) : (i += 1) {
                warn(" ");
            }
        }
        {
            const caret_count = token.end - token.start;
            var i: usize = 0;
            while (i < caret_count) : (i += 1) {
                warn("~");
            }
        }
        warn("\n");
        return error.ParseError;
    }

    fn expectToken(self: &Parser, token: &const Token, id: @TagType(Token.Id)) -> %void {
        if (token.id != id) {
            return self.parseError(token, "expected {}, found {}", @tagName(id), @tagName(token.id));
        }
    }

    fn eatToken(self: &Parser, id: @TagType(Token.Id)) -> %Token {
        const token = self.getNextToken();
        %return self.expectToken(token, id);
        return token;
    }

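    // One- or two-token lookahead: putBackToken pushes onto a small buffer that
    // getNextToken drains before asking the tokenizer for more.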
    fn putBackToken(self: &Parser, token: &const Token) {
        self.put_back_tokens[self.put_back_count] = *token;
        self.put_back_count += 1;
    }

    fn getNextToken(self: &Parser) -> Token {
        return if (self.put_back_count != 0) {
            const put_back_index = self.put_back_count - 1;
            const put_back_token = self.put_back_tokens[put_back_index];
            self.put_back_count = put_back_index;
            put_back_token
        } else {
            self.tokenizer.next()
        };
    }
};

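// Reads the source file named by the first command line argument, dumps its tokens,
// parses it, and renders the resulting AST.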
pub fn main() -> %void {
    main2() %% |err| {
        warn("{}\n", @errorName(err));
        return err;
    };
}

pub fn main2() -> %void {
    var incrementing_allocator = %return heap.IncrementingAllocator.init(10 * 1024 * 1024);
    defer incrementing_allocator.deinit();

    const allocator = &incrementing_allocator.allocator;

    const args = %return os.argsAlloc(allocator);
    defer os.argsFree(allocator, args);

    const target_file = args[1];

    const target_file_buf = %return io.readFileAlloc(target_file, allocator);

    warn("====input:====\n");

    warn("{}", target_file_buf);

    warn("====tokenization:====\n");
    {
        var tokenizer = Tokenizer.init(target_file_buf);
        while (true) {
            const token = tokenizer.next();
            tokenizer.dump(token);
            if (token.id == Token.Id.Eof) {
                break;
            }
        }
    }

    warn("====parse:====\n");

    var tokenizer = Tokenizer.init(target_file_buf);
    var parser = Parser.init(&tokenizer, allocator, target_file);
    const node = %return parser.parse();

    render(node, 0);
}

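// Prints the AST as an indented tree, two spaces per nesting level.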
fn render(node: &AstNode, indent: usize) {
    {
        var i: usize = 0;
        while (i < indent) : (i += 1) {
            warn(" ");
        }
    }
    warn("{}\n", @tagName(node.id));
    var i: usize = 0;
    while (node.iterate(i)) |child| : (i += 1) {
        render(child, indent + 2);
    }
}

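// Casts &?&T to &&T so a state can write directly into a nullable node field.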
fn removeNullCast(x: var) -> {const InnerPtr = @typeOf(x).Child.Child; &InnerPtr} {
    comptime assert(@typeId(@typeOf(x)) == builtin.TypeId.Pointer);
    comptime assert(@typeId(@typeOf(x).Child) == builtin.TypeId.Nullable);
    comptime assert(@typeId(@typeOf(x).Child.Child) == builtin.TypeId.Pointer);
    const InnerPtr = @typeOf(x).Child.Child;
    return @ptrCast(&InnerPtr, x);
}