Remove StrLitKind enum

I was looking at the tokenizer, specifically fn testTokenize, and this
statement looked odd:

  if (@TagType(Token.Id)(token.id) != @TagType(Token.Id)(expected_token_id)) {

I then saw the TODO and thought I'd remove StrLitKind, figuring that
would make testTokenize simpler. It did, so I prepared this PR.

The tests still pass, and stage2 zig seems to work: it compiles, and
I was able to use the fmt command.
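
For anyone puzzled by the original statement: while Id was a union(enum),
!= was not defined on the union itself, so testTokenize had to cast both
sides to the tag type first. With Id a plain enum, the values compare
directly. A minimal standalone sketch of the difference (UnionId and
EnumId are made-up stand-ins for the two shapes of Token.Id, not code
from this repo):

  const std = @import("std");

  // Made-up stand-ins for the old and new shapes of Token.Id.
  const UnionId = union(enum) {
      StringLiteral: u8, // payload plays the role StrLitKind did
      Eof,
  };
  const EnumId = enum {
      StringLiteral,
      Eof,
  };

  test "union ids need a tag cast, enum ids compare directly" {
      const a = UnionId{ .StringLiteral = 0 };
      const b = UnionId.Eof;
      // A union(enum) has no !=, so only the tags can be compared:
      std.debug.assert(@TagType(UnionId)(a) != @TagType(UnionId)(b));
      // A plain enum compares directly, which is all testTokenize needs:
      std.debug.assert(EnumId.StringLiteral != EnumId.Eof);
  }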
Authored by Wink Saville on 2018-09-23 15:53:52 -07:00; committed by Andrew Kelley
parent 4241cd666d
commit 0e6c18c820
3 changed files with 25 additions and 40 deletions

@@ -211,7 +211,7 @@ pub const Error = union(enum) {
     pub const ExpectedToken = struct {
         token: TokenIndex,
-        expected_id: @TagType(Token.Id),
+        expected_id: Token.Id,

         pub fn render(self: *const ExpectedToken, tokens: *Tree.TokenList, stream: var) !void {
             const token_name = @tagName(tokens.at(self.token).id);
@@ -221,7 +221,7 @@ pub const Error = union(enum) {
     pub const ExpectedCommaOrEnd = struct {
         token: TokenIndex,
-        end_id: @TagType(Token.Id),
+        end_id: Token.Id,

         pub fn render(self: *const ExpectedCommaOrEnd, tokens: *Tree.TokenList, stream: var) !void {
             const token_name = @tagName(tokens.at(self.token).id);

@@ -2846,12 +2846,12 @@ const ContainerKindCtx = struct {
 };

 const ExpectTokenSave = struct {
-    id: @TagType(Token.Id),
+    id: Token.Id,
     ptr: *TokenIndex,
 };

 const OptionalTokenSave = struct {
-    id: @TagType(Token.Id),
+    id: Token.Id,
     ptr: *?TokenIndex,
 };
@@ -3066,9 +3066,9 @@ const State = union(enum) {
     Identifier: OptionalCtx,
     ErrorTag: **ast.Node,
-    IfToken: @TagType(Token.Id),
+    IfToken: Token.Id,
     IfTokenSave: ExpectTokenSave,
-    ExpectToken: @TagType(Token.Id),
+    ExpectToken: Token.Id,
     ExpectTokenSave: ExpectTokenSave,
     OptionalTokenSave: OptionalTokenSave,
 };
@@ -3243,7 +3243,7 @@ const ExpectCommaOrEndResult = union(enum) {
     parse_error: Error,
 };

-fn expectCommaOrEnd(tok_it: *ast.Tree.TokenList.Iterator, tree: *ast.Tree, end: @TagType(Token.Id)) ExpectCommaOrEndResult {
+fn expectCommaOrEnd(tok_it: *ast.Tree.TokenList.Iterator, tree: *ast.Tree, end: Token.Id) ExpectCommaOrEndResult {
     const token = nextToken(tok_it, tree);
     const token_index = token.index;
     const token_ptr = token.ptr;
@@ -3288,7 +3288,7 @@ fn tokenIdToAssignment(id: *const Token.Id) ?ast.Node.InfixOp.Op {
     };
 }

-fn tokenIdToUnwrapExpr(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
+fn tokenIdToUnwrapExpr(id: Token.Id) ?ast.Node.InfixOp.Op {
     return switch (id) {
         Token.Id.Keyword_catch => ast.Node.InfixOp.Op{ .Catch = null },
         Token.Id.Keyword_orelse => ast.Node.InfixOp.Op{ .UnwrapOptional = void{} },
@@ -3296,7 +3296,7 @@ fn tokenIdToUnwrapExpr(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
     };
 }

-fn tokenIdToComparison(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
+fn tokenIdToComparison(id: Token.Id) ?ast.Node.InfixOp.Op {
     return switch (id) {
         Token.Id.BangEqual => ast.Node.InfixOp.Op{ .BangEqual = void{} },
         Token.Id.EqualEqual => ast.Node.InfixOp.Op{ .EqualEqual = void{} },
@@ -3308,7 +3308,7 @@ fn tokenIdToComparison(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
     };
 }

-fn tokenIdToBitShift(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
+fn tokenIdToBitShift(id: Token.Id) ?ast.Node.InfixOp.Op {
     return switch (id) {
         Token.Id.AngleBracketAngleBracketLeft => ast.Node.InfixOp.Op{ .BitShiftLeft = void{} },
         Token.Id.AngleBracketAngleBracketRight => ast.Node.InfixOp.Op{ .BitShiftRight = void{} },
@@ -3316,7 +3316,7 @@ fn tokenIdToBitShift(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
     };
 }

-fn tokenIdToAddition(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
+fn tokenIdToAddition(id: Token.Id) ?ast.Node.InfixOp.Op {
     return switch (id) {
         Token.Id.Minus => ast.Node.InfixOp.Op{ .Sub = void{} },
         Token.Id.MinusPercent => ast.Node.InfixOp.Op{ .SubWrap = void{} },
@@ -3327,7 +3327,7 @@ fn tokenIdToAddition(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
     };
 }

-fn tokenIdToMultiply(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
+fn tokenIdToMultiply(id: Token.Id) ?ast.Node.InfixOp.Op {
     return switch (id) {
         Token.Id.Slash => ast.Node.InfixOp.Op{ .Div = void{} },
         Token.Id.Asterisk => ast.Node.InfixOp.Op{ .Mult = void{} },
@@ -3339,7 +3339,7 @@ fn tokenIdToMultiply(id: @TagType(Token.Id)) ?ast.Node.InfixOp.Op {
     };
 }

-fn tokenIdToPrefixOp(id: @TagType(Token.Id)) ?ast.Node.PrefixOp.Op {
+fn tokenIdToPrefixOp(id: Token.Id) ?ast.Node.PrefixOp.Op {
     return switch (id) {
         Token.Id.Bang => ast.Node.PrefixOp.Op{ .BoolNot = void{} },
         Token.Id.Tilde => ast.Node.PrefixOp.Op{ .BitNot = void{} },
@@ -3374,7 +3374,7 @@ fn createToCtxLiteral(arena: *mem.Allocator, opt_ctx: *const OptionalCtx, compti
     return node;
 }

-fn eatToken(tok_it: *ast.Tree.TokenList.Iterator, tree: *ast.Tree, id: @TagType(Token.Id)) ?TokenIndex {
+fn eatToken(tok_it: *ast.Tree.TokenList.Iterator, tree: *ast.Tree, id: Token.Id) ?TokenIndex {
     const token = tok_it.peek().?;
     if (token.id == id) {

@@ -73,17 +73,11 @@ pub const Token = struct {
         return null;
     }

-    /// TODO remove this enum
-    const StrLitKind = enum {
-        Normal,
-        C,
-    };
-
-    pub const Id = union(enum) {
+    pub const Id = enum {
         Invalid,
         Identifier,
-        StringLiteral: StrLitKind,
-        MultilineStringLiteralLine: StrLitKind,
+        StringLiteral,
+        MultilineStringLiteralLine,
         CharLiteral,
         Eof,
         Builtin,
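
Note: the StrLitKind payload was what distinguished c"..." literals from
plain ones, and after this change that distinction is no longer carried
on the token id. If some consumer turns out to need it, it could
presumably be recovered from the source bytes instead, along these lines
(a hypothetical helper, not part of this commit):

  // Hypothetical, not part of this commit: for a StringLiteral or
  // MultilineStringLiteralLine token, the literal is the C variant
  // exactly when its first source byte is 'c' (c"..." or c\\...).
  fn isCStrLit(source: []const u8, token: Token) bool {
      return source[token.start] == 'c';
  }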
@@ -311,7 +305,7 @@ pub const Tokenizer = struct {
                 },
                 '"' => {
                     state = State.StringLiteral;
-                    result.id = Token.Id{ .StringLiteral = Token.StrLitKind.Normal };
+                    result.id = Token.Id.StringLiteral;
                 },
                 '\'' => {
                     state = State.CharLiteral;
@@ -390,7 +384,7 @@ pub const Tokenizer = struct {
                 },
                 '\\' => {
                     state = State.Backslash;
-                    result.id = Token.Id{ .MultilineStringLiteralLine = Token.StrLitKind.Normal };
+                    result.id = Token.Id.MultilineStringLiteralLine;
                 },
                 '{' => {
                     result.id = Token.Id.LBrace;
@@ -591,11 +585,11 @@ pub const Tokenizer = struct {
             State.C => switch (c) {
                 '\\' => {
                     state = State.Backslash;
-                    result.id = Token.Id{ .MultilineStringLiteralLine = Token.StrLitKind.C };
+                    result.id = Token.Id.MultilineStringLiteralLine;
                 },
                 '"' => {
                     state = State.StringLiteral;
-                    result.id = Token.Id{ .StringLiteral = Token.StrLitKind.C };
+                    result.id = Token.Id.StringLiteral;
                 },
                 'a'...'z', 'A'...'Z', '_', '0'...'9' => {
                     state = State.Identifier;
@@ -1218,7 +1212,7 @@ test "tokenizer - invalid token characters" {
 test "tokenizer - invalid literal/comment characters" {
     testTokenize("\"\x00\"", []Token.Id{
-        Token.Id{ .StringLiteral = Token.StrLitKind.Normal },
+        Token.Id.StringLiteral,
         Token.Id.Invalid,
     });
     testTokenize("//\x00", []Token.Id{
@@ -1304,7 +1298,7 @@ test "tokenizer - string identifier and builtin fns" {
         Token.Id.Equal,
         Token.Id.Builtin,
         Token.Id.LParen,
-        Token.Id{ .StringLiteral = Token.StrLitKind.Normal },
+        Token.Id.StringLiteral,
         Token.Id.RParen,
         Token.Id.Semicolon,
     });
@@ -1344,17 +1338,8 @@ fn testTokenize(source: []const u8, expected_tokens: []const Token.Id) void {
     var tokenizer = Tokenizer.init(source);
     for (expected_tokens) |expected_token_id| {
         const token = tokenizer.next();
-        if (@TagType(Token.Id)(token.id) != @TagType(Token.Id)(expected_token_id)) {
-            std.debug.panic("expected {}, found {}\n", @tagName(@TagType(Token.Id)(expected_token_id)), @tagName(@TagType(Token.Id)(token.id)));
-        }
-        switch (expected_token_id) {
-            Token.Id.StringLiteral => |expected_kind| {
-                std.debug.assert(expected_kind == switch (token.id) {
-                    Token.Id.StringLiteral => |kind| kind,
-                    else => unreachable,
-                });
-            },
-            else => {},
-        }
+        if (token.id != expected_token_id) {
+            std.debug.panic("expected {}, found {}\n", @tagName(expected_token_id), @tagName(token.id));
+        }
     }
     const last_token = tokenizer.next();
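
With the kind-checking switch gone, a test exercising a C string literal
would now be written the same way as any other. A made-up example in the
style of the existing tests (not part of this commit):

  test "tokenizer - c string literal (made-up example)" {
      testTokenize("c\"hi\"", []Token.Id{Token.Id.StringLiteral});
  }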