Cleanup c_tokenizer.zig tests

frmdstryr 2019-12-29 10:41:26 -05:00 committed by Vexu
parent fcc82a219a
commit 6df9e9fe47

c_tokenizer.zig

@@ -24,21 +24,26 @@ pub const CToken = struct {
         RParen,
         Eof,
         Dot,
-        Asterisk,
-        Ampersand,
-        And,
-        Or,
-        Bang,
-        Tilde,
-        Shl,
-        Shr,
-        Lt,
-        Gt,
-        Increment,
-        Decrement,
+        Asterisk, // *
+        Ampersand, // &
+        And, // &&
+        Assign, // =
+        Or, // ||
+        Bang, // !
+        Tilde, // ~
+        Shl, // <<
+        Shr, // >>
+        Lt, // <
+        Lte, // <=
+        Gt, // >
+        Gte, // >=
+        Eq, // ==
+        Ne, // !=
+        Increment, // ++
+        Decrement, // --
         Comma,
         Fn,
-        Arrow,
+        Arrow, // ->
         LBrace,
         RBrace,
         Pipe,
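
With these additions, C's two-character operators get distinct token ids instead of only their one-character prefixes. Given an input such as "EQL a == b\n" (macro name followed by its body, matching the convention of the tests below), the tokenizer should now emit a single .Eq token between the identifiers. A hypothetical check in the style of the expectTokens tests added further down in this commit:

    expectTokens(&tl, "EQL a == b\n", &[_]CToken{
        ctoken(.Identifier, "EQL"),
        ctoken(.Identifier, "a"),
        ctoken(.Eq, ""),
        ctoken(.Identifier, "b"),
        ctoken(.Eof, ""),
    });
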
@@ -239,6 +244,8 @@ fn next(ctx: *Context, loc: ZigClangSourceLocation, name: []const u8, chars: [*:
         GotMinus,
         GotAmpersand,
         GotPipe,
+        GotBang,
+        GotEq,
         CharLit,
         OpenComment,
         Comment,
@@ -298,6 +305,8 @@ fn next(ctx: *Context, loc: ZigClangSourceLocation, name: []const u8, chars: [*:
             .GotPlus,
             .GotAmpersand,
             .GotPipe,
+            .GotBang,
+            .GotEq,
             => {
                 return result;
             },
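
Listing .GotBang and .GotEq alongside the other Got* states here (presumably the end-of-input branch) means a macro body that ends on a bare '!' or '=' still yields the single-character token already stored in result. A hypothetical check in the test style introduced below:

    expectTokens(&tl, "NOT !", &[_]CToken{
        ctoken(.Identifier, "NOT"),
        ctoken(.Bang, ""),
        ctoken(.Eof, ""),
    });
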
@@ -387,12 +396,16 @@ fn next(ctx: *Context, loc: ZigClangSourceLocation, name: []const u8, chars: [*:
                     },
                     '!' => {
                         result.id = .Bang;
-                        state = .Done;
+                        state = .GotBang;
                     },
                     '~' => {
                         result.id = .Tilde;
                         state = .Done;
                     },
+                    '=' => {
+                        result.id = .Assign;
+                        state = .GotEq;
+                    },
                     ',' => {
                         result.id = .Comma;
                         state = .Done;
@@ -438,9 +451,7 @@ fn next(ctx: *Context, loc: ZigClangSourceLocation, name: []const u8, chars: [*:
                         result.id = .Decrement;
                         state = .Done;
                     },
-                    else => {
-                        return result;
-                    },
+                    else => return result,
                 }
             },
             .GotPlus => {
@@ -449,9 +460,7 @@ fn next(ctx: *Context, loc: ZigClangSourceLocation, name: []const u8, chars: [*:
                         result.id = .Increment;
                         state = .Done;
                     },
-                    else => {
-                        return result;
-                    },
+                    else => return result,
                 }
             },
             .GotLt => {
@@ -460,9 +469,11 @@ fn next(ctx: *Context, loc: ZigClangSourceLocation, name: []const u8, chars: [*:
                         result.id = .Shl;
                         state = .Done;
                     },
-                    else => {
-                        return result;
-                    },
+                    '=' => {
+                        result.id = .Lte;
+                        state = .Done;
+                    },
+                    else => return result,
                 }
             },
             .GotGt => {
@@ -471,9 +482,11 @@ fn next(ctx: *Context, loc: ZigClangSourceLocation, name: []const u8, chars: [*:
                         result.id = .Shr;
                         state = .Done;
                     },
-                    else => {
-                        return result;
-                    },
+                    '=' => {
+                        result.id = .Gte;
+                        state = .Done;
+                    },
+                    else => return result,
                 }
             },
             .GotPipe => {
@@ -482,9 +495,7 @@ fn next(ctx: *Context, loc: ZigClangSourceLocation, name: []const u8, chars: [*:
                         result.id = .Or;
                         state = .Done;
                     },
-                    else => {
-                        return result;
-                    },
+                    else => return result,
                 }
             },
             .GotAmpersand => {
@@ -493,9 +504,25 @@ fn next(ctx: *Context, loc: ZigClangSourceLocation, name: []const u8, chars: [*:
                         result.id = .And;
                         state = .Done;
                     },
-                    else => {
-                        return result;
-                    },
+                    else => return result,
                 }
             },
+            .GotBang => {
+                switch (c) {
+                    '=' => {
+                        result.id = .Ne;
+                        state = .Done;
+                    },
+                    else => return result,
+                }
+            },
+            .GotEq => {
+                switch (c) {
+                    '=' => {
+                        result.id = .Eq;
+                        state = .Done;
+                    },
+                    else => return result,
+                }
+            },
             .Float => {
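
The pattern in each of these states is one character of lookahead with a fallback: .Start records the one-character token (.Bang, .Assign, .Lt, ...) and enters the matching Got* state; the next character either upgrades the token to its two-character form (.Ne, .Eq, .Lte, ...) or the tentative token is returned unchanged. A reduced, self-contained sketch of just that scheme, where Op, scanOp, and the test are illustrative only and not part of the commit:

    const std = @import("std");

    const Op = enum { Assign, Eq, Bang, Ne };

    fn scanOp(src: []const u8) ?Op {
        var result: ?Op = null;
        var state: enum { Start, GotEq, GotBang } = .Start;
        for (src) |c| {
            switch (state) {
                .Start => switch (c) {
                    '=' => {
                        result = .Assign; // may still become .Eq
                        state = .GotEq;
                    },
                    '!' => {
                        result = .Bang; // may still become .Ne
                        state = .GotBang;
                    },
                    else => return null,
                },
                .GotEq => {
                    if (c == '=') result = .Eq;
                    return result;
                },
                .GotBang => {
                    if (c == '=') result = .Ne;
                    return result;
                },
            }
        }
        return result; // input ended while in a Got* state
    }

    test "two-char operators win over their one-char prefixes" {
        std.testing.expect(scanOp("==").? == .Eq);
        std.testing.expect(scanOp("=x").? == .Assign);
        std.testing.expect(scanOp("!=").? == .Ne);
        std.testing.expect(scanOp("!").? == .Bang);
    }
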
@@ -802,57 +829,101 @@ fn next(ctx: *Context, loc: ZigClangSourceLocation, name: []const u8, chars: [*:
     unreachable;
 }
 
+fn expectTokens(tl: *TokenList, src: [*:0]const u8, expected: []CToken) void {
+    tokenizeCMacro(tl, src) catch unreachable;
+    var it = tl.iterator(0);
+    for (expected) |t| {
+        var tok = it.next().?;
+        std.testing.expectEqual(t.id, tok.id);
+        if (t.bytes.len > 0) {
+            //std.debug.warn(" {} = {}\n", .{tok.bytes, t.bytes});
+            std.testing.expectEqualSlices(u8, tok.bytes, t.bytes);
+        }
+        if (t.num_lit_suffix != .None) {
+            std.testing.expectEqual(t.num_lit_suffix, tok.num_lit_suffix);
+        }
+    }
+    std.testing.expect(it.next() == null);
+    tl.shrink(0);
+}
+
 test "tokenize macro" {
     var tl = TokenList.init(std.heap.page_allocator);
     defer tl.deinit();
-    const src = "TEST(0\n";
-    try tokenizeCMacro(&tl, src);
-    var it = tl.iterator(0);
-    expect(it.next().?.id == .Identifier);
-    expect(it.next().?.id == .Fn);
-    expect(it.next().?.id == .LParen);
-    expect(std.mem.eql(u8, it.next().?.bytes, "0"));
-    expect(it.next().?.id == .Eof);
-    expect(it.next() == null);
-    tl.shrink(0);
+    expectTokens(&tl, "TEST(0\n", &[_]CToken{
+        ctoken(.Identifier, "TEST"),
+        ctoken(.Fn, ""),
+        ctoken(.LParen, ""),
+        ctoken(.NumLitInt, "0"),
+        ctoken(.Eof, ""),
+    });
-    const src2 = "__FLT_MIN_10_EXP__ -37\n";
-    try tokenizeCMacro(&tl, src2);
-    it = tl.iterator(0);
-    expect(std.mem.eql(u8, it.next().?.bytes, "__FLT_MIN_10_EXP__"));
-    expect(it.next().?.id == .Minus);
-    expect(std.mem.eql(u8, it.next().?.bytes, "37"));
-    expect(it.next().?.id == .Eof);
-    expect(it.next() == null);
-    tl.shrink(0);
+    expectTokens(&tl, "__FLT_MIN_10_EXP__ -37\n", &[_]CToken{
+        ctoken(.Identifier, "__FLT_MIN_10_EXP__"),
+        ctoken(.Minus, ""),
+        ctoken(.NumLitInt, "37"),
+        ctoken(.Eof, ""),
+    });
-    const src3 = "__llvm__ 1\n#define";
-    try tokenizeCMacro(&tl, src3);
-    it = tl.iterator(0);
-    expect(std.mem.eql(u8, it.next().?.bytes, "__llvm__"));
-    expect(std.mem.eql(u8, it.next().?.bytes, "1"));
-    expect(it.next().?.id == .Eof);
-    expect(it.next() == null);
-    tl.shrink(0);
+    expectTokens(&tl, "__llvm__ 1\n#define", &[_]CToken{
+        ctoken(.Identifier, "__llvm__"),
+        ctoken(.NumLitInt, "1"),
+        ctoken(.Eof, ""),
+    });
-    const src4 = "TEST 2";
-    try tokenizeCMacro(&tl, src4);
-    it = tl.iterator(0);
-    expect(it.next().?.id == .Identifier);
-    expect(std.mem.eql(u8, it.next().?.bytes, "2"));
-    expect(it.next().?.id == .Eof);
-    expect(it.next() == null);
-    tl.shrink(0);
+    expectTokens(&tl, "TEST 2", &[_]CToken{
+        ctoken(.Identifier, "TEST"),
+        ctoken(.NumLitInt, "2"),
+        ctoken(.Eof, ""),
+    });
-    const src5 = "FOO 0ull";
-    try tokenizeCMacro(&tl, src5);
-    it = tl.iterator(0);
-    expect(it.next().?.id == .Identifier);
-    expect(std.mem.eql(u8, it.next().?.bytes, "0"));
-    expect(it.next().?.id == .Eof);
-    expect(it.next() == null);
-    tl.shrink(0);
+    expectTokens(&tl, "FOO 0ull", &[_]CToken{
+        ctoken(.Identifier, "FOO"),
+        cnumtoken(.LLU, "0"),
+        ctoken(.Eof, ""),
+    });
 }
 
+test "tokenize macro ops" {
+    var tl = TokenList.init(std.heap.page_allocator);
+    defer tl.deinit();
+    expectTokens(&tl, "ADD A + B", &[_]CToken{
+        ctoken(.Identifier, "ADD"),
+        ctoken(.Identifier, "A"),
+        ctoken(.Plus, ""),
+        ctoken(.Identifier, "B"),
+        ctoken(.Eof, ""),
+    });
+    expectTokens(&tl, "ADD (A) + B", &[_]CToken{
+        ctoken(.Identifier, "ADD"),
+        ctoken(.LParen, ""),
+        ctoken(.Identifier, "A"),
+        ctoken(.RParen, ""),
+        ctoken(.Plus, ""),
+        ctoken(.Identifier, "B"),
+        ctoken(.Eof, ""),
+    });
expectTokens(&tl, "ADD (A) + B", &[_]CToken{
ctoken(.Identifier, "ADD"),
ctoken(.LParen, ""),
ctoken(.Identifier, "A"),
ctoken(.RParen, ""),
ctoken(.Plus, ""),
ctoken(.Identifier, "B"),
ctoken(.Eof, ""),
});
+}
+
 test "escape sequences" {