translate-c: use ArrayList for macro tokens

Vexu 2020-07-27 15:19:07 +03:00
parent 9f6401c692
commit e7007fa7bd
3 changed files with 302 additions and 323 deletions
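The change in a nutshell: translate-c used to collect macro tokens in the std.SegmentedList(Token, 64) owned by the C tokenizer's Source, and walked them with that list's pointer-based iterator. Tokens are now collected into a plain std.ArrayList(CToken) that is reset and reused for each macro, and walked with a new index-based CTokIterator over the list's items slice. Below is a minimal sketch of that pattern — TokId, Tok, and TokIterator are hypothetical, simplified stand-ins for CToken.Id, CToken, and CTokIterator, written against the std API of that era (mid-2020), not the real translate_c code:

    const std = @import("std");

    const TokId = enum { Identifier, Eof };
    const Tok = struct { id: TokId, start: usize, end: usize };

    // Index-based cursor over a slice of tokens, in the style of CTokIterator:
    // `next` pre-increments, so after a call `it.i` is the index of the token
    // that was just returned and `it.slice(it.i)` is its source text.
    const TokIterator = struct {
        source: []const u8,
        list: []const Tok,
        i: usize = 0,

        fn next(self: *TokIterator) ?TokId {
            if (self.i + 1 >= self.list.len) return null;
            self.i += 1;
            return self.list[self.i].id;
        }

        fn slice(self: *TokIterator, index: usize) []const u8 {
            const tok = self.list[index];
            return self.source[tok.start..tok.end];
        }
    };

    test "reuse one ArrayList for every macro" {
        var tok_list = std.ArrayList(Tok).init(std.testing.allocator);
        defer tok_list.deinit();

        tok_list.items.len = 0; // reset without freeing capacity, as the commit does per macro
        const source = "FOO bar";
        try tok_list.append(.{ .id = .Identifier, .start = 0, .end = 3 });
        try tok_list.append(.{ .id = .Identifier, .start = 4, .end = 7 });
        try tok_list.append(.{ .id = .Eof, .start = 7, .end = 7 });

        var it = TokIterator{ .source = source, .list = tok_list.items };
        std.testing.expect(it.next().? == .Identifier); // the token after the macro name
        std.testing.expect(std.mem.eql(u8, it.slice(it.i), "bar"));
        std.testing.expect(std.mem.eql(u8, it.slice(0), "FOO")); // index 0 is the macro name
    }

Two things this buys over the old SegmentedList iterator: token text is recovered with it.slice(index) instead of threading a separate source slice through every parse function, and "putting a token back" after lookahead is a plain index decrement instead of an iterator prev() call.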

View File

@@ -8,6 +8,10 @@ pub const Tokenizer = tokenizer.Tokenizer;
 pub const parse = @import("c/parse.zig").parse;
 pub const ast = @import("c/ast.zig");
 
+test "" {
+    _ = tokenizer;
+}
+
 pub usingnamespace @import("os/bits.zig");
 pub usingnamespace switch (std.Target.current.os.tag) {

View File

@@ -1,19 +1,10 @@
 const std = @import("std");
 const mem = std.mem;
 
-pub const Source = struct {
-    buffer: []const u8,
-    file_name: []const u8,
-    tokens: TokenList,
-
-    pub const TokenList = std.SegmentedList(Token, 64);
-};
-
 pub const Token = struct {
     id: Id,
     start: usize,
     end: usize,
-    source: *Source,
 
     pub const Id = union(enum) {
         Invalid,
@@ -251,31 +242,6 @@ pub const Token = struct {
         }
     };
 
-    pub fn eql(a: Token, b: Token) bool {
-        // do we really need this cast here
-        if (@as(@TagType(Id), a.id) != b.id) return false;
-        return mem.eql(u8, a.slice(), b.slice());
-    }
-
-    pub fn slice(tok: Token) []const u8 {
-        return tok.source.buffer[tok.start..tok.end];
-    }
-
-    pub const Keyword = struct {
-        bytes: []const u8,
-        id: Id,
-        hash: u32,
-
-        fn init(bytes: []const u8, id: Id) Keyword {
-            @setEvalBranchQuota(2000);
-            return .{
-                .bytes = bytes,
-                .id = id,
-                .hash = std.hash_map.hashString(bytes),
-            };
-        }
-    };
-
     // TODO extensions
     pub const keywords = std.ComptimeStringMap(Id, .{
         .{ "auto", .Keyword_auto },
@@ -355,26 +321,26 @@ pub const Token = struct {
     }
 
     pub const NumSuffix = enum {
-        None,
-        F,
-        L,
-        U,
-        LU,
-        LL,
-        LLU,
+        none,
+        f,
+        l,
+        u,
+        lu,
+        ll,
+        llu,
     };
 
     pub const StrKind = enum {
-        None,
-        Wide,
-        Utf8,
-        Utf16,
-        Utf32,
+        none,
+        wide,
+        utf_8,
+        utf_16,
+        utf_32,
     };
 };
 
 pub const Tokenizer = struct {
-    source: *Source,
+    buffer: []const u8,
     index: usize = 0,
     prev_tok_id: @TagType(Token.Id) = .Invalid,
     pp_directive: bool = false,
@@ -385,7 +351,6 @@ pub const Tokenizer = struct {
             .id = .Eof,
             .start = self.index,
             .end = undefined,
-            .source = self.source,
         };
         var state: enum {
             Start,
@@ -446,8 +411,8 @@
         } = .Start;
         var string = false;
         var counter: u32 = 0;
-        while (self.index < self.source.buffer.len) : (self.index += 1) {
-            const c = self.source.buffer[self.index];
+        while (self.index < self.buffer.len) : (self.index += 1) {
+            const c = self.buffer[self.index];
             switch (state) {
                 .Start => switch (c) {
                     '\n' => {
@@ -460,11 +425,11 @@
                         state = .Cr;
                     },
                     '"' => {
-                        result.id = .{ .StringLiteral = .None };
+                        result.id = .{ .StringLiteral = .none };
                         state = .StringLiteral;
                     },
                     '\'' => {
-                        result.id = .{ .CharLiteral = .None };
+                        result.id = .{ .CharLiteral = .none };
                         state = .CharLiteralStart;
                     },
                     'u' => {
@@ -641,11 +606,11 @@
                         state = .u8;
                     },
                     '\'' => {
-                        result.id = .{ .CharLiteral = .Utf16 };
+                        result.id = .{ .CharLiteral = .utf_16 };
                         state = .CharLiteralStart;
                     },
                     '\"' => {
-                        result.id = .{ .StringLiteral = .Utf16 };
+                        result.id = .{ .StringLiteral = .utf_16 };
                         state = .StringLiteral;
                     },
                     else => {
@@ -655,7 +620,7 @@
                 },
                 .u8 => switch (c) {
                     '\"' => {
-                        result.id = .{ .StringLiteral = .Utf8 };
+                        result.id = .{ .StringLiteral = .utf_8 };
                         state = .StringLiteral;
                     },
                     else => {
@@ -665,11 +630,11 @@
                 },
                 .U => switch (c) {
                     '\'' => {
-                        result.id = .{ .CharLiteral = .Utf32 };
+                        result.id = .{ .CharLiteral = .utf_32 };
                         state = .CharLiteralStart;
                     },
                     '\"' => {
-                        result.id = .{ .StringLiteral = .Utf32 };
+                        result.id = .{ .StringLiteral = .utf_32 };
                         state = .StringLiteral;
                     },
                     else => {
@@ -679,11 +644,11 @@
                 },
                 .L => switch (c) {
                     '\'' => {
-                        result.id = .{ .CharLiteral = .Wide };
+                        result.id = .{ .CharLiteral = .wide };
                         state = .CharLiteralStart;
                     },
                     '\"' => {
-                        result.id = .{ .StringLiteral = .Wide };
+                        result.id = .{ .StringLiteral = .wide };
                         state = .StringLiteral;
                     },
                     else => {
@@ -808,7 +773,7 @@
                 .Identifier => switch (c) {
                     'a'...'z', 'A'...'Z', '_', '0'...'9' => {},
                     else => {
-                        result.id = Token.getKeyword(self.source.buffer[result.start..self.index], self.prev_tok_id == .Hash and !self.pp_directive) orelse .Identifier;
+                        result.id = Token.getKeyword(self.buffer[result.start..self.index], self.prev_tok_id == .Hash and !self.pp_directive) orelse .Identifier;
                         if (self.prev_tok_id == .Hash)
                             self.pp_directive = true;
                         break;
@@ -1137,7 +1102,7 @@
                         state = .IntegerSuffixL;
                     },
                     else => {
-                        result.id = .{ .IntegerLiteral = .None };
+                        result.id = .{ .IntegerLiteral = .none };
                         break;
                     },
                 },
@@ -1146,7 +1111,7 @@
                         state = .IntegerSuffixUL;
                     },
                     else => {
-                        result.id = .{ .IntegerLiteral = .U };
+                        result.id = .{ .IntegerLiteral = .u };
                         break;
                     },
                 },
@@ -1155,34 +1120,34 @@
                         state = .IntegerSuffixLL;
                     },
                     'u', 'U' => {
-                        result.id = .{ .IntegerLiteral = .LU };
+                        result.id = .{ .IntegerLiteral = .lu };
                         self.index += 1;
                         break;
                     },
                     else => {
-                        result.id = .{ .IntegerLiteral = .L };
+                        result.id = .{ .IntegerLiteral = .l };
                         break;
                     },
                 },
                 .IntegerSuffixLL => switch (c) {
                     'u', 'U' => {
-                        result.id = .{ .IntegerLiteral = .LLU };
+                        result.id = .{ .IntegerLiteral = .llu };
                         self.index += 1;
                         break;
                     },
                     else => {
-                        result.id = .{ .IntegerLiteral = .LL };
+                        result.id = .{ .IntegerLiteral = .ll };
                         break;
                     },
                 },
                 .IntegerSuffixUL => switch (c) {
                     'l', 'L' => {
-                        result.id = .{ .IntegerLiteral = .LLU };
+                        result.id = .{ .IntegerLiteral = .llu };
                         self.index += 1;
                         break;
                     },
                     else => {
-                        result.id = .{ .IntegerLiteral = .LU };
+                        result.id = .{ .IntegerLiteral = .lu };
                         break;
                     },
                 },
@@ -1230,26 +1195,26 @@
                 },
                 .FloatSuffix => switch (c) {
                     'l', 'L' => {
-                        result.id = .{ .FloatLiteral = .L };
+                        result.id = .{ .FloatLiteral = .l };
                         self.index += 1;
                         break;
                     },
                     'f', 'F' => {
-                        result.id = .{ .FloatLiteral = .F };
+                        result.id = .{ .FloatLiteral = .f };
                         self.index += 1;
                         break;
                     },
                     else => {
-                        result.id = .{ .FloatLiteral = .None };
+                        result.id = .{ .FloatLiteral = .none };
                         break;
                     },
                 },
             }
-        } else if (self.index == self.source.buffer.len) {
+        } else if (self.index == self.buffer.len) {
             switch (state) {
                 .Start => {},
                 .u, .u8, .U, .L, .Identifier => {
-                    result.id = Token.getKeyword(self.source.buffer[result.start..self.index], self.prev_tok_id == .Hash and !self.pp_directive) orelse .Identifier;
+                    result.id = Token.getKeyword(self.buffer[result.start..self.index], self.prev_tok_id == .Hash and !self.pp_directive) orelse .Identifier;
                 },
                 .Cr,
@@ -1270,11 +1235,11 @@
                 .MacroString,
                 => result.id = .Invalid,
 
-                .FloatExponentDigits => result.id = if (counter == 0) .Invalid else .{ .FloatLiteral = .None },
+                .FloatExponentDigits => result.id = if (counter == 0) .Invalid else .{ .FloatLiteral = .none },
 
                 .FloatFraction,
                 .FloatFractionHex,
-                => result.id = .{ .FloatLiteral = .None },
+                => result.id = .{ .FloatLiteral = .none },
 
                 .IntegerLiteralOct,
                 .IntegerLiteralBinary,
@@ -1282,13 +1247,13 @@
                 .IntegerLiteral,
                 .IntegerSuffix,
                 .Zero,
-                => result.id = .{ .IntegerLiteral = .None },
-                .IntegerSuffixU => result.id = .{ .IntegerLiteral = .U },
-                .IntegerSuffixL => result.id = .{ .IntegerLiteral = .L },
-                .IntegerSuffixLL => result.id = .{ .IntegerLiteral = .LL },
-                .IntegerSuffixUL => result.id = .{ .IntegerLiteral = .LU },
-                .FloatSuffix => result.id = .{ .FloatLiteral = .None },
+                => result.id = .{ .IntegerLiteral = .none },
+                .IntegerSuffixU => result.id = .{ .IntegerLiteral = .u },
+                .IntegerSuffixL => result.id = .{ .IntegerLiteral = .l },
+                .IntegerSuffixLL => result.id = .{ .IntegerLiteral = .ll },
+                .IntegerSuffixUL => result.id = .{ .IntegerLiteral = .lu },
+                .FloatSuffix => result.id = .{ .FloatLiteral = .none },
                 .Equal => result.id = .Equal,
                 .Bang => result.id = .Bang,
                 .Minus => result.id = .Minus,
@@ -1466,7 +1431,7 @@ test "preprocessor keywords" {
         .Hash,
         .Identifier,
         .AngleBracketLeft,
-        .{ .IntegerLiteral = .None },
+        .{ .IntegerLiteral = .none },
         .Nl,
         .Hash,
         .Keyword_ifdef,
@@ -1499,18 +1464,18 @@ test "line continuation" {
         .Identifier,
         .Identifier,
         .Nl,
-        .{ .StringLiteral = .None },
+        .{ .StringLiteral = .none },
         .Nl,
         .Hash,
         .Keyword_define,
-        .{ .StringLiteral = .None },
+        .{ .StringLiteral = .none },
         .Nl,
-        .{ .StringLiteral = .None },
+        .{ .StringLiteral = .none },
         .Nl,
         .Hash,
         .Keyword_define,
-        .{ .StringLiteral = .None },
-        .{ .StringLiteral = .None },
+        .{ .StringLiteral = .none },
+        .{ .StringLiteral = .none },
     });
 }
@@ -1527,23 +1492,23 @@ test "string prefix" {
         \\L'foo'
         \\
     , &[_]Token.Id{
-        .{ .StringLiteral = .None },
+        .{ .StringLiteral = .none },
         .Nl,
-        .{ .StringLiteral = .Utf16 },
+        .{ .StringLiteral = .utf_16 },
         .Nl,
-        .{ .StringLiteral = .Utf8 },
+        .{ .StringLiteral = .utf_8 },
         .Nl,
-        .{ .StringLiteral = .Utf32 },
+        .{ .StringLiteral = .utf_32 },
         .Nl,
-        .{ .StringLiteral = .Wide },
+        .{ .StringLiteral = .wide },
         .Nl,
-        .{ .CharLiteral = .None },
+        .{ .CharLiteral = .none },
         .Nl,
-        .{ .CharLiteral = .Utf16 },
+        .{ .CharLiteral = .utf_16 },
         .Nl,
-        .{ .CharLiteral = .Utf32 },
+        .{ .CharLiteral = .utf_32 },
         .Nl,
-        .{ .CharLiteral = .Wide },
+        .{ .CharLiteral = .wide },
         .Nl,
     });
 }
@@ -1555,33 +1520,29 @@ test "num suffixes" {
         \\ 1u 1ul 1ull 1
         \\
     , &[_]Token.Id{
-        .{ .FloatLiteral = .F },
-        .{ .FloatLiteral = .L },
-        .{ .FloatLiteral = .None },
-        .{ .FloatLiteral = .None },
-        .{ .FloatLiteral = .None },
+        .{ .FloatLiteral = .f },
+        .{ .FloatLiteral = .l },
+        .{ .FloatLiteral = .none },
+        .{ .FloatLiteral = .none },
+        .{ .FloatLiteral = .none },
         .Nl,
-        .{ .IntegerLiteral = .L },
-        .{ .IntegerLiteral = .LU },
-        .{ .IntegerLiteral = .LL },
-        .{ .IntegerLiteral = .LLU },
-        .{ .IntegerLiteral = .None },
+        .{ .IntegerLiteral = .l },
+        .{ .IntegerLiteral = .lu },
+        .{ .IntegerLiteral = .ll },
+        .{ .IntegerLiteral = .llu },
+        .{ .IntegerLiteral = .none },
         .Nl,
-        .{ .IntegerLiteral = .U },
-        .{ .IntegerLiteral = .LU },
-        .{ .IntegerLiteral = .LLU },
-        .{ .IntegerLiteral = .None },
+        .{ .IntegerLiteral = .u },
+        .{ .IntegerLiteral = .lu },
+        .{ .IntegerLiteral = .llu },
+        .{ .IntegerLiteral = .none },
         .Nl,
     });
 }
 
 fn expectTokens(source: []const u8, expected_tokens: []const Token.Id) void {
     var tokenizer = Tokenizer{
-        .source = &Source{
-            .buffer = source,
-            .file_name = undefined,
-            .tokens = undefined,
-        },
+        .buffer = source,
     };
     for (expected_tokens) |expected_token_id| {
         const token = tokenizer.next();
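With Source gone, a std.c.Tokenizer is now constructed from a bare buffer, and a token's text is recovered by slicing that same buffer, since tokens no longer carry a *Source back-pointer. A minimal usage sketch against the post-commit API (hypothetical input, written for the std of that era):

    const std = @import("std");

    test "tokenize a C snippet from a plain buffer" {
        var tokenizer = std.c.Tokenizer{ .buffer = "#define FOO 1\n" };
        const tok = tokenizer.next(); // first token: the '#'
        // tok.start..tok.end index into the buffer the tokenizer was given
        std.testing.expect(std.mem.eql(u8, tokenizer.buffer[tok.start..tok.end], "#"));
    }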

View File

@@ -8,7 +8,6 @@ const Token = std.zig.Token;
 usingnamespace @import("clang.zig");
 const ctok = std.c.tokenizer;
 const CToken = std.c.Token;
-const CTokenList = std.c.tokenizer.Source.TokenList;
 const mem = std.mem;
 const math = std.math;
@@ -5196,16 +5195,39 @@ pub fn freeErrors(errors: []ClangErrMsg) void {
     ZigClangErrorMsg_delete(errors.ptr, errors.len);
 }
 
+const CTokIterator = struct {
+    source: []const u8,
+    list: []const CToken,
+    i: usize = 0,
+
+    fn peek(self: *CTokIterator) ?CToken.Id {
+        if (self.i >= self.list.len) return null;
+        return self.list[self.i + 1].id;
+    }
+
+    fn next(self: *CTokIterator) ?CToken.Id {
+        if (self.i >= self.list.len) return null;
+        self.i += 1;
+        return self.list[self.i].id;
+    }
+
+    fn slice(self: *CTokIterator, index: usize) []const u8 {
+        const tok = self.list[index];
+        return self.source[tok.start..tok.end];
+    }
+};
+
 fn transPreprocessorEntities(c: *Context, unit: *ZigClangASTUnit) Error!void {
     // TODO if we see #undef, delete it from the table
     var it = ZigClangASTUnit_getLocalPreprocessingEntities_begin(unit);
     const it_end = ZigClangASTUnit_getLocalPreprocessingEntities_end(unit);
-    var tok_list = CTokenList.init(c.arena);
+    var tok_list = std.ArrayList(CToken).init(c.gpa);
+    defer tok_list.deinit();
     const scope = c.global_scope;
 
     while (it.I != it_end.I) : (it.I += 1) {
         const entity = ZigClangPreprocessingRecord_iterator_deref(it);
-        tok_list.shrink(0);
+        tok_list.items.len = 0;
         switch (ZigClangPreprocessedEntity_getKind(entity)) {
             .MacroDefinitionKind => {
                 const macro = @ptrCast(*ZigClangMacroDefinitionRecord, entity);
@@ -5223,38 +5245,34 @@ fn transPreprocessorEntities(c: *Context, unit: *ZigClangASTUnit) Error!void {
                 const begin_c = ZigClangSourceManager_getCharacterData(c.source_manager, begin_loc);
                 const slice = begin_c[0..mem.len(begin_c)];
 
-                tok_list.shrink(0);
                 var tokenizer = std.c.Tokenizer{
-                    .source = &std.c.tokenizer.Source{
-                        .buffer = slice,
-                        .file_name = undefined,
-                        .tokens = undefined,
-                    },
+                    .buffer = slice,
                 };
                 while (true) {
                     const tok = tokenizer.next();
                     switch (tok.id) {
                         .Nl, .Eof => {
-                            try tok_list.push(tok);
+                            try tok_list.append(tok);
                             break;
                         },
                         .LineComment, .MultiLineComment => continue,
                         else => {},
                     }
-                    try tok_list.push(tok);
+                    try tok_list.append(tok);
                 }
 
-                var tok_it = tok_list.iterator(0);
-                const first_tok = tok_it.next().?;
-                assert(mem.eql(u8, slice[first_tok.start..first_tok.end], name));
+                var tok_it = CTokIterator{
+                    .source = slice,
+                    .list = tok_list.items,
+                };
+                assert(mem.eql(u8, tok_it.slice(0), name));
 
                 var macro_fn = false;
-                const next = tok_it.peek().?;
-                switch (next.id) {
+                switch (tok_it.peek().?) {
                     .Identifier => {
                         // if it equals itself, ignore. for example, from stdio.h:
                         // #define stdin stdin
-                        if (mem.eql(u8, name, slice[next.start..next.end])) {
+                        if (mem.eql(u8, name, tok_it.slice(1))) {
                             continue;
                         }
                     },
@@ -5265,15 +5283,15 @@ fn transPreprocessorEntities(c: *Context, unit: *ZigClangASTUnit) Error!void {
                     },
                     .LParen => {
                         // if the name is immediately followed by a '(' then it is a function
-                        macro_fn = first_tok.end == next.start;
+                        macro_fn = tok_it.list[0].end == tok_it.list[1].start;
                     },
                     else => {},
                 }
 
                 (if (macro_fn)
-                    transMacroFnDefine(c, &tok_it, slice, mangled_name, begin_loc)
+                    transMacroFnDefine(c, &tok_it, mangled_name, begin_loc)
                 else
-                    transMacroDefine(c, &tok_it, slice, mangled_name, begin_loc)) catch |err| switch (err) {
+                    transMacroDefine(c, &tok_it, mangled_name, begin_loc)) catch |err| switch (err) {
                     error.ParseError => continue,
                     error.OutOfMemory => |e| return e,
                 };
@@ -5283,7 +5301,7 @@ fn transPreprocessorEntities(c: *Context, unit: *ZigClangASTUnit) Error!void {
     }
 }
 
-fn transMacroDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8, name: []const u8, source_loc: ZigClangSourceLocation) ParseError!void {
+fn transMacroDefine(c: *Context, it: *CTokIterator, name: []const u8, source_loc: ZigClangSourceLocation) ParseError!void {
     const scope = &c.global_scope.base;
 
     const visib_tok = try appendToken(c, .Keyword_pub, "pub");
@@ -5291,15 +5309,15 @@ fn transMacroDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8, name: []const u8, source_loc: ZigClangSourceLocation) ParseError!void {
     const name_tok = try appendIdentifier(c, name);
     const eq_token = try appendToken(c, .Equal, "=");
 
-    const init_node = try parseCExpr(c, it, source, source_loc, scope);
+    const init_node = try parseCExpr(c, it, source_loc, scope);
     const last = it.next().?;
-    if (last.id != .Eof and last.id != .Nl)
+    if (last != .Eof and last != .Nl)
         return failDecl(
             c,
             source_loc,
             name,
             "unable to translate C expr: unexpected token .{}",
-            .{@tagName(last.id)},
+            .{@tagName(last)},
         );
 
     const semicolon_token = try appendToken(c, .Semicolon, ";");
@@ -5315,7 +5333,7 @@ fn transMacroDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8, name: []const u8, source_loc: ZigClangSourceLocation) ParseError!void {
     _ = try c.global_scope.macro_table.put(name, &node.base);
 }
 
-fn transMacroFnDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8, name: []const u8, source_loc: ZigClangSourceLocation) ParseError!void {
+fn transMacroFnDefine(c: *Context, it: *CTokIterator, name: []const u8, source_loc: ZigClangSourceLocation) ParseError!void {
     var block_scope = try Scope.Block.init(c, &c.global_scope.base, null);
     defer block_scope.deinit();
     const scope = &block_scope.base;
@@ -5326,7 +5344,7 @@ fn transMacroFnDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8, name: []const u8, source_loc: ZigClangSourceLocation) ParseError!void {
     const name_tok = try appendIdentifier(c, name);
     _ = try appendToken(c, .LParen, "(");
 
-    if (it.next().?.id != .LParen) {
+    if (it.next().? != .LParen) {
         return failDecl(
             c,
             source_loc,
@@ -5340,8 +5358,7 @@ fn transMacroFnDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8, name: []const u8, source_loc: ZigClangSourceLocation) ParseError!void {
     defer fn_params.deinit();
 
     while (true) {
-        const param_tok = it.next().?;
-        if (param_tok.id != .Identifier) {
+        if (it.next().? != .Identifier) {
             return failDecl(
                 c,
                 source_loc,
@@ -5351,7 +5368,7 @@ fn transMacroFnDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8, name: []const u8, source_loc: ZigClangSourceLocation) ParseError!void {
             );
         }
 
-        const mangled_name = try block_scope.makeMangledName(c, source[param_tok.start..param_tok.end]);
+        const mangled_name = try block_scope.makeMangledName(c, it.slice(it.i));
         const param_name_tok = try appendIdentifier(c, mangled_name);
         _ = try appendToken(c, .Colon, ":");
@@ -5369,13 +5386,13 @@ fn transMacroFnDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8, name: []const u8, source_loc: ZigClangSourceLocation) ParseError!void {
             .param_type = .{ .any_type = &any_type.base },
         };
 
-        if (it.peek().?.id != .Comma)
+        if (it.peek().? != .Comma)
             break;
         _ = it.next();
         _ = try appendToken(c, .Comma, ",");
     }
 
-    if (it.next().?.id != .RParen) {
+    if (it.next().? != .RParen) {
         return failDecl(
             c,
             source_loc,
@@ -5390,15 +5407,15 @@ fn transMacroFnDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8, name: []const u8, source_loc: ZigClangSourceLocation) ParseError!void {
     const type_of = try c.createBuiltinCall("@TypeOf", 1);
 
     const return_kw = try appendToken(c, .Keyword_return, "return");
-    const expr = try parseCExpr(c, it, source, source_loc, scope);
+    const expr = try parseCExpr(c, it, source_loc, scope);
     const last = it.next().?;
-    if (last.id != .Eof and last.id != .Nl)
+    if (last != .Eof and last != .Nl)
         return failDecl(
             c,
             source_loc,
             name,
             "unable to translate C expr: unexpected token .{}",
-            .{@tagName(last.id)},
+            .{@tagName(last)},
         );
     _ = try appendToken(c, .Semicolon, ";");
     const type_of_arg = if (expr.tag != .Block) expr else blk: {
@@ -5435,28 +5452,27 @@ fn transMacroFnDefine(c: *Context, it: *CTokenList.Iterator, source: []const u8, name: []const u8, source_loc: ZigClangSourceLocation) ParseError!void {
 const ParseError = Error || error{ParseError};
 
-fn parseCExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
-    const node = try parseCPrefixOpExpr(c, it, source, source_loc, scope);
-    switch (it.next().?.id) {
+fn parseCExpr(c: *Context, it: *CTokIterator, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
+    const node = try parseCPrefixOpExpr(c, it, source_loc, scope);
+    switch (it.next().?) {
         .QuestionMark => {
             // must come immediately after expr
             _ = try appendToken(c, .RParen, ")");
             const if_node = try transCreateNodeIf(c);
             if_node.condition = node;
-            if_node.body = try parseCPrimaryExpr(c, it, source, source_loc, scope);
-            if (it.next().?.id != .Colon) {
-                const first_tok = it.list.at(0);
+            if_node.body = try parseCPrimaryExpr(c, it, source_loc, scope);
+            if (it.next().? != .Colon) {
                 try failDecl(
                     c,
                     source_loc,
-                    source[first_tok.start..first_tok.end],
+                    it.slice(0),
                     "unable to translate C expr: expected ':'",
                     .{},
                 );
                 return error.ParseError;
             }
             if_node.@"else" = try transCreateNodeElse(c);
-            if_node.@"else".?.body = try parseCPrimaryExpr(c, it, source, source_loc, scope);
+            if_node.@"else".?.body = try parseCPrimaryExpr(c, it, source_loc, scope);
             return &if_node.base;
         },
         .Comma => {
@@ -5479,10 +5495,10 @@ fn parseCExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
                 };
                 try block_scope.statements.append(&op_node.base);
 
-                last = try parseCPrefixOpExpr(c, it, source, source_loc, scope);
+                last = try parseCPrefixOpExpr(c, it, source_loc, scope);
                 _ = try appendToken(c, .Semicolon, ";");
-                if (it.next().?.id != .Comma) {
-                    _ = it.prev();
+                if (it.next().? != .Comma) {
+                    it.i -= 1;
                     break;
                 }
             }
@@ -5493,70 +5509,74 @@
             return &block_node.base;
         },
         else => {
-            _ = it.prev();
+            it.i -= 1;
             return node;
         },
     }
 }
 
-fn parseCNumLit(c: *Context, tok: *CToken, source: []const u8, source_loc: ZigClangSourceLocation) ParseError!*ast.Node {
-    var lit_bytes = source[tok.start..tok.end];
-
-    if (tok.id == .IntegerLiteral) {
-        if (lit_bytes.len > 2 and lit_bytes[0] == '0') {
-            switch (lit_bytes[1]) {
-                '0'...'7' => {
-                    // Octal
-                    lit_bytes = try std.fmt.allocPrint(c.arena, "0o{}", .{lit_bytes});
-                },
-                'X' => {
-                    // Hexadecimal with capital X, valid in C but not in Zig
-                    lit_bytes = try std.fmt.allocPrint(c.arena, "0x{}", .{lit_bytes[2..]});
-                },
-                else => {},
-            }
-        }
-
-        if (tok.id.IntegerLiteral == .None) {
-            return transCreateNodeInt(c, lit_bytes);
-        }
-
-        const cast_node = try c.createBuiltinCall("@as", 2);
-        cast_node.params()[0] = try transCreateNodeIdentifier(c, switch (tok.id.IntegerLiteral) {
-            .U => "c_uint",
-            .L => "c_long",
-            .LU => "c_ulong",
-            .LL => "c_longlong",
-            .LLU => "c_ulonglong",
-            else => unreachable,
-        });
-        lit_bytes = lit_bytes[0 .. lit_bytes.len - switch (tok.id.IntegerLiteral) {
-            .U, .L => @as(u8, 1),
-            .LU, .LL => 2,
-            .LLU => 3,
-            else => unreachable,
-        }];
-        _ = try appendToken(c, .Comma, ",");
-        cast_node.params()[1] = try transCreateNodeInt(c, lit_bytes);
-        cast_node.rparen_token = try appendToken(c, .RParen, ")");
-        return &cast_node.base;
-    } else if (tok.id == .FloatLiteral) {
-        if (lit_bytes[0] == '.')
-            lit_bytes = try std.fmt.allocPrint(c.arena, "0{}", .{lit_bytes});
-        if (tok.id.FloatLiteral == .None) {
-            return transCreateNodeFloat(c, lit_bytes);
-        }
-        const cast_node = try c.createBuiltinCall("@as", 2);
-        cast_node.params()[0] = try transCreateNodeIdentifier(c, switch (tok.id.FloatLiteral) {
-            .F => "f32",
-            .L => "c_longdouble",
-            else => unreachable,
-        });
-        _ = try appendToken(c, .Comma, ",");
-        cast_node.params()[1] = try transCreateNodeFloat(c, lit_bytes[0 .. lit_bytes.len - 1]);
-        cast_node.rparen_token = try appendToken(c, .RParen, ")");
-        return &cast_node.base;
-    } else unreachable;
+fn parseCNumLit(c: *Context, it: *CTokIterator, source_loc: ZigClangSourceLocation) ParseError!*ast.Node {
+    var lit_bytes = it.slice(it.i);
+
+    switch (it.list[it.i].id) {
+        .IntegerLiteral => |suffix| {
+            if (lit_bytes.len > 2 and lit_bytes[0] == '0') {
+                switch (lit_bytes[1]) {
+                    '0'...'7' => {
+                        // Octal
+                        lit_bytes = try std.fmt.allocPrint(c.arena, "0o{}", .{lit_bytes});
+                    },
+                    'X' => {
+                        // Hexadecimal with capital X, valid in C but not in Zig
+                        lit_bytes = try std.fmt.allocPrint(c.arena, "0x{}", .{lit_bytes[2..]});
+                    },
+                    else => {},
+                }
+            }
+
+            if (suffix == .none) {
+                return transCreateNodeInt(c, lit_bytes);
+            }
+
+            const cast_node = try c.createBuiltinCall("@as", 2);
+            cast_node.params()[0] = try transCreateNodeIdentifier(c, switch (suffix) {
+                .u => "c_uint",
+                .l => "c_long",
+                .lu => "c_ulong",
+                .ll => "c_longlong",
+                .llu => "c_ulonglong",
+                else => unreachable,
+            });
+            lit_bytes = lit_bytes[0 .. lit_bytes.len - switch (suffix) {
+                .u, .l => @as(u8, 1),
+                .lu, .ll => 2,
+                .llu => 3,
+                else => unreachable,
+            }];
+            _ = try appendToken(c, .Comma, ",");
+            cast_node.params()[1] = try transCreateNodeInt(c, lit_bytes);
+            cast_node.rparen_token = try appendToken(c, .RParen, ")");
+            return &cast_node.base;
+        },
+        .FloatLiteral => |suffix| {
+            if (lit_bytes[0] == '.')
+                lit_bytes = try std.fmt.allocPrint(c.arena, "0{}", .{lit_bytes});
+            if (suffix == .none) {
+                return transCreateNodeFloat(c, lit_bytes);
+            }
+            const cast_node = try c.createBuiltinCall("@as", 2);
+            cast_node.params()[0] = try transCreateNodeIdentifier(c, switch (suffix) {
+                .f => "f32",
+                .l => "c_longdouble",
+                else => unreachable,
+            });
+            _ = try appendToken(c, .Comma, ",");
+            cast_node.params()[1] = try transCreateNodeFloat(c, lit_bytes[0 .. lit_bytes.len - 1]);
+            cast_node.rparen_token = try appendToken(c, .RParen, ")");
+            return &cast_node.base;
+        },
+        else => unreachable,
+    }
 }
 
 fn zigifyEscapeSequences(ctx: *Context, source_bytes: []const u8, name: []const u8, source_loc: ZigClangSourceLocation) ![]const u8 {
@@ -5719,13 +5739,13 @@ fn zigifyEscapeSequences(ctx: *Context, source_bytes: []const u8, name: []const u8, source_loc: ZigClangSourceLocation) ![]const u8 {
     return bytes[0..i];
 }
 
-fn parseCPrimaryExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
+fn parseCPrimaryExpr(c: *Context, it: *CTokIterator, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
     const tok = it.next().?;
-    switch (tok.id) {
+    const slice = it.slice(it.i);
+    switch (tok) {
         .CharLiteral => {
-            const first_tok = it.list.at(0);
-            if (source[tok.start] != '\'' or source[tok.start + 1] == '\\' or tok.end - tok.start == 3) {
-                const token = try appendToken(c, .CharLiteral, try zigifyEscapeSequences(c, source[tok.start..tok.end], source[first_tok.start..first_tok.end], source_loc));
+            if (slice[0] != '\'' or slice[1] == '\\' or slice.len == 3) {
+                const token = try appendToken(c, .CharLiteral, try zigifyEscapeSequences(c, slice, it.slice(0), source_loc));
                 const node = try c.arena.create(ast.Node.OneToken);
                 node.* = .{
                     .base = .{ .tag = .CharLiteral },
@@ -5733,7 +5753,7 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
                 };
                 return &node.base;
             } else {
-                const token = try appendTokenFmt(c, .IntegerLiteral, "0x{x}", .{source[tok.start + 1 .. tok.end - 1]});
+                const token = try appendTokenFmt(c, .IntegerLiteral, "0x{x}", .{slice[1 .. slice.len - 1]});
                 const node = try c.arena.create(ast.Node.OneToken);
                 node.* = .{
                     .base = .{ .tag = .IntegerLiteral },
@@ -5743,8 +5763,7 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
             }
         },
         .StringLiteral => {
-            const first_tok = it.list.at(0);
-            const token = try appendToken(c, .StringLiteral, try zigifyEscapeSequences(c, source[tok.start..tok.end], source[first_tok.start..first_tok.end], source_loc));
+            const token = try appendToken(c, .StringLiteral, try zigifyEscapeSequences(c, slice, it.slice(0), source_loc));
             const node = try c.arena.create(ast.Node.OneToken);
             node.* = .{
                 .base = .{ .tag = .StringLiteral },
@@ -5753,7 +5772,7 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
             return &node.base;
         },
         .IntegerLiteral, .FloatLiteral => {
-            return parseCNumLit(c, tok, source, source_loc);
+            return parseCNumLit(c, it, source_loc);
         },
         // eventually this will be replaced by std.c.parse which will handle these correctly
         .Keyword_void => return transCreateNodeIdentifierUnchecked(c, "c_void"),
@@ -5763,37 +5782,50 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
         .Keyword_int => return transCreateNodeIdentifierUnchecked(c, "c_int"),
         .Keyword_float => return transCreateNodeIdentifierUnchecked(c, "f32"),
         .Keyword_short => return transCreateNodeIdentifierUnchecked(c, "c_short"),
-        .Keyword_char => return transCreateNodeIdentifierUnchecked(c, "c_char"),
-        .Keyword_unsigned => if (it.next()) |t| {
-            switch (t.id) {
-                .Keyword_short => return transCreateNodeIdentifierUnchecked(c, "c_ushort"),
-                .Keyword_int => return transCreateNodeIdentifierUnchecked(c, "c_uint"),
-                .Keyword_long => if (it.peek() != null and it.peek().?.id == .Keyword_long) {
-                    _ = it.next();
-                    return transCreateNodeIdentifierUnchecked(c, "c_ulonglong");
-                } else return transCreateNodeIdentifierUnchecked(c, "c_ulong"),
-                else => {
-                    _ = it.prev();
-                    return transCreateNodeIdentifierUnchecked(c, "c_uint");
-                },
-            }
+        .Keyword_char => return transCreateNodeIdentifierUnchecked(c, "u8"),
+        .Keyword_unsigned => if (it.next()) |t| switch (t) {
+            .Keyword_char => return transCreateNodeIdentifierUnchecked(c, "u8"),
+            .Keyword_short => return transCreateNodeIdentifierUnchecked(c, "c_ushort"),
+            .Keyword_int => return transCreateNodeIdentifierUnchecked(c, "c_uint"),
+            .Keyword_long => if (it.peek() != null and it.peek().? == .Keyword_long) {
+                _ = it.next();
+                return transCreateNodeIdentifierUnchecked(c, "c_ulonglong");
+            } else return transCreateNodeIdentifierUnchecked(c, "c_ulong"),
+            else => {
+                it.i -= 1;
+                return transCreateNodeIdentifierUnchecked(c, "c_uint");
+            },
         } else {
             return transCreateNodeIdentifierUnchecked(c, "c_uint");
         },
+        .Keyword_signed => if (it.next()) |t| switch (t) {
+            .Keyword_char => return transCreateNodeIdentifierUnchecked(c, "i8"),
+            .Keyword_short => return transCreateNodeIdentifierUnchecked(c, "c_short"),
+            .Keyword_int => return transCreateNodeIdentifierUnchecked(c, "c_int"),
+            .Keyword_long => if (it.peek() != null and it.peek().? == .Keyword_long) {
+                _ = it.next();
+                return transCreateNodeIdentifierUnchecked(c, "c_longlong");
+            } else return transCreateNodeIdentifierUnchecked(c, "c_long"),
+            else => {
+                it.i -= 1;
+                return transCreateNodeIdentifierUnchecked(c, "c_int");
+            },
+        } else {
+            return transCreateNodeIdentifierUnchecked(c, "c_int");
+        },
         .Identifier => {
-            const mangled_name = scope.getAlias(source[tok.start..tok.end]);
+            const mangled_name = scope.getAlias(it.slice(it.i));
             return transCreateNodeIdentifier(c, mangled_name);
         },
         .LParen => {
-            const inner_node = try parseCExpr(c, it, source, source_loc, scope);
+            const inner_node = try parseCExpr(c, it, source_loc, scope);
 
-            const next_id = it.next().?.id;
+            const next_id = it.next().?;
             if (next_id != .RParen) {
-                const first_tok = it.list.at(0);
                 try failDecl(
                     c,
                     source_loc,
-                    source[first_tok.start..first_tok.end],
+                    it.slice(0),
                     "unable to translate C expr: expected ')'' instead got: {}",
                     .{@tagName(next_id)},
                 );
@@ -5801,7 +5833,7 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
            }
 
            var saw_l_paren = false;
            var saw_integer_literal = false;
-            switch (it.peek().?.id) {
+            switch (it.peek().?) {
                // (type)(to_cast)
                .LParen => {
                    saw_l_paren = true;
@@ -5819,14 +5851,13 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
             // hack to get zig fmt to render a comma in builtin calls
             _ = try appendToken(c, .Comma, ",");
 
-            const node_to_cast = try parseCExpr(c, it, source, source_loc, scope);
+            const node_to_cast = try parseCExpr(c, it, source_loc, scope);
 
-            if (saw_l_paren and it.next().?.id != .RParen) {
-                const first_tok = it.list.at(0);
+            if (saw_l_paren and it.next().? != .RParen) {
                 try failDecl(
                     c,
                     source_loc,
-                    source[first_tok.start..first_tok.end],
+                    it.slice(0),
                     "unable to translate C expr: expected ')''",
                     .{},
                 );
@@ -5857,13 +5888,12 @@ fn parseCPrimaryExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
             return &group_node.base;
         },
         else => {
-            const first_tok = it.list.at(0);
             try failDecl(
                 c,
                 source_loc,
-                source[first_tok.start..first_tok.end],
+                it.slice(0),
                 "unable to translate C expr: unexpected token .{}",
-                .{@tagName(tok.id)},
+                .{@tagName(tok)},
             );
             return error.ParseError;
         },
@@ -5971,61 +6001,52 @@ fn macroIntToBool(c: *Context, node: *ast.Node) !*ast.Node {
     return &group_node.base;
 }
 
-fn parseCSuffixOpExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
-    var node = try parseCPrimaryExpr(c, it, source, source_loc, scope);
+fn parseCSuffixOpExpr(c: *Context, it: *CTokIterator, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
+    var node = try parseCPrimaryExpr(c, it, source_loc, scope);
     while (true) {
-        const tok = it.next().?;
         var op_token: ast.TokenIndex = undefined;
         var op_id: ast.Node.Tag = undefined;
         var bool_op = false;
-        switch (tok.id) {
+        switch (it.next().?) {
             .Period => {
-                const name_tok = it.next().?;
-                if (name_tok.id != .Identifier) {
-                    const first_tok = it.list.at(0);
+                if (it.next().? != .Identifier) {
                     try failDecl(
                         c,
                         source_loc,
-                        source[first_tok.start..first_tok.end],
+                        it.slice(0),
                         "unable to translate C expr: expected identifier",
                         .{},
                     );
                     return error.ParseError;
                 }
-                node = try transCreateNodeFieldAccess(c, node, source[name_tok.start..name_tok.end]);
+                node = try transCreateNodeFieldAccess(c, node, it.slice(it.i));
                 continue;
             },
             .Arrow => {
-                const name_tok = it.next().?;
-                if (name_tok.id != .Identifier) {
-                    const first_tok = it.list.at(0);
+                if (it.next().? != .Identifier) {
                     try failDecl(
                         c,
                         source_loc,
-                        source[first_tok.start..first_tok.end],
+                        it.slice(0),
                         "unable to translate C expr: expected identifier",
                         .{},
                     );
                     return error.ParseError;
                 }
                 const deref = try transCreateNodePtrDeref(c, node);
-                node = try transCreateNodeFieldAccess(c, deref, source[name_tok.start..name_tok.end]);
+                node = try transCreateNodeFieldAccess(c, deref, it.slice(it.i));
                 continue;
             },
             .Asterisk => {
-                if (it.peek().?.id == .RParen) {
+                if (it.peek().? == .RParen) {
                     // type *)
                     // hack to get zig fmt to render a comma in builtin calls
                     _ = try appendToken(c, .Comma, ",");
-                    // * token
-                    _ = it.prev();
                     // last token of `node`
-                    const prev_id = it.prev().?.id;
-                    _ = it.next();
-                    _ = it.next();
+                    const prev_id = it.list[it.i - 1].id;
 
                     if (prev_id == .Keyword_void) {
                         const ptr = try transCreateNodePtrType(c, false, false, .Asterisk);
@@ -6096,15 +6117,14 @@ fn parseCSuffixOpExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
             },
             .LBracket => {
                 const arr_node = try transCreateNodeArrayAccess(c, node);
-                arr_node.index_expr = try parseCPrefixOpExpr(c, it, source, source_loc, scope);
+                arr_node.index_expr = try parseCPrefixOpExpr(c, it, source_loc, scope);
                 arr_node.rtoken = try appendToken(c, .RBracket, "]");
                 node = &arr_node.base;
-                if (it.next().?.id != .RBracket) {
-                    const first_tok = it.list.at(0);
+                if (it.next().? != .RBracket) {
                     try failDecl(
                         c,
                         source_loc,
-                        source[first_tok.start..first_tok.end],
+                        it.slice(0),
                         "unable to translate C expr: expected ']'",
                         .{},
                     );
@@ -6117,23 +6137,21 @@ fn parseCSuffixOpExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
                 var call_params = std.ArrayList(*ast.Node).init(c.gpa);
                 defer call_params.deinit();
                 while (true) {
-                    const arg = try parseCPrefixOpExpr(c, it, source, source_loc, scope);
+                    const arg = try parseCPrefixOpExpr(c, it, source_loc, scope);
                     try call_params.append(arg);
-                    const next = it.next().?;
-                    if (next.id == .Comma)
-                        _ = try appendToken(c, .Comma, ",")
-                    else if (next.id == .RParen)
-                        break
-                    else {
-                        const first_tok = it.list.at(0);
-                        try failDecl(
-                            c,
-                            source_loc,
-                            source[first_tok.start..first_tok.end],
-                            "unable to translate C expr: expected ',' or ')'",
-                            .{},
-                        );
-                        return error.ParseError;
+                    switch (it.next().?) {
+                        .Comma => _ = try appendToken(c, .Comma, ","),
+                        .RParen => break,
+                        else => {
+                            try failDecl(
+                                c,
+                                source_loc,
+                                it.slice(0),
+                                "unable to translate C expr: expected ',' or ')'",
+                                .{},
+                            );
+                            return error.ParseError;
+                        },
                     }
                 }
                 const call_node = try ast.Node.Call.alloc(c.arena, call_params.items.len);
@@ -6158,23 +6176,21 @@ fn parseCSuffixOpExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
                 defer init_vals.deinit();
                 while (true) {
-                    const val = try parseCPrefixOpExpr(c, it, source, source_loc, scope);
+                    const val = try parseCPrefixOpExpr(c, it, source_loc, scope);
                     try init_vals.append(val);
-                    const next = it.next().?;
-                    if (next.id == .Comma)
-                        _ = try appendToken(c, .Comma, ",")
-                    else if (next.id == .RBrace)
-                        break
-                    else {
-                        const first_tok = it.list.at(0);
-                        try failDecl(
-                            c,
-                            source_loc,
-                            source[first_tok.start..first_tok.end],
-                            "unable to translate C expr: expected ',' or '}}'",
-                            .{},
-                        );
-                        return error.ParseError;
+                    switch (it.next().?) {
+                        .Comma => _ = try appendToken(c, .Comma, ","),
+                        .RBrace => break,
+                        else => {
+                            try failDecl(
+                                c,
+                                source_loc,
+                                it.slice(0),
+                                "unable to translate C expr: expected ',' or '}}'",
+                                .{},
+                            );
+                            return error.ParseError;
+                        },
                     }
                 }
                 const tuple_node = try ast.Node.StructInitializerDot.alloc(c.arena, init_vals.items.len);
@@ -6221,22 +6237,22 @@ fn parseCSuffixOpExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
                 op_id = .ArrayCat;
                 op_token = try appendToken(c, .PlusPlus, "++");
-                _ = it.prev();
+                it.i -= 1;
             },
             .Identifier => {
                 op_id = .ArrayCat;
                 op_token = try appendToken(c, .PlusPlus, "++");
-                _ = it.prev();
+                it.i -= 1;
             },
             else => {
-                _ = it.prev();
+                it.i -= 1;
                 return node;
             },
         }
 
         const cast_fn = if (bool_op) macroIntToBool else macroBoolToInt;
         const lhs_node = try cast_fn(c, node);
-        const rhs_node = try parseCPrefixOpExpr(c, it, source, source_loc, scope);
+        const rhs_node = try parseCPrefixOpExpr(c, it, source_loc, scope);
         const op_node = try c.arena.create(ast.Node.SimpleInfixOp);
         op_node.* = .{
             .base = .{ .tag = op_id },
@@ -6248,38 +6264,36 @@ fn parseCSuffixOpExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
     }
 }
 
-fn parseCPrefixOpExpr(c: *Context, it: *CTokenList.Iterator, source: []const u8, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
-    const op_tok = it.next().?;
-    switch (op_tok.id) {
+fn parseCPrefixOpExpr(c: *Context, it: *CTokIterator, source_loc: ZigClangSourceLocation, scope: *Scope) ParseError!*ast.Node {
+    switch (it.next().?) {
         .Bang => {
             const node = try transCreateNodeSimplePrefixOp(c, .BoolNot, .Bang, "!");
-            node.rhs = try parseCPrefixOpExpr(c, it, source, source_loc, scope);
+            node.rhs = try parseCPrefixOpExpr(c, it, source_loc, scope);
             return &node.base;
         },
         .Minus => {
            const node = try transCreateNodeSimplePrefixOp(c, .Negation, .Minus, "-");
-            node.rhs = try parseCPrefixOpExpr(c, it, source, source_loc, scope);
+            node.rhs = try parseCPrefixOpExpr(c, it, source_loc, scope);
             return &node.base;
         },
-        .Plus => return try parseCPrefixOpExpr(c, it, source, source_loc, scope),
+        .Plus => return try parseCPrefixOpExpr(c, it, source_loc, scope),
         .Tilde => {
             const node = try transCreateNodeSimplePrefixOp(c, .BitNot, .Tilde, "~");
-            node.rhs = try parseCPrefixOpExpr(c, it, source, source_loc, scope);
+            node.rhs = try parseCPrefixOpExpr(c, it, source_loc, scope);
             return &node.base;
         },
         .Asterisk => {
-            const node = try parseCPrefixOpExpr(c, it, source, source_loc, scope);
+            const node = try parseCPrefixOpExpr(c, it, source_loc, scope);
             return try transCreateNodePtrDeref(c, node);
         },
         .Ampersand => {
             const node = try transCreateNodeSimplePrefixOp(c, .AddressOf, .Ampersand, "&");
-            node.rhs = try parseCPrefixOpExpr(c, it, source, source_loc, scope);
+            node.rhs = try parseCPrefixOpExpr(c, it, source_loc, scope);
             return &node.base;
         },
         else => {
-            _ = it.prev();
-            return try parseCSuffixOpExpr(c, it, source, source_loc, scope);
+            it.i -= 1;
+            return try parseCSuffixOpExpr(c, it, source_loc, scope);
         },
     }
 }
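A closing note on the putback idiom that recurs all through the diff above: the old SegmentedList iterator offered prev(), so one-token lookahead ended with `_ = it.prev();`; with the index-based iterator the same putback is just `it.i -= 1;`. A self-contained sketch of that pattern — TokId, Tok, and TokIterator are hypothetical simplified types again, not the real translate_c code:

    const std = @import("std");

    const TokId = enum { Identifier, Comma, Eof };
    const Tok = struct { id: TokId };

    const TokIterator = struct {
        list: []const Tok,
        i: usize = 0,

        fn next(self: *TokIterator) ?TokId {
            if (self.i + 1 >= self.list.len) return null;
            self.i += 1;
            return self.list[self.i].id;
        }
    };

    test "putback is an index decrement" {
        const toks = [_]Tok{ .{ .id = .Identifier }, .{ .id = .Identifier }, .{ .id = .Eof } };
        var it = TokIterator{ .list = &toks };
        if (it.next().? != .Comma) {
            it.i -= 1; // put the token back; no iterator prev() needed
        }
        std.testing.expect(it.next().? == .Identifier);
    }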