fix regressions found by test suite

Andrew Kelley 2020-03-10 20:22:30 -04:00
parent 9abee660dc
commit 2bff0dda79
11 changed files with 103 additions and 128 deletions

View File

@ -40,12 +40,9 @@ pub fn main() !void {
var out_file = try fs.cwd().createFile(out_file_name, .{});
defer out_file.close();
var file_in_stream = in_file.inStream();
const input_file_bytes = try in_file.inStream().readAllAlloc(allocator, max_doc_file_size);
const input_file_bytes = try file_in_stream.stream.readAllAlloc(allocator, max_doc_file_size);
var file_out_stream = out_file.outStream();
var buffered_out_stream = io.BufferedOutStream(fs.File.WriteError).init(&file_out_stream.stream);
var buffered_out_stream = io.bufferedOutStream(out_file.outStream());
var tokenizer = Tokenizer.init(in_file_name, input_file_bytes);
var toc = try genToc(allocator, &tokenizer);
@ -53,7 +50,7 @@ pub fn main() !void {
try fs.cwd().makePath(tmp_dir_name);
defer fs.deleteTree(tmp_dir_name) catch {};
try genHtml(allocator, &tokenizer, &toc, &buffered_out_stream.stream, zig_exe);
try genHtml(allocator, &tokenizer, &toc, buffered_out_stream.outStream(), zig_exe);
try buffered_out_stream.flush();
}
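
For reference, a minimal self-contained sketch of the buffered-output pattern this hunk switches to: io.bufferedOutStream() wraps a plain out stream, writes go through the wrapper's outStream(), and write() becomes writeAll(). The output path is illustrative, and the sketch assumes the post-change std of this commit's era.

```zig
const std = @import("std");
const fs = std.fs;
const io = std.io;

pub fn main() !void {
    // "example.html" is an illustrative output path.
    var out_file = try fs.cwd().createFile("example.html", .{});
    defer out_file.close();

    // io.bufferedOutStream wraps any out stream; the error-set type parameter
    // required by the old io.BufferedOutStream(...) initializer is gone.
    var buffered_out_stream = io.bufferedOutStream(out_file.outStream());
    const out = buffered_out_stream.outStream();

    try out.writeAll("<ul>\n"); // write() is now writeAll() for plain byte slices
    try out.print("  <li>{}</li>\n", .{"item"});
    try out.writeAll("</ul>\n");

    // Flush the buffered wrapper itself when done.
    try buffered_out_stream.flush();
}
```
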
@ -327,8 +324,7 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
var toc_buf = try std.Buffer.initSize(allocator, 0);
defer toc_buf.deinit();
var toc_buf_adapter = io.BufferOutStream.init(&toc_buf);
var toc = &toc_buf_adapter.stream;
var toc = toc_buf.outStream();
var nodes = std.ArrayList(Node).init(allocator);
defer nodes.deinit();
@ -342,7 +338,7 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
if (header_stack_size != 0) {
return parseError(tokenizer, token, "unbalanced headers", .{});
}
try toc.write(" </ul>\n");
try toc.writeAll(" </ul>\n");
break;
},
Token.Id.Content => {
@ -407,7 +403,7 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
if (last_columns) |n| {
try toc.print("<ul style=\"columns: {}\">\n", .{n});
} else {
try toc.write("<ul>\n");
try toc.writeAll("<ul>\n");
}
} else {
last_action = Action.Open;
@ -424,9 +420,9 @@ fn genToc(allocator: *mem.Allocator, tokenizer: *Tokenizer) !Toc {
if (last_action == Action.Close) {
try toc.writeByteNTimes(' ', 8 + header_stack_size * 4);
try toc.write("</ul></li>\n");
try toc.writeAll("</ul></li>\n");
} else {
try toc.write("</li>\n");
try toc.writeAll("</li>\n");
last_action = Action.Close;
}
} else if (mem.eql(u8, tag_name, "see_also")) {
@ -614,8 +610,7 @@ fn urlize(allocator: *mem.Allocator, input: []const u8) ![]u8 {
var buf = try std.Buffer.initSize(allocator, 0);
defer buf.deinit();
var buf_adapter = io.BufferOutStream.init(&buf);
var out = &buf_adapter.stream;
const out = buf.outStream();
for (input) |c| {
switch (c) {
'a'...'z', 'A'...'Z', '_', '-', '0'...'9' => {
@ -634,8 +629,7 @@ fn escapeHtml(allocator: *mem.Allocator, input: []const u8) ![]u8 {
var buf = try std.Buffer.initSize(allocator, 0);
defer buf.deinit();
var buf_adapter = io.BufferOutStream.init(&buf);
var out = &buf_adapter.stream;
const out = buf.outStream();
try writeEscaped(out, input);
return buf.toOwnedSlice();
}
@ -643,10 +637,10 @@ fn escapeHtml(allocator: *mem.Allocator, input: []const u8) ![]u8 {
fn writeEscaped(out: var, input: []const u8) !void {
for (input) |c| {
try switch (c) {
'&' => out.write("&amp;"),
'<' => out.write("&lt;"),
'>' => out.write("&gt;"),
'"' => out.write("&quot;"),
'&' => out.writeAll("&amp;"),
'<' => out.writeAll("&lt;"),
'>' => out.writeAll("&gt;"),
'"' => out.writeAll("&quot;"),
else => out.writeByte(c),
};
}
@ -681,8 +675,7 @@ fn termColor(allocator: *mem.Allocator, input: []const u8) ![]u8 {
var buf = try std.Buffer.initSize(allocator, 0);
defer buf.deinit();
var buf_adapter = io.BufferOutStream.init(&buf);
var out = &buf_adapter.stream;
var out = buf.outStream();
var number_start_index: usize = undefined;
var first_number: usize = undefined;
var second_number: usize = undefined;
@ -743,7 +736,7 @@ fn termColor(allocator: *mem.Allocator, input: []const u8) ![]u8 {
'm' => {
state = TermState.Start;
while (open_span_count != 0) : (open_span_count -= 1) {
try out.write("</span>");
try out.writeAll("</span>");
}
if (first_number != 0 or second_number != 0) {
try out.print("<span class=\"t{}_{}\">", .{ first_number, second_number });
@ -774,7 +767,7 @@ fn isType(name: []const u8) bool {
fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Token, raw_src: []const u8) !void {
const src = mem.trim(u8, raw_src, " \n");
try out.write("<code class=\"zig\">");
try out.writeAll("<code class=\"zig\">");
var tokenizer = std.zig.Tokenizer.init(src);
var index: usize = 0;
var next_tok_is_fn = false;
@ -835,15 +828,15 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
.Keyword_allowzero,
.Keyword_while,
=> {
try out.write("<span class=\"tok-kw\">");
try out.writeAll("<span class=\"tok-kw\">");
try writeEscaped(out, src[token.start..token.end]);
try out.write("</span>");
try out.writeAll("</span>");
},
.Keyword_fn => {
try out.write("<span class=\"tok-kw\">");
try out.writeAll("<span class=\"tok-kw\">");
try writeEscaped(out, src[token.start..token.end]);
try out.write("</span>");
try out.writeAll("</span>");
next_tok_is_fn = true;
},
@ -852,24 +845,24 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
.Keyword_true,
.Keyword_false,
=> {
try out.write("<span class=\"tok-null\">");
try out.writeAll("<span class=\"tok-null\">");
try writeEscaped(out, src[token.start..token.end]);
try out.write("</span>");
try out.writeAll("</span>");
},
.StringLiteral,
.MultilineStringLiteralLine,
.CharLiteral,
=> {
try out.write("<span class=\"tok-str\">");
try out.writeAll("<span class=\"tok-str\">");
try writeEscaped(out, src[token.start..token.end]);
try out.write("</span>");
try out.writeAll("</span>");
},
.Builtin => {
try out.write("<span class=\"tok-builtin\">");
try out.writeAll("<span class=\"tok-builtin\">");
try writeEscaped(out, src[token.start..token.end]);
try out.write("</span>");
try out.writeAll("</span>");
},
.LineComment,
@ -877,16 +870,16 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
.ContainerDocComment,
.ShebangLine,
=> {
try out.write("<span class=\"tok-comment\">");
try out.writeAll("<span class=\"tok-comment\">");
try writeEscaped(out, src[token.start..token.end]);
try out.write("</span>");
try out.writeAll("</span>");
},
.Identifier => {
if (prev_tok_was_fn) {
try out.write("<span class=\"tok-fn\">");
try out.writeAll("<span class=\"tok-fn\">");
try writeEscaped(out, src[token.start..token.end]);
try out.write("</span>");
try out.writeAll("</span>");
} else {
const is_int = blk: {
if (src[token.start] != 'i' and src[token.start] != 'u')
@ -901,9 +894,9 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
break :blk true;
};
if (is_int or isType(src[token.start..token.end])) {
try out.write("<span class=\"tok-type\">");
try out.writeAll("<span class=\"tok-type\">");
try writeEscaped(out, src[token.start..token.end]);
try out.write("</span>");
try out.writeAll("</span>");
} else {
try writeEscaped(out, src[token.start..token.end]);
}
@ -913,9 +906,9 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
.IntegerLiteral,
.FloatLiteral,
=> {
try out.write("<span class=\"tok-number\">");
try out.writeAll("<span class=\"tok-number\">");
try writeEscaped(out, src[token.start..token.end]);
try out.write("</span>");
try out.writeAll("</span>");
},
.Bang,
@ -983,7 +976,7 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
}
index = token.end;
}
try out.write("</code>");
try out.writeAll("</code>");
}
fn tokenizeAndPrint(docgen_tokenizer: *Tokenizer, out: var, source_token: Token) !void {
@ -1002,7 +995,7 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var
for (toc.nodes) |node| {
switch (node) {
.Content => |data| {
try out.write(data);
try out.writeAll(data);
},
.Link => |info| {
if (!toc.urls.contains(info.url)) {
@ -1011,12 +1004,12 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var
try out.print("<a href=\"#{}\">{}</a>", .{ info.url, info.name });
},
.Nav => {
try out.write(toc.toc);
try out.writeAll(toc.toc);
},
.Builtin => |tok| {
try out.write("<pre>");
try out.writeAll("<pre>");
try tokenizeAndPrintRaw(tokenizer, out, tok, builtin_code);
try out.write("</pre>");
try out.writeAll("</pre>");
},
.HeaderOpen => |info| {
try out.print(
@ -1025,7 +1018,7 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var
);
},
.SeeAlso => |items| {
try out.write("<p>See also:</p><ul>\n");
try out.writeAll("<p>See also:</p><ul>\n");
for (items) |item| {
const url = try urlize(allocator, item.name);
if (!toc.urls.contains(url)) {
@ -1033,7 +1026,7 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var
}
try out.print("<li><a href=\"#{}\">{}</a></li>\n", .{ url, item.name });
}
try out.write("</ul>\n");
try out.writeAll("</ul>\n");
},
.Syntax => |content_tok| {
try tokenizeAndPrint(tokenizer, out, content_tok);
@ -1047,9 +1040,9 @@ fn genHtml(allocator: *mem.Allocator, tokenizer: *Tokenizer, toc: *Toc, out: var
if (!code.is_inline) {
try out.print("<p class=\"file\">{}.zig</p>", .{code.name});
}
try out.write("<pre>");
try out.writeAll("<pre>");
try tokenizeAndPrint(tokenizer, out, code.source_token);
try out.write("</pre>");
try out.writeAll("</pre>");
const name_plus_ext = try std.fmt.allocPrint(allocator, "{}.zig", .{code.name});
const tmp_source_file_name = try fs.path.join(
allocator,

View File

@ -230,7 +230,7 @@
const std = @import("std");
pub fn main() !void {
const stdout = &std.io.getStdOut().outStream().stream;
const stdout = std.io.getStdOut().outStream();
try stdout.print("Hello, {}!\n", .{"world"});
}
{#code_end#}

View File

@ -926,8 +926,7 @@ pub const Builder = struct {
try child.spawn();
var stdout_file_in_stream = child.stdout.?.inStream();
const stdout = try stdout_file_in_stream.stream.readAllAlloc(self.allocator, max_output_size);
const stdout = try child.stdout.?.inStream().readAllAlloc(self.allocator, max_output_size);
errdefer self.allocator.free(stdout);
const term = try child.wait();
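
The hunk above collapses the old file_in_stream variable and its .stream field into a single readAllAlloc call on child.stdout.?.inStream(). A hedged sketch of that capture pattern follows; the helper name, size limit, and ChildProcess setup details are illustrative and assume the std.ChildProcess API of this era.

```zig
const std = @import("std");

/// Illustrative helper: spawn a command and return its stdout as an
/// allocated slice, using the new File.inStream() one-liner.
fn captureStdout(allocator: *std.mem.Allocator, argv: []const []const u8) ![]u8 {
    const max_output_size = 100 * 1024; // illustrative limit

    const child = try std.ChildProcess.init(argv, allocator);
    defer child.deinit();
    child.stdout_behavior = .Pipe;

    try child.spawn();
    // No intermediate stdout_file_in_stream variable and no .stream field:
    // inStream() now yields a stream that can be used directly.
    const stdout = try child.stdout.?.inStream().readAllAlloc(allocator, max_output_size);
    errdefer allocator.free(stdout);

    _ = try child.wait();
    return stdout;
}
```
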

View File

@ -175,8 +175,7 @@ pub const RunStep = struct {
switch (self.stdout_action) {
.expect_exact, .expect_matches => {
var stdout_file_in_stream = child.stdout.?.inStream();
stdout = stdout_file_in_stream.stream.readAllAlloc(self.builder.allocator, max_stdout_size) catch unreachable;
stdout = child.stdout.?.inStream().readAllAlloc(self.builder.allocator, max_stdout_size) catch unreachable;
},
.inherit, .ignore => {},
}
@ -186,8 +185,7 @@ pub const RunStep = struct {
switch (self.stderr_action) {
.expect_exact, .expect_matches => {
var stderr_file_in_stream = child.stderr.?.inStream();
stderr = stderr_file_in_stream.stream.readAllAlloc(self.builder.allocator, max_stdout_size) catch unreachable;
stderr = child.stderr.?.inStream().readAllAlloc(self.builder.allocator, max_stdout_size) catch unreachable;
},
.inherit, .ignore => {},
}

View File

@ -56,8 +56,7 @@ pub const Coff = struct {
pub fn loadHeader(self: *Coff) !void {
const pe_pointer_offset = 0x3C;
var file_stream = self.in_file.inStream();
const in = &file_stream.stream;
const in = self.in_file.inStream();
var magic: [2]u8 = undefined;
try in.readNoEof(magic[0..]);
@ -89,11 +88,11 @@ pub const Coff = struct {
else => return error.InvalidMachine,
}
try self.loadOptionalHeader(&file_stream);
try self.loadOptionalHeader();
}
fn loadOptionalHeader(self: *Coff, file_stream: *File.InStream) !void {
const in = &file_stream.stream;
fn loadOptionalHeader(self: *Coff) !void {
const in = self.in_file.inStream();
self.pe_header.magic = try in.readIntLittle(u16);
// For now we're only interested in finding the reference to the .pdb,
// so we'll skip most of this header, which size is different in 32
@ -136,8 +135,7 @@ pub const Coff = struct {
const debug_dir = &self.pe_header.data_directory[DEBUG_DIRECTORY];
const file_offset = debug_dir.virtual_address - header.virtual_address + header.pointer_to_raw_data;
var file_stream = self.in_file.inStream();
const in = &file_stream.stream;
const in = self.in_file.inStream();
try self.in_file.seekTo(file_offset);
// Find the correct DebugDirectoryEntry, and where its data is stored.
@ -188,8 +186,7 @@ pub const Coff = struct {
try self.sections.ensureCapacity(self.coff_header.number_of_sections);
var file_stream = self.in_file.inStream();
const in = &file_stream.stream;
const in = self.in_file.inStream();
var name: [8]u8 = undefined;
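
The Coff hunks above replace the file_stream variable plus .stream field with direct calls to in_file.inStream(). A small illustrative sketch of that style of binary header reading; the function name and magic check are made up for the example.

```zig
const std = @import("std");

/// Illustrative: read a two-byte magic and a little-endian size field.
fn readMagicAndSize(file: std.fs.File) !u32 {
    // Each call to inStream() returns a lightweight stream over the file;
    // the old pattern of keeping a file_stream variable and going through
    // its .stream field is no longer needed.
    const in = file.inStream();

    var magic: [2]u8 = undefined;
    try in.readNoEof(magic[0..]);
    if (!std.mem.eql(u8, magic[0..], "MZ")) return error.InvalidMagic;

    return try in.readIntLittle(u32);
}
```
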

View File

@ -475,15 +475,15 @@ fn populateModule(di: *ModuleDebugInfo, mod: *Module) !void {
const modi = di.pdb.getStreamById(mod.mod_info.ModuleSymStream) orelse return error.MissingDebugInfo;
const signature = try modi.stream.readIntLittle(u32);
const signature = try modi.inStream().readIntLittle(u32);
if (signature != 4)
return error.InvalidDebugInfo;
mod.symbols = try allocator.alloc(u8, mod.mod_info.SymByteSize - 4);
try modi.stream.readNoEof(mod.symbols);
try modi.inStream().readNoEof(mod.symbols);
mod.subsect_info = try allocator.alloc(u8, mod.mod_info.C13ByteSize);
try modi.stream.readNoEof(mod.subsect_info);
try modi.inStream().readNoEof(mod.subsect_info);
var sect_offset: usize = 0;
var skip_len: usize = undefined;
@ -656,11 +656,11 @@ fn openCoffDebugInfo(allocator: *mem.Allocator, coff_file_path: [:0]const u16) !
try di.pdb.openFile(di.coff, path);
var pdb_stream = di.pdb.getStream(pdb.StreamType.Pdb) orelse return error.InvalidDebugInfo;
const version = try pdb_stream.stream.readIntLittle(u32);
const signature = try pdb_stream.stream.readIntLittle(u32);
const age = try pdb_stream.stream.readIntLittle(u32);
const version = try pdb_stream.inStream().readIntLittle(u32);
const signature = try pdb_stream.inStream().readIntLittle(u32);
const age = try pdb_stream.inStream().readIntLittle(u32);
var guid: [16]u8 = undefined;
try pdb_stream.stream.readNoEof(&guid);
try pdb_stream.inStream().readNoEof(&guid);
if (version != 20000404) // VC70, only value observed by LLVM team
return error.UnknownPDBVersion;
if (!mem.eql(u8, &di.coff.guid, &guid) or di.coff.age != age)
@ -668,9 +668,9 @@ fn openCoffDebugInfo(allocator: *mem.Allocator, coff_file_path: [:0]const u16) !
// We validated the executable and pdb match.
const string_table_index = str_tab_index: {
const name_bytes_len = try pdb_stream.stream.readIntLittle(u32);
const name_bytes_len = try pdb_stream.inStream().readIntLittle(u32);
const name_bytes = try allocator.alloc(u8, name_bytes_len);
try pdb_stream.stream.readNoEof(name_bytes);
try pdb_stream.inStream().readNoEof(name_bytes);
const HashTableHeader = packed struct {
Size: u32,
@ -680,17 +680,17 @@ fn openCoffDebugInfo(allocator: *mem.Allocator, coff_file_path: [:0]const u16) !
return cap * 2 / 3 + 1;
}
};
const hash_tbl_hdr = try pdb_stream.stream.readStruct(HashTableHeader);
const hash_tbl_hdr = try pdb_stream.inStream().readStruct(HashTableHeader);
if (hash_tbl_hdr.Capacity == 0)
return error.InvalidDebugInfo;
if (hash_tbl_hdr.Size > HashTableHeader.maxLoad(hash_tbl_hdr.Capacity))
return error.InvalidDebugInfo;
const present = try readSparseBitVector(&pdb_stream.stream, allocator);
const present = try readSparseBitVector(&pdb_stream.inStream(), allocator);
if (present.len != hash_tbl_hdr.Size)
return error.InvalidDebugInfo;
const deleted = try readSparseBitVector(&pdb_stream.stream, allocator);
const deleted = try readSparseBitVector(&pdb_stream.inStream(), allocator);
const Bucket = struct {
first: u32,
@ -698,8 +698,8 @@ fn openCoffDebugInfo(allocator: *mem.Allocator, coff_file_path: [:0]const u16) !
};
const bucket_list = try allocator.alloc(Bucket, present.len);
for (present) |_| {
const name_offset = try pdb_stream.stream.readIntLittle(u32);
const name_index = try pdb_stream.stream.readIntLittle(u32);
const name_offset = try pdb_stream.inStream().readIntLittle(u32);
const name_index = try pdb_stream.inStream().readIntLittle(u32);
const name = mem.toSlice(u8, @ptrCast([*:0]u8, name_bytes.ptr + name_offset));
if (mem.eql(u8, name, "/names")) {
break :str_tab_index name_index;
@ -714,7 +714,7 @@ fn openCoffDebugInfo(allocator: *mem.Allocator, coff_file_path: [:0]const u16) !
const dbi = di.pdb.dbi;
// Dbi Header
const dbi_stream_header = try dbi.stream.readStruct(pdb.DbiStreamHeader);
const dbi_stream_header = try dbi.inStream().readStruct(pdb.DbiStreamHeader);
if (dbi_stream_header.VersionHeader != 19990903) // V70, only value observed by LLVM team
return error.UnknownPDBVersion;
if (dbi_stream_header.Age != age)
@ -728,7 +728,7 @@ fn openCoffDebugInfo(allocator: *mem.Allocator, coff_file_path: [:0]const u16) !
// Module Info Substream
var mod_info_offset: usize = 0;
while (mod_info_offset != mod_info_size) {
const mod_info = try dbi.stream.readStruct(pdb.ModInfo);
const mod_info = try dbi.inStream().readStruct(pdb.ModInfo);
var this_record_len: usize = @sizeOf(pdb.ModInfo);
const module_name = try dbi.readNullTermString(allocator);
@ -766,14 +766,14 @@ fn openCoffDebugInfo(allocator: *mem.Allocator, coff_file_path: [:0]const u16) !
var sect_contribs = ArrayList(pdb.SectionContribEntry).init(allocator);
var sect_cont_offset: usize = 0;
if (section_contrib_size != 0) {
const ver = @intToEnum(pdb.SectionContrSubstreamVersion, try dbi.stream.readIntLittle(u32));
const ver = @intToEnum(pdb.SectionContrSubstreamVersion, try dbi.inStream().readIntLittle(u32));
if (ver != pdb.SectionContrSubstreamVersion.Ver60)
return error.InvalidDebugInfo;
sect_cont_offset += @sizeOf(u32);
}
while (sect_cont_offset != section_contrib_size) {
const entry = try sect_contribs.addOne();
entry.* = try dbi.stream.readStruct(pdb.SectionContribEntry);
entry.* = try dbi.inStream().readStruct(pdb.SectionContribEntry);
sect_cont_offset += @sizeOf(pdb.SectionContribEntry);
if (sect_cont_offset > section_contrib_size)
@ -827,7 +827,7 @@ pub fn openElfDebugInfo(allocator: *mem.Allocator, elf_file_path: []const u8) !M
const str_section_off = shoff + @as(u64, hdr.e_shentsize) * @as(u64, hdr.e_shstrndx);
const str_shdr = @ptrCast(
*const elf.Shdr,
@alignCast(@alignOf(elf.Shdr), &mapped_mem[str_section_off]),
@alignCast(@alignOf(elf.Shdr), &mapped_mem[try math.cast(usize, str_section_off)]),
);
const header_strings = mapped_mem[str_shdr.sh_offset .. str_shdr.sh_offset + str_shdr.sh_size];
const shdrs = @ptrCast(

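The last hunk above also wraps str_section_off in try math.cast(usize, ...) before indexing mapped_mem. A hedged sketch of that bounds-checked narrowing, with illustrative names, assuming math.cast of this era returns error.Overflow for values that do not fit the target type.

```zig
const std = @import("std");
const math = std.math;

/// Illustrative: narrow a u64 file offset to usize before slicing
/// memory-mapped bytes, as the hunk above now does for str_section_off.
fn sliceAt(mapped_mem: []const u8, offset_u64: u64, len: usize) ![]const u8 {
    // math.cast fails with error.Overflow instead of silently truncating,
    // e.g. for a large offset on a 32-bit host.
    const offset = try math.cast(usize, offset_u64);
    return mapped_mem[offset .. offset + len];
}
```
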
View File

@ -495,8 +495,7 @@ const Msf = struct {
streams: []MsfStream,
fn openFile(self: *Msf, allocator: *mem.Allocator, file: File) !void {
var file_stream = file.inStream();
const in = &file_stream.stream;
const in = file.inStream();
const superblock = try in.readStruct(SuperBlock);
@ -529,7 +528,7 @@ const Msf = struct {
);
const begin = self.directory.pos;
const stream_count = try self.directory.stream.readIntLittle(u32);
const stream_count = try self.directory.inStream().readIntLittle(u32);
const stream_sizes = try allocator.alloc(u32, stream_count);
defer allocator.free(stream_sizes);
@ -538,7 +537,7 @@ const Msf = struct {
// and must be taken into account when resolving stream indices.
const Nil = 0xFFFFFFFF;
for (stream_sizes) |*s, i| {
const size = try self.directory.stream.readIntLittle(u32);
const size = try self.directory.inStream().readIntLittle(u32);
s.* = if (size == Nil) 0 else blockCountFromSize(size, superblock.BlockSize);
}
@ -553,7 +552,7 @@ const Msf = struct {
var blocks = try allocator.alloc(u32, size);
var j: u32 = 0;
while (j < size) : (j += 1) {
const block_id = try self.directory.stream.readIntLittle(u32);
const block_id = try self.directory.inStream().readIntLittle(u32);
const n = (block_id % superblock.BlockSize);
// 0 is for SuperBlock, 1 and 2 for FPMs.
if (block_id == 0 or n == 1 or n == 2 or block_id * superblock.BlockSize > try file.getEndPos())
@ -648,7 +647,7 @@ const MsfStream = struct {
fn readNullTermString(self: *MsfStream, allocator: *mem.Allocator) ![]u8 {
var list = ArrayList(u8).init(allocator);
while (true) {
const byte = try self.stream.readByte();
const byte = try self.inStream().readByte();
if (byte == 0) {
return list.toSlice();
}
@ -662,8 +661,7 @@ const MsfStream = struct {
var offset = self.pos % self.block_size;
try self.in_file.seekTo(block * self.block_size + offset);
var file_stream = self.in_file.inStream();
const in = &file_stream.stream;
const in = self.in_file.inStream();
var size: usize = 0;
var rem_buffer = buffer;

View File

@ -42,8 +42,8 @@ pub fn main() !void {
var targets = ArrayList([]const u8).init(allocator);
const stderr_stream = &io.getStdErr().outStream().stream;
const stdout_stream = &io.getStdOut().outStream().stream;
const stderr_stream = io.getStdErr().outStream();
const stdout_stream = io.getStdOut().outStream();
while (nextArg(args, &arg_idx)) |arg| {
if (mem.startsWith(u8, arg, "-D")) {
@ -159,7 +159,7 @@ fn usage(builder: *Builder, already_ran_build: bool, out_stream: var) !void {
try out_stream.print(" {s:22} {}\n", .{ name, top_level_step.description });
}
try out_stream.write(
try out_stream.writeAll(
\\
\\General Options:
\\ --help Print this help and exit
@ -184,7 +184,7 @@ fn usage(builder: *Builder, already_ran_build: bool, out_stream: var) !void {
}
}
try out_stream.write(
try out_stream.writeAll(
\\
\\Advanced Options:
\\ --build-file [file] Override path to build.zig

View File

@ -22,7 +22,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\
\\pub fn main() void {
\\ privateFunction();
\\ const stdout = &getStdOut().outStream().stream;
\\ const stdout = getStdOut().outStream();
\\ stdout.print("OK 2\n", .{}) catch unreachable;
\\}
\\
@ -37,7 +37,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\// purposefully conflicting function with main.zig
\\// but it's private so it should be OK
\\fn privateFunction() void {
\\ const stdout = &getStdOut().outStream().stream;
\\ const stdout = getStdOut().outStream();
\\ stdout.print("OK 1\n", .{}) catch unreachable;
\\}
\\
@ -63,7 +63,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
tc.addSourceFile("foo.zig",
\\usingnamespace @import("std").io;
\\pub fn foo_function() void {
\\ const stdout = &getStdOut().outStream().stream;
\\ const stdout = getStdOut().outStream();
\\ stdout.print("OK\n", .{}) catch unreachable;
\\}
);
@ -74,7 +74,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\
\\pub fn bar_function() void {
\\ if (foo_function()) {
\\ const stdout = &getStdOut().outStream().stream;
\\ const stdout = getStdOut().outStream();
\\ stdout.print("OK\n", .{}) catch unreachable;
\\ }
\\}
@ -106,7 +106,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\pub const a_text = "OK\n";
\\
\\pub fn ok() void {
\\ const stdout = &io.getStdOut().outStream().stream;
\\ const stdout = io.getStdOut().outStream();
\\ stdout.print(b_text, .{}) catch unreachable;
\\}
);
@ -124,7 +124,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\const io = @import("std").io;
\\
\\pub fn main() void {
\\ const stdout = &io.getStdOut().outStream().stream;
\\ const stdout = io.getStdOut().outStream();
\\ stdout.print("Hello, world!\n{d:4} {x:3} {c}\n", .{@as(u32, 12), @as(u16, 0x12), @as(u8, 'a')}) catch unreachable;
\\}
, "Hello, world!\n 12 12 a\n");
@ -267,7 +267,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\ var x_local : i32 = print_ok(x);
\\}
\\fn print_ok(val: @TypeOf(x)) @TypeOf(foo) {
\\ const stdout = &io.getStdOut().outStream().stream;
\\ const stdout = io.getStdOut().outStream();
\\ stdout.print("OK\n", .{}) catch unreachable;
\\ return 0;
\\}
@ -349,7 +349,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\pub fn main() void {
\\ const bar = Bar {.field2 = 13,};
\\ const foo = Foo {.field1 = bar,};
\\ const stdout = &io.getStdOut().outStream().stream;
\\ const stdout = io.getStdOut().outStream();
\\ if (!foo.method()) {
\\ stdout.print("BAD\n", .{}) catch unreachable;
\\ }
@ -363,7 +363,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
cases.add("defer with only fallthrough",
\\const io = @import("std").io;
\\pub fn main() void {
\\ const stdout = &io.getStdOut().outStream().stream;
\\ const stdout = io.getStdOut().outStream();
\\ stdout.print("before\n", .{}) catch unreachable;
\\ defer stdout.print("defer1\n", .{}) catch unreachable;
\\ defer stdout.print("defer2\n", .{}) catch unreachable;
@ -376,7 +376,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\const io = @import("std").io;
\\const os = @import("std").os;
\\pub fn main() void {
\\ const stdout = &io.getStdOut().outStream().stream;
\\ const stdout = io.getStdOut().outStream();
\\ stdout.print("before\n", .{}) catch unreachable;
\\ defer stdout.print("defer1\n", .{}) catch unreachable;
\\ defer stdout.print("defer2\n", .{}) catch unreachable;
@ -393,7 +393,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\ do_test() catch return;
\\}
\\fn do_test() !void {
\\ const stdout = &io.getStdOut().outStream().stream;
\\ const stdout = io.getStdOut().outStream();
\\ stdout.print("before\n", .{}) catch unreachable;
\\ defer stdout.print("defer1\n", .{}) catch unreachable;
\\ errdefer stdout.print("deferErr\n", .{}) catch unreachable;
@ -412,7 +412,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\ do_test() catch return;
\\}
\\fn do_test() !void {
\\ const stdout = &io.getStdOut().outStream().stream;
\\ const stdout = io.getStdOut().outStream();
\\ stdout.print("before\n", .{}) catch unreachable;
\\ defer stdout.print("defer1\n", .{}) catch unreachable;
\\ errdefer stdout.print("deferErr\n", .{}) catch unreachable;
@ -429,7 +429,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\const io = @import("std").io;
\\
\\pub fn main() void {
\\ const stdout = &io.getStdOut().outStream().stream;
\\ const stdout = io.getStdOut().outStream();
\\ stdout.print(foo_txt, .{}) catch unreachable;
\\}
, "1234\nabcd\n");
@ -448,9 +448,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\
\\pub fn main() !void {
\\ var args_it = std.process.args();
\\ var stdout_file = io.getStdOut();
\\ var stdout_adapter = stdout_file.outStream();
\\ const stdout = &stdout_adapter.stream;
\\ const stdout = io.getStdOut().outStream();
\\ var index: usize = 0;
\\ _ = args_it.skip();
\\ while (args_it.next(allocator)) |arg_or_err| : (index += 1) {
@ -489,9 +487,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\
\\pub fn main() !void {
\\ var args_it = std.process.args();
\\ var stdout_file = io.getStdOut();
\\ var stdout_adapter = stdout_file.outStream();
\\ const stdout = &stdout_adapter.stream;
\\ const stdout = io.getStdOut().outStream();
\\ var index: usize = 0;
\\ _ = args_it.skip();
\\ while (args_it.next(allocator)) |arg_or_err| : (index += 1) {

View File

@ -4,7 +4,7 @@ const io = std.io;
const fmt = std.fmt;
pub fn main() !void {
const stdout = &io.getStdOut().outStream().stream;
const stdout = io.getStdOut().outStream();
const stdin = io.getStdIn();
try stdout.print("Welcome to the Guess Number Game in Zig.\n", .{});
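
Throughout the test and tool hunks above, &getStdOut().outStream().stream becomes getStdOut().outStream(), and write() becomes writeAll(). A minimal sketch combining both, with help text borrowed from the usage hunk above; it assumes the post-change std of this commit's era.

```zig
const std = @import("std");
const io = std.io;

pub fn main() !void {
    // outStream() on the stdout File is used directly; the old
    // &getStdOut().outStream().stream double step is gone.
    const stdout = io.getStdOut().outStream();

    try stdout.print("Hello, {}!\n", .{"world"});

    // writeAll (the renamed write) takes any []const u8, including a
    // multiline string literal, so help text can go out in one call.
    try stdout.writeAll(
        \\General Options:
        \\  --help    Print this help and exit
        \\
    );
}
```
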

View File

@ -566,12 +566,9 @@ pub const StackTracesContext = struct {
}
child.spawn() catch |err| debug.panic("Unable to spawn {}: {}\n", .{ full_exe_path, @errorName(err) });
var stdout_file_in_stream = child.stdout.?.inStream();
var stderr_file_in_stream = child.stderr.?.inStream();
const stdout = stdout_file_in_stream.stream.readAllAlloc(b.allocator, max_stdout_size) catch unreachable;
const stdout = child.stdout.?.inStream().readAllAlloc(b.allocator, max_stdout_size) catch unreachable;
defer b.allocator.free(stdout);
const stderr = stderr_file_in_stream.stream.readAllAlloc(b.allocator, max_stdout_size) catch unreachable;
const stderr = child.stderr.?.inStream().readAllAlloc(b.allocator, max_stdout_size) catch unreachable;
defer b.allocator.free(stderr);
const term = child.wait() catch |err| {
@ -798,11 +795,8 @@ pub const CompileErrorContext = struct {
var stdout_buf = Buffer.initNull(b.allocator);
var stderr_buf = Buffer.initNull(b.allocator);
var stdout_file_in_stream = child.stdout.?.inStream();
var stderr_file_in_stream = child.stderr.?.inStream();
stdout_file_in_stream.stream.readAllBuffer(&stdout_buf, max_stdout_size) catch unreachable;
stderr_file_in_stream.stream.readAllBuffer(&stderr_buf, max_stdout_size) catch unreachable;
child.stdout.?.inStream().readAllBuffer(&stdout_buf, max_stdout_size) catch unreachable;
child.stderr.?.inStream().readAllBuffer(&stderr_buf, max_stdout_size) catch unreachable;
const term = child.wait() catch |err| {
debug.panic("Unable to spawn {}: {}\n", .{ zig_args.items[0], @errorName(err) });