miscellaneous improvements to generated docs

* introduce std.json.WriteStream API for writing JSON
   data to a stream
 * add WIP tools/merge_anal_dumps.zig for merging multiple semantic
   analysis dumps into one. See #3028
 * add std.json.Array, improves generated docs
 * add test for `std.process.argsAlloc`, improves test coverage and
   generated docs
master
Andrew Kelley 2019-10-10 23:22:18 -04:00
parent 7b20205e68
commit 01b2c291d5
No known key found for this signature in database
GPG Key ID: 7C5F548F728501A9
5 changed files with 380 additions and 7 deletions

View File

@ -508,7 +508,7 @@ pub fn BitInStream(endian: builtin.Endian, comptime Error: type) type {
};
}
/// This is a simple OutStream that writes to a slice, and returns an error
/// This is a simple OutStream that writes to a fixed buffer, and returns an error
/// when it runs out of space.
pub const SliceOutStream = struct {
pub const Error = error{OutOfSpace};

View File

@ -8,6 +8,8 @@ const testing = std.testing;
const mem = std.mem;
const maxInt = std.math.maxInt;
pub const WriteStream = @import("json/write_stream.zig").WriteStream;
// A single token slice into the parent string.
//
// Use `token.slice()` on the input at the current position to get the current slice.
@ -1001,6 +1003,7 @@ pub const ValueTree = struct {
};
pub const ObjectMap = StringHashMap(Value);
pub const Array = ArrayList(Value);
pub const Value = union(enum) {
Null,
@ -1008,7 +1011,7 @@ pub const Value = union(enum) {
Integer: i64,
Float: f64,
String: []const u8,
Array: ArrayList(Value),
Array: Array,
Object: ObjectMap,
pub fn dump(self: Value) void {
@ -1134,7 +1137,7 @@ pub const Parser = struct {
state: State,
copy_strings: bool,
// Stores parent nodes and un-combined Values.
stack: ArrayList(Value),
stack: Array,
const State = enum {
ObjectKey,
@ -1148,7 +1151,7 @@ pub const Parser = struct {
.allocator = allocator,
.state = State.Simple,
.copy_strings = copy_strings,
.stack = ArrayList(Value).init(allocator),
.stack = Array.init(allocator),
};
}
@ -1210,7 +1213,7 @@ pub const Parser = struct {
p.state = State.ObjectKey;
},
Token.Id.ArrayBegin => {
try p.stack.append(Value{ .Array = ArrayList(Value).init(allocator) });
try p.stack.append(Value{ .Array = Array.init(allocator) });
p.state = State.ArrayValue;
},
Token.Id.String => {
@ -1260,7 +1263,7 @@ pub const Parser = struct {
p.state = State.ObjectKey;
},
Token.Id.ArrayBegin => {
try p.stack.append(Value{ .Array = ArrayList(Value).init(allocator) });
try p.stack.append(Value{ .Array = Array.init(allocator) });
p.state = State.ArrayValue;
},
Token.Id.String => {
@ -1289,7 +1292,7 @@ pub const Parser = struct {
p.state = State.ObjectKey;
},
Token.Id.ArrayBegin => {
try p.stack.append(Value{ .Array = ArrayList(Value).init(allocator) });
try p.stack.append(Value{ .Array = Array.init(allocator) });
p.state = State.ArrayValue;
},
Token.Id.String => {
@ -1405,3 +1408,50 @@ test "json.parser.dynamic" {
test "import more json tests" {
_ = @import("json/test.zig");
}
// Round-trip test: serialize a document with WriteStream into a fixed
// buffer, then parse the produced text back with Parser and verify that
// every emitted value survives the trip.
test "write json then parse it" {
    // Destination buffer for the serialized JSON text.
    var out_buffer: [1000]u8 = undefined;

    // SliceOutStream writes into the fixed buffer and errors when full.
    var slice_out_stream = std.io.SliceOutStream.init(&out_buffer);
    const out_stream = &slice_out_stream.stream;
    // Max nesting depth of 4 is enough for the object/array shape below.
    var jw = WriteStream(@typeOf(out_stream).Child, 4).init(out_stream);

    // Emit: {"f": false, "t": true, "int": 1234,
    //        "array": [null, 12.34], "str": "hello"}
    try jw.beginObject();

    try jw.objectField("f");
    try jw.emitBool(false);

    try jw.objectField("t");
    try jw.emitBool(true);

    try jw.objectField("int");
    try jw.emitNumber(i32(1234));

    try jw.objectField("array");
    try jw.beginArray();

    try jw.arrayElem();
    try jw.emitNull();

    try jw.arrayElem();
    try jw.emitNumber(f64(12.34));

    try jw.endArray();

    try jw.objectField("str");
    try jw.emitString("hello");

    try jw.endObject();

    // Parse the bytes we just wrote; the fixed-buffer allocator keeps the
    // test free of heap allocation.
    var mem_buffer: [1024 * 20]u8 = undefined;
    const allocator = &std.heap.FixedBufferAllocator.init(&mem_buffer).allocator;
    var parser = Parser.init(allocator, false);

    const tree = try parser.parse(slice_out_stream.getWritten());

    // Each accessor asserts both the presence of the key and the active
    // union tag of the parsed Value.
    testing.expect(tree.root.Object.get("f").?.value.Bool == false);
    testing.expect(tree.root.Object.get("t").?.value.Bool == true);
    testing.expect(tree.root.Object.get("int").?.value.Integer == 1234);
    testing.expect(tree.root.Object.get("array").?.value.Array.at(0).Null == {});
    testing.expect(tree.root.Object.get("array").?.value.Array.at(1).Float == 12.34);
    testing.expect(mem.eql(u8, tree.root.Object.get("str").?.value.String, "hello"));
}

View File

@ -0,0 +1,211 @@
const std = @import("../std.zig");
const assert = std.debug.assert;
const maxInt = std.math.maxInt;
// Per-nesting-level serializer state used by WriteStream. One entry is
// kept for each open container plus the pending value slot.
const State = enum {
    Complete, // top-level value fully written; no further output allowed
    Value, // a single value must be emitted next (after arrayElem/objectField)
    ArrayStart, // '[' written, no elements yet (controls comma placement)
    Array, // inside an array with at least one element written
    ObjectStart, // '{' written, no fields yet (controls comma placement)
    Object, // inside an object with at least one field written
};
/// Writes JSON ([RFC8259](https://tools.ietf.org/html/rfc8259)) formatted data
/// to a stream. `max_depth` is a comptime-known upper bound on the nesting depth.
/// TODO A future iteration of this API will allow passing `null` for this value,
/// and disable safety checks in release builds.
/// Writes JSON ([RFC8259](https://tools.ietf.org/html/rfc8259)) formatted data
/// to a stream. `max_depth` is a comptime-known upper bound on the nesting depth.
/// TODO A future iteration of this API will allow passing `null` for this value,
/// and disable safety checks in release builds.
pub fn WriteStream(comptime OutStream: type, comptime max_depth: usize) type {
    return struct {
        const Self = @This();

        pub const Stream = OutStream;

        /// The string used for indenting.
        one_indent: []const u8 = " ",

        /// The string used as a newline character.
        newline: []const u8 = "\n",

        stream: *OutStream,
        // Index of the innermost state; state[0] is the Complete sentinel
        // reached after the top-level value has been written.
        state_index: usize,
        state: [max_depth]State,

        /// Initializes a WriteStream expecting exactly one top-level value.
        pub fn init(stream: *OutStream) Self {
            var self = Self{
                .stream = stream,
                .state_index = 1,
                .state = undefined,
            };
            self.state[0] = .Complete;
            self.state[1] = .Value;
            return self;
        }

        /// Opens an array. Must be balanced by a later `endArray`.
        pub fn beginArray(self: *Self) !void {
            assert(self.state[self.state_index] == State.Value); // need to call arrayElem or objectField
            try self.stream.writeByte('[');
            self.state[self.state_index] = State.ArrayStart;
        }

        /// Opens an object. Must be balanced by a later `endObject`.
        pub fn beginObject(self: *Self) !void {
            assert(self.state[self.state_index] == State.Value); // need to call arrayElem or objectField
            try self.stream.writeByte('{');
            self.state[self.state_index] = State.ObjectStart;
        }

        /// Announces the next array element; follow with exactly one
        /// emit*/begin* call. Emits the separating comma and indentation.
        pub fn arrayElem(self: *Self) !void {
            const state = self.state[self.state_index];
            switch (state) {
                .Complete => unreachable,
                .Value => unreachable,
                .ObjectStart => unreachable,
                .Object => unreachable,
                .Array, .ArrayStart => {
                    if (state == .Array) {
                        // Not the first element; separate from the previous one.
                        try self.stream.writeByte(',');
                    }
                    self.state[self.state_index] = .Array;
                    // NOTE(review): pushState past max_depth indexes out of
                    // bounds of `state` (caught by safety checks in safe
                    // builds) — callers must respect max_depth.
                    self.pushState(.Value);
                    try self.indent();
                },
            }
        }

        /// Announces the next object field, writing its (escaped) key and
        /// `": "`; follow with exactly one emit*/begin* call for the value.
        pub fn objectField(self: *Self, name: []const u8) !void {
            const state = self.state[self.state_index];
            switch (state) {
                .Complete => unreachable,
                .Value => unreachable,
                .ArrayStart => unreachable,
                .Array => unreachable,
                .Object, .ObjectStart => {
                    if (state == .Object) {
                        // Not the first field; separate from the previous one.
                        try self.stream.writeByte(',');
                    }
                    self.state[self.state_index] = .Object;
                    self.pushState(.Value);
                    try self.indent();
                    try self.writeEscapedString(name);
                    try self.stream.write(": ");
                },
            }
        }

        /// Closes the innermost array. An empty array closes on the same
        /// line; otherwise the bracket goes on a fresh, re-indented line.
        pub fn endArray(self: *Self) !void {
            switch (self.state[self.state_index]) {
                .Complete => unreachable,
                .Value => unreachable,
                .ObjectStart => unreachable,
                .Object => unreachable,
                .ArrayStart => {
                    try self.stream.writeByte(']');
                    self.popState();
                },
                .Array => {
                    try self.indent();
                    self.popState();
                    try self.stream.writeByte(']');
                },
            }
        }

        /// Closes the innermost object. An empty object closes on the same
        /// line; otherwise the brace goes on a fresh, re-indented line.
        pub fn endObject(self: *Self) !void {
            switch (self.state[self.state_index]) {
                .Complete => unreachable,
                .Value => unreachable,
                .ArrayStart => unreachable,
                .Array => unreachable,
                .ObjectStart => {
                    try self.stream.writeByte('}');
                    self.popState();
                },
                .Object => {
                    try self.indent();
                    self.popState();
                    try self.stream.writeByte('}');
                },
            }
        }

        /// Emits the literal `null` as the pending value.
        pub fn emitNull(self: *Self) !void {
            assert(self.state[self.state_index] == State.Value);
            try self.stream.write("null");
            self.popState();
        }

        /// Emits `true` or `false` as the pending value.
        pub fn emitBool(self: *Self, value: bool) !void {
            assert(self.state[self.state_index] == State.Value);
            if (value) {
                try self.stream.write("true");
            } else {
                try self.stream.write("false");
            }
            self.popState();
        }

        pub fn emitNumber(
            self: *Self,
            /// An integer, float, or `std.math.BigInt`. Emitted as a bare number if it fits losslessly
            /// in a IEEE 754 double float, otherwise emitted as a string to the full precision.
            value: var,
        ) !void {
            assert(self.state[self.state_index] == State.Value);
            switch (@typeInfo(@typeOf(value))) {
                // 2^52 = 4503599627370496: largest magnitude below which every
                // integer is exactly representable in an f64 mantissa.
                .Int => |info| if (info.bits < 53 or (value < 4503599627370496 and value > -4503599627370496)) {
                    try self.stream.print("{}", value);
                    self.popState();
                    return;
                },
                .Float => if (@floatCast(f64, value) == value) {
                    try self.stream.print("{}", value);
                    self.popState();
                    return;
                },
                else => {},
            }
            // Fallback: not losslessly representable as a JSON number;
            // emit as a string to preserve full precision.
            try self.stream.print("\"{}\"", value);
            self.popState();
        }

        /// Emits an escaped, quoted string as the pending value.
        pub fn emitString(self: *Self, string: []const u8) !void {
            // Same usage check as the other emit* methods: a value slot must
            // be open (via init, arrayElem, or objectField).
            assert(self.state[self.state_index] == State.Value);
            try self.writeEscapedString(string);
            self.popState();
        }

        // Writes `string` surrounded by double quotes, escaping the
        // characters JSON requires special forms for.
        // NOTE(review): control characters other than \t \r \n (8) (12) are
        // written through unescaped; RFC 8259 requires \u-escapes for all of
        // U+0000..U+001F — TODO confirm and extend.
        fn writeEscapedString(self: *Self, string: []const u8) !void {
            try self.stream.writeByte('"');
            for (string) |s| {
                switch (s) {
                    '"' => try self.stream.write("\\\""),
                    '\t' => try self.stream.write("\\t"),
                    '\r' => try self.stream.write("\\r"),
                    '\n' => try self.stream.write("\\n"),
                    8 => try self.stream.write("\\b"), // backspace
                    12 => try self.stream.write("\\f"), // form feed
                    '\\' => try self.stream.write("\\\\"),
                    else => try self.stream.writeByte(s),
                }
            }
            try self.stream.writeByte('"');
        }

        // Writes a newline followed by one indent unit per open container.
        fn indent(self: *Self) !void {
            assert(self.state_index >= 1);
            try self.stream.write(self.newline);
            var i: usize = 0;
            while (i < self.state_index - 1) : (i += 1) {
                try self.stream.write(self.one_indent);
            }
        }

        // Pushes a new innermost state onto the fixed-size state stack.
        fn pushState(self: *Self, state: State) void {
            self.state_index += 1;
            self.state[self.state_index] = state;
        }

        // Pops the innermost state, returning to the enclosing container.
        fn popState(self: *Self) void {
            self.state_index -= 1;
        }
    };
}

View File

@ -232,3 +232,8 @@ test "pipe" {
os.close(fds[1]);
os.close(fds[0]);
}
// Smoke test: fetching the process argument list succeeds and the result
// can be released.
test "argsAlloc" {
    var args = try std.process.argsAlloc(std.heap.direct_allocator);
    // argsAlloc's documented deallocation counterpart is argsFree; a plain
    // allocator.free of the outer slice does not account for the internal
    // layout of the backing allocation.
    std.process.argsFree(std.heap.direct_allocator, args);
}

107
tools/merge_anal_dumps.zig Normal file
View File

@ -0,0 +1,107 @@
const builtin = @import("builtin");
const std = @import("std");
const json = std.json;
const mem = std.mem;
// Entry point: each command-line argument is a path to a semantic analysis
// dump (JSON); all dumps are merged into one and rendered to stdout.
pub fn main() anyerror!void {
    // Arena owns every allocation below (args, file contents, parse trees);
    // everything is freed at once on exit.
    var arena = std.heap.ArenaAllocator.init(std.heap.direct_allocator);
    defer arena.deinit();
    const allocator = &arena.allocator;

    const args = try std.process.argsAlloc(allocator);

    // Re-initialized per input file; no deinit needed thanks to the arena.
    var parser: json.Parser = undefined;

    var dump = Dump.init(allocator);
    // args[0] is the program name; every remaining arg is a dump file path.
    for (args[1..]) |arg| {
        parser = json.Parser.init(allocator, false);
        const json_text = try std.io.readFileAlloc(allocator, arg);
        const tree = try parser.parse(json_text);
        try dump.mergeJson(tree.root);
    }

    const stdout = try std.io.getStdOut();
    try dump.render(&stdout.outStream().stream);
}
// Accumulates the union of several semantic-analysis dumps.
// WIP (see commit message / #3028): only params, targets and the file list
// are merged so far; astNodes merging is started but unfinished.
const Dump = struct {
    // Fields shared by all merged dumps; must agree across inputs
    // (see mergeSameStrings), so they start null until the first merge.
    zig_id: ?[]const u8 = null,
    zig_version: ?[]const u8 = null,
    root_name: ?[]const u8 = null,

    // One target string per merged dump, in merge order.
    targets: std.ArrayList([]const u8),

    // De-duplicated file paths: files_list gives index -> path,
    // files_map gives path -> index into files_list.
    files_list: std.ArrayList([]const u8),
    files_map: std.StringHashMap(usize),

    fn init(allocator: *mem.Allocator) Dump {
        return Dump{
            .targets = std.ArrayList([]const u8).init(allocator),
            .files_list = std.ArrayList([]const u8).init(allocator),
            .files_map = std.StringHashMap(usize).init(allocator),
        };
    }

    // Folds one parsed dump (its JSON root value) into this Dump.
    // Asserts the expected dump schema: missing keys or wrong value tags
    // hit the `.?`/union accesses below.
    fn mergeJson(self: *Dump, root: json.Value) !void {
        const params = &root.Object.get("params").?.value.Object;
        const zig_id = params.get("zigId").?.value.String;
        const zig_version = params.get("zigVersion").?.value.String;
        const root_name = params.get("rootName").?.value.String;
        // These must match across all merged dumps; error.MismatchedDumps
        // otherwise.
        try mergeSameStrings(&self.zig_id, zig_id);
        try mergeSameStrings(&self.zig_version, zig_version);
        try mergeSameStrings(&self.root_name, root_name);

        const target = params.get("target").?.value.String;
        try self.targets.append(target);

        // Merge files: de-duplicate into files_list/files_map and record a
        // mapping from the other dump's file indexes to ours, for use when
        // remapping index-based references (e.g. astNodes).
        const other_files = root.Object.get("files").?.value.Array.toSliceConst();
        var other_file_to_mine = std.AutoHashMap(usize, usize).init(self.a());
        for (other_files) |other_file, i| {
            const gop = try self.files_map.getOrPut(other_file.String);
            if (gop.found_existing) {
                try other_file_to_mine.putNoClobber(i, gop.kv.value);
            } else {
                gop.kv.value = self.files_list.len;
                try self.files_list.append(other_file.String);
            }
        }

        // WIP: astNodes merging not implemented yet — these are intentionally
        // unused scaffolding for the next step.
        const other_ast_nodes = root.Object.get("astNodes").?.value.Array.toSliceConst();
        var other_ast_node_to_mine = std.AutoHashMap(usize, usize).init(self.a());
    }

    // Renders the merged dump as JSON to `stream` using json.WriteStream.
    // Note: only typeKinds and files are emitted so far (WIP).
    fn render(self: *Dump, stream: var) !void {
        var jw = json.WriteStream(@typeOf(stream).Child, 10).init(stream);
        try jw.beginObject();

        try jw.objectField("typeKinds");
        try jw.beginArray();
        // Unrolled at comptime over the builtin TypeId enum fields.
        inline for (@typeInfo(builtin.TypeId).Enum.fields) |field| {
            try jw.arrayElem();
            try jw.emitString(field.name);
        }
        try jw.endArray();

        try jw.objectField("files");
        try jw.beginArray();
        for (self.files_list.toSliceConst()) |file| {
            try jw.arrayElem();
            try jw.emitString(file);
        }
        try jw.endArray();

        try jw.endObject();
    }

    // Convenience accessor for the allocator all containers share.
    fn a(self: Dump) *mem.Allocator {
        return self.targets.allocator;
    }

    // Sets *opt_dest on first use; afterwards requires every subsequent
    // src to be byte-identical, else error.MismatchedDumps.
    fn mergeSameStrings(opt_dest: *?[]const u8, src: []const u8) !void {
        if (opt_dest.*) |dest| {
            if (!mem.eql(u8, dest, src))
                return error.MismatchedDumps;
        } else {
            opt_dest.* = src;
        }
    }
};