update std lib to new hash map API

master
Andrew Kelley 2020-07-04 01:31:29 +00:00
parent b3b6ccba50
commit 632acffcbd
8 changed files with 111 additions and 108 deletions
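The hunks below apply one mechanical migration: the result of `getOrPut` exposes `.entry` instead of `.kv`, and `get` returns the stored value directly rather than a key/value struct. A minimal before/after sketch, assuming a `std.StringHashMap(u32)` and using only calls that appear in the hunks themselves (the map type and values here are placeholders, not part of the commit):

const std = @import("std");
const testing = std.testing;

test "getOrPut entry and direct get (sketch)" {
    var map = std.StringHashMap(u32).init(testing.allocator);
    defer map.deinit();

    const gop = try map.getOrPut("answer");
    if (!gop.found_existing) {
        gop.entry.value = 42; // previously: gop.kv.value = 42;
    }

    // `get` now returns the value directly; previously callers wrote
    // `map.get("answer").?.value`.
    testing.expect(map.get("answer").? == 42);
}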

View File

@@ -33,10 +33,10 @@ pub const BufMap = struct {
pub fn setMove(self: *BufMap, key: []u8, value: []u8) !void {
const get_or_put = try self.hash_map.getOrPut(key);
if (get_or_put.found_existing) {
self.free(get_or_put.kv.key);
get_or_put.kv.key = key;
self.free(get_or_put.entry.key);
get_or_put.entry.key = key;
}
get_or_put.kv.value = value;
get_or_put.entry.value = value;
}
/// `key` and `value` are copied into the BufMap.
@@ -45,19 +45,18 @@ pub const BufMap = struct {
errdefer self.free(value_copy);
const get_or_put = try self.hash_map.getOrPut(key);
if (get_or_put.found_existing) {
self.free(get_or_put.kv.value);
self.free(get_or_put.entry.value);
} else {
get_or_put.kv.key = self.copy(key) catch |err| {
get_or_put.entry.key = self.copy(key) catch |err| {
_ = self.hash_map.remove(key);
return err;
};
}
get_or_put.kv.value = value_copy;
get_or_put.entry.value = value_copy;
}
pub fn get(self: BufMap, key: []const u8) ?[]const u8 {
const entry = self.hash_map.get(key) orelse return null;
return entry.value;
return self.hash_map.get(key);
}
pub fn delete(self: *BufMap, key: []const u8) void {

View File

@@ -14,14 +14,12 @@ pub const BufSet = struct {
return self;
}
pub fn deinit(self: *const BufSet) void {
var it = self.hash_map.iterator();
while (true) {
const entry = it.next() orelse break;
pub fn deinit(self: *BufSet) void {
for (self.hash_map.items()) |entry| {
self.free(entry.key);
}
self.hash_map.deinit();
self.* = undefined;
}
pub fn put(self: *BufSet, key: []const u8) !void {
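The `BufSet.deinit` change above replaces an `iterator()` loop with a plain `for` over the `items()` slice, and invalidates the set with `self.* = undefined` once everything is freed. A sketch of the same iteration pattern; the `sumValues` helper and the `StringHashMap(u32)` element type are hypothetical:

const std = @import("std");

// Old shape:
//     var it = map.iterator();
//     while (it.next()) |entry| total += entry.value;
// New shape, as used in BufSet.deinit above:
fn sumValues(map: *std.StringHashMap(u32)) u32 {
    var total: u32 = 0;
    for (map.items()) |entry| {
        total += entry.value;
    }
    return total;
}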

View File

@@ -422,12 +422,12 @@ pub const Builder = struct {
.type_id = type_id,
.description = description,
};
if ((self.available_options_map.put(name, available_option) catch unreachable) != null) {
if ((self.available_options_map.fetchPut(name, available_option) catch unreachable) != null) {
panic("Option '{}' declared twice", .{name});
}
self.available_options_list.append(available_option) catch unreachable;
const entry = self.user_input_options.get(name) orelse return null;
const entry = self.user_input_options.getEntry(name) orelse return null;
entry.value.used = true;
switch (type_id) {
TypeId.Bool => switch (entry.value.value) {
@@ -634,7 +634,7 @@ pub const Builder = struct {
pub fn addUserInputOption(self: *Builder, name: []const u8, value: []const u8) !bool {
const gop = try self.user_input_options.getOrPut(name);
if (!gop.found_existing) {
gop.kv.value = UserInputOption{
gop.entry.value = UserInputOption{
.name = name,
.value = UserValue{ .Scalar = value },
.used = false,
@@ -643,7 +643,7 @@ pub const Builder = struct {
}
// option already exists
switch (gop.kv.value.value) {
switch (gop.entry.value.value) {
UserValue.Scalar => |s| {
// turn it into a list
var list = ArrayList([]const u8).init(self.allocator);
@@ -675,7 +675,7 @@ pub const Builder = struct {
pub fn addUserInputFlag(self: *Builder, name: []const u8) !bool {
const gop = try self.user_input_options.getOrPut(name);
if (!gop.found_existing) {
gop.kv.value = UserInputOption{
gop.entry.value = UserInputOption{
.name = name,
.value = UserValue{ .Flag = {} },
.used = false,
@@ -684,7 +684,7 @@ pub const Builder = struct {
}
// option already exists
switch (gop.kv.value.value) {
switch (gop.entry.value.value) {
UserValue.Scalar => |s| {
warn("Flag '-D{}' conflicts with option '-D{}={}'.\n", .{ name, name, s });
return true;
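Two replacements in the Builder hunks above are worth spelling out: `fetchPut` stores a value and returns the previously stored entry, if any, which is what keeps the "declared twice" check working, and `getEntry` returns a pointer to the stored entry, so `entry.value.used = true` mutates the map in place. A small sketch with a placeholder `StringHashMap(u32)`:

const std = @import("std");
const testing = std.testing;

test "fetchPut and getEntry (sketch)" {
    var map = std.StringHashMap(u32).init(testing.allocator);
    defer map.deinit();

    // First insertion: nothing was stored before, so fetchPut returns null.
    testing.expect((try map.fetchPut("opt", 1)) == null);
    // Second insertion: the previous entry is returned, mirroring the
    // duplicate-option check in Builder above.
    testing.expect((try map.fetchPut("opt", 2)) != null);

    // getEntry yields a pointer, so the stored value can be updated in place.
    const entry = map.getEntry("opt").?;
    entry.value += 1;
    testing.expect(map.get("opt").? == 3);
}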

View File

@@ -293,18 +293,22 @@ pub fn HashMapUnmanaged(
pub fn clearRetainingCapacity(self: *Self) void {
self.entries.items.len = 0;
if (self.header) |header| {
if (self.index_header) |header| {
header.max_distance_from_start_index = 0;
const indexes = header.indexes(u8);
@memset(indexes.ptr, 0xff, indexes.len);
switch (header.capacityIndexType()) {
.u8 => mem.set(Index(u8), header.indexes(u8), Index(u8).empty),
.u16 => mem.set(Index(u16), header.indexes(u16), Index(u16).empty),
.u32 => mem.set(Index(u32), header.indexes(u32), Index(u32).empty),
.usize => mem.set(Index(usize), header.indexes(usize), Index(usize).empty),
}
}
}
pub fn clearAndFree(self: *Self, allocator: *Allocator) void {
self.entries.shrink(allocator, 0);
if (self.header) |header| {
if (self.index_header) |header| {
header.free(allocator);
self.header = null;
self.index_header = null;
}
}
@@ -378,13 +382,13 @@ pub fn HashMapUnmanaged(
try self.entries.ensureCapacity(allocator, new_capacity);
if (new_capacity <= linear_scan_max) return;
// Resize if indexes would be more than 75% full.
const needed_len = new_capacity * 4 / 3;
// Resize if indexes would be more than 60% full.
const needed_len = new_capacity * 5 / 3;
if (self.index_header) |header| {
if (needed_len > header.indexes_len) {
var new_indexes_len = header.indexes_len;
while (true) {
new_indexes_len += new_indexes_len / 2 + 8;
new_indexes_len += new_indexes_len / 2 + 8;
if (new_indexes_len >= needed_len) break;
}
const new_header = try IndexHeader.alloc(allocator, new_indexes_len);
@@ -789,6 +793,11 @@ fn Index(comptime I: type) type {
const Self = @This();
const empty = Self{
.entry_index = math.maxInt(I),
.distance_from_start_index = undefined,
};
fn isEmpty(idx: Self) bool {
return idx.entry_index == math.maxInt(I);
}
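For the table internals above: `clearRetainingCapacity` now writes a typed `Index(I).empty` sentinel (an `entry_index` equal to `maxInt(I)`) for each index width instead of memsetting raw 0xff bytes, and `needed_len = new_capacity * 5 / 3` means the index array stays at most 3/5, i.e. 60%, full once `new_capacity` entries are inserted. A standalone sketch of the sentinel encoding; the field types are assumed, only the field names and the `mem.set` usage come from the hunk:

const std = @import("std");
const math = std.math;
const mem = std.mem;

fn Index(comptime I: type) type {
    return struct {
        entry_index: I, // assumed type; the hunk only shows the field names
        distance_from_start_index: I,

        const Self = @This();
        const empty = Self{
            .entry_index = math.maxInt(I),
            .distance_from_start_index = undefined,
        };
        fn isEmpty(idx: Self) bool {
            return idx.entry_index == math.maxInt(I);
        }
    };
}

test "clear index slots with a typed sentinel (sketch)" {
    var slots: [8]Index(u8) = undefined;
    mem.set(Index(u8), &slots, Index(u8).empty);
    for (slots) |slot| std.testing.expect(slot.isEmpty());
}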

View File

@@ -118,13 +118,12 @@ pub const Headers = struct {
};
}
pub fn deinit(self: Self) void {
pub fn deinit(self: *Self) void {
{
var it = self.index.iterator();
while (it.next()) |kv| {
var dex = &kv.value;
for (self.index.items()) |*entry| {
const dex = &entry.value;
dex.deinit();
self.allocator.free(kv.key);
self.allocator.free(entry.key);
}
self.index.deinit();
}
@@ -134,6 +133,7 @@ pub const Headers = struct {
}
self.data.deinit();
}
self.* = undefined;
}
pub fn clone(self: Self, allocator: *Allocator) !Self {
@@ -155,10 +155,10 @@ pub const Headers = struct {
const n = self.data.items.len + 1;
try self.data.ensureCapacity(n);
var entry: HeaderEntry = undefined;
if (self.index.get(name)) |kv| {
if (self.index.getEntry(name)) |kv| {
entry = try HeaderEntry.init(self.allocator, kv.key, value, never_index);
errdefer entry.deinit();
var dex = &kv.value;
const dex = &kv.value;
try dex.append(n - 1);
} else {
const name_dup = try mem.dupe(self.allocator, u8, name);
@@ -195,7 +195,7 @@ pub const Headers = struct {
/// Returns boolean indicating if something was deleted.
pub fn delete(self: *Self, name: []const u8) bool {
if (self.index.remove(name)) |kv| {
var dex = &kv.value;
const dex = &kv.value;
// iterate backwards
var i = dex.items.len;
while (i > 0) {
@@ -207,7 +207,7 @@ pub const Headers = struct {
}
dex.deinit();
self.allocator.free(kv.key);
self.rebuild_index();
self.rebuildIndex();
return true;
} else {
return false;
@@ -216,45 +216,52 @@ pub const Headers = struct {
/// Removes the element at the specified index.
/// Moves items down to fill the empty space.
/// TODO this implementation can be replaced by adding
/// orderedRemove to the new hash table implementation as an
/// alternative to swapRemove.
pub fn orderedRemove(self: *Self, i: usize) void {
const removed = self.data.orderedRemove(i);
const kv = self.index.get(removed.name).?;
var dex = &kv.value;
const kv = self.index.getEntry(removed.name).?;
const dex = &kv.value;
if (dex.items.len == 1) {
// was last item; delete the index
_ = self.index.remove(kv.key);
dex.deinit();
removed.deinit();
self.allocator.free(kv.key);
const key = kv.key;
_ = self.index.remove(key); // invalidates `kv` and `dex`
self.allocator.free(key);
} else {
dex.shrink(dex.items.len - 1);
removed.deinit();
}
// if it was the last item; no need to rebuild index
if (i != self.data.items.len) {
self.rebuild_index();
self.rebuildIndex();
}
}
/// Removes the element at the specified index.
/// The empty slot is filled from the end of the list.
/// TODO this implementation can be replaced by simply using the
/// new hash table which does swap removal.
pub fn swapRemove(self: *Self, i: usize) void {
const removed = self.data.swapRemove(i);
const kv = self.index.get(removed.name).?;
var dex = &kv.value;
const kv = self.index.getEntry(removed.name).?;
const dex = &kv.value;
if (dex.items.len == 1) {
// was last item; delete the index
_ = self.index.remove(kv.key);
dex.deinit();
removed.deinit();
self.allocator.free(kv.key);
const key = kv.key;
_ = self.index.remove(key); // invalidates `kv` and `dex`
self.allocator.free(key);
} else {
dex.shrink(dex.items.len - 1);
removed.deinit();
}
// if it was the last item; no need to rebuild index
if (i != self.data.items.len) {
self.rebuild_index();
self.rebuildIndex();
}
}
@@ -266,11 +273,7 @@ pub const Headers = struct {
/// Returns a list of indices containing headers with the given name.
/// The returned list should not be modified by the caller.
pub fn getIndices(self: Self, name: []const u8) ?HeaderIndexList {
if (self.index.get(name)) |kv| {
return kv.value;
} else {
return null;
}
return self.index.get(name);
}
/// Returns a slice containing each header with the given name.
@@ -325,25 +328,20 @@ pub const Headers = struct {
return buf;
}
fn rebuild_index(self: *Self) void {
{ // clear out the indexes
var it = self.index.iterator();
while (it.next()) |kv| {
var dex = &kv.value;
dex.items.len = 0; // keeps capacity available
}
fn rebuildIndex(self: *Self) void {
// clear out the indexes
for (self.index.items()) |*entry| {
entry.value.shrinkRetainingCapacity(0);
}
{ // fill up indexes again; we know capacity is fine from before
for (self.data.span()) |entry, i| {
var dex = &self.index.get(entry.name).?.value;
dex.appendAssumeCapacity(i);
}
// fill up indexes again; we know capacity is fine from before
for (self.data.items) |entry, i| {
self.index.getEntry(entry.name).?.value.appendAssumeCapacity(i);
}
}
pub fn sort(self: *Self) void {
std.sort.sort(HeaderEntry, self.data.items, {}, HeaderEntry.compare);
self.rebuild_index();
self.rebuildIndex();
}
pub fn format(
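One detail in the `orderedRemove`/`swapRemove` hunks above: `getEntry` returns a pointer into the index map, so the key slice is copied to a local before `remove` is called, because removal invalidates `kv` and `dex`. A hypothetical helper showing the same ordering (the function name, map type, and the assumption that the key was heap-allocated are not from the commit):

const std = @import("std");

fn removeAndFreeKey(
    allocator: *std.mem.Allocator,
    map: *std.StringHashMap(u32),
    name: []const u8,
) void {
    const kv = map.getEntry(name) orelse return;
    const key = kv.key; // copy the slice out before the entry pointer dies
    _ = map.remove(key); // invalidates `kv`
    allocator.free(key); // assumes the key was allocated with `allocator`
}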

View File

@@ -2149,27 +2149,27 @@ test "json.parser.dynamic" {
var root = tree.root;
var image = root.Object.get("Image").?.value;
var image = root.Object.get("Image").?;
const width = image.Object.get("Width").?.value;
const width = image.Object.get("Width").?;
testing.expect(width.Integer == 800);
const height = image.Object.get("Height").?.value;
const height = image.Object.get("Height").?;
testing.expect(height.Integer == 600);
const title = image.Object.get("Title").?.value;
const title = image.Object.get("Title").?;
testing.expect(mem.eql(u8, title.String, "View from 15th Floor"));
const animated = image.Object.get("Animated").?.value;
const animated = image.Object.get("Animated").?;
testing.expect(animated.Bool == false);
const array_of_object = image.Object.get("ArrayOfObject").?.value;
const array_of_object = image.Object.get("ArrayOfObject").?;
testing.expect(array_of_object.Array.items.len == 1);
const obj0 = array_of_object.Array.items[0].Object.get("n").?.value;
const obj0 = array_of_object.Array.items[0].Object.get("n").?;
testing.expect(mem.eql(u8, obj0.String, "m"));
const double = image.Object.get("double").?.value;
const double = image.Object.get("double").?;
testing.expect(double.Float == 1.3412);
}
@@ -2217,12 +2217,12 @@ test "write json then parse it" {
var tree = try parser.parse(fixed_buffer_stream.getWritten());
defer tree.deinit();
testing.expect(tree.root.Object.get("f").?.value.Bool == false);
testing.expect(tree.root.Object.get("t").?.value.Bool == true);
testing.expect(tree.root.Object.get("int").?.value.Integer == 1234);
testing.expect(tree.root.Object.get("array").?.value.Array.items[0].Null == {});
testing.expect(tree.root.Object.get("array").?.value.Array.items[1].Float == 12.34);
testing.expect(mem.eql(u8, tree.root.Object.get("str").?.value.String, "hello"));
testing.expect(tree.root.Object.get("f").?.Bool == false);
testing.expect(tree.root.Object.get("t").?.Bool == true);
testing.expect(tree.root.Object.get("int").?.Integer == 1234);
testing.expect(tree.root.Object.get("array").?.Array.items[0].Null == {});
testing.expect(tree.root.Object.get("array").?.Array.items[1].Float == 12.34);
testing.expect(mem.eql(u8, tree.root.Object.get("str").?.String, "hello"));
}
fn test_parse(arena_allocator: *std.mem.Allocator, json_str: []const u8) !Value {
@@ -2245,7 +2245,7 @@ test "integer after float has proper type" {
\\ "ints": [1, 2, 3]
\\}
);
std.testing.expect(json.Object.getValue("ints").?.Array.items[0] == .Integer);
std.testing.expect(json.Object.get("ints").?.Array.items[0] == .Integer);
}
test "escaped characters" {
@@ -2271,16 +2271,16 @@ test "escaped characters" {
const obj = (try test_parse(&arena_allocator.allocator, input)).Object;
testing.expectEqualSlices(u8, obj.get("backslash").?.value.String, "\\");
testing.expectEqualSlices(u8, obj.get("forwardslash").?.value.String, "/");
testing.expectEqualSlices(u8, obj.get("newline").?.value.String, "\n");
testing.expectEqualSlices(u8, obj.get("carriagereturn").?.value.String, "\r");
testing.expectEqualSlices(u8, obj.get("tab").?.value.String, "\t");
testing.expectEqualSlices(u8, obj.get("formfeed").?.value.String, "\x0C");
testing.expectEqualSlices(u8, obj.get("backspace").?.value.String, "\x08");
testing.expectEqualSlices(u8, obj.get("doublequote").?.value.String, "\"");
testing.expectEqualSlices(u8, obj.get("unicode").?.value.String, "ą");
testing.expectEqualSlices(u8, obj.get("surrogatepair").?.value.String, "😂");
testing.expectEqualSlices(u8, obj.get("backslash").?.String, "\\");
testing.expectEqualSlices(u8, obj.get("forwardslash").?.String, "/");
testing.expectEqualSlices(u8, obj.get("newline").?.String, "\n");
testing.expectEqualSlices(u8, obj.get("carriagereturn").?.String, "\r");
testing.expectEqualSlices(u8, obj.get("tab").?.String, "\t");
testing.expectEqualSlices(u8, obj.get("formfeed").?.String, "\x0C");
testing.expectEqualSlices(u8, obj.get("backspace").?.String, "\x08");
testing.expectEqualSlices(u8, obj.get("doublequote").?.String, "\"");
testing.expectEqualSlices(u8, obj.get("unicode").?.String, "ą");
testing.expectEqualSlices(u8, obj.get("surrogatepair").?.String, "😂");
}
test "string copy option" {
@@ -2306,11 +2306,11 @@ test "string copy option" {
const obj_copy = tree_copy.root.Object;
for ([_][]const u8{ "noescape", "simple", "unicode", "surrogatepair" }) |field_name| {
testing.expectEqualSlices(u8, obj_nocopy.getValue(field_name).?.String, obj_copy.getValue(field_name).?.String);
testing.expectEqualSlices(u8, obj_nocopy.get(field_name).?.String, obj_copy.get(field_name).?.String);
}
const nocopy_addr = &obj_nocopy.getValue("noescape").?.String[0];
const copy_addr = &obj_copy.getValue("noescape").?.String[0];
const nocopy_addr = &obj_nocopy.get("noescape").?.String[0];
const copy_addr = &obj_copy.get("noescape").?.String[0];
var found_nocopy = false;
for (input) |_, index| {
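The json test updates reflect that `ObjectMap.get` now returns the `Value` directly, with no intermediate `.value` field, and that the old `getValue` helper is replaced by `get`. A trimmed-down version of the pattern the tests above use:

const std = @import("std");
const testing = std.testing;

test "ObjectMap.get returns the Value (sketch)" {
    var parser = std.json.Parser.init(testing.allocator, false);
    defer parser.deinit();
    var tree = try parser.parse(
        \\{"Width": 800}
    );
    defer tree.deinit();

    // Previously: tree.root.Object.get("Width").?.value.Integer
    const width = tree.root.Object.get("Width").?;
    testing.expect(width.Integer == 800);
}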

View File

@@ -720,7 +720,7 @@ fn fmtPathDir(
defer dir.close();
const stat = try dir.stat();
if (try fmt.seen.put(stat.inode, {})) |_| return;
if (try fmt.seen.fetchPut(stat.inode, {})) |_| return;
var dir_it = dir.iterate();
while (try dir_it.next()) |entry| {
@@ -768,7 +768,7 @@ fn fmtPathFile(
defer fmt.gpa.free(source_code);
// Add to set after no longer possible to get error.IsDir.
if (try fmt.seen.put(stat.inode, {})) |_| return;
if (try fmt.seen.fetchPut(stat.inode, {})) |_| return;
const tree = try std.zig.parse(fmt.gpa, source_code);
defer tree.deinit();

View File

@@ -20,7 +20,7 @@ pub const Error = error{OutOfMemory};
const TypeError = Error || error{UnsupportedType};
const TransError = TypeError || error{UnsupportedTranslation};
const DeclTable = std.HashMap(usize, []const u8, addrHash, addrEql);
const DeclTable = std.HashMap(usize, []const u8, addrHash, addrEql, false);
fn addrHash(x: usize) u32 {
switch (@typeInfo(usize).Int.bits) {
@@ -776,8 +776,8 @@ fn checkForBuiltinTypedef(checked_name: []const u8) ?[]const u8 {
}
fn transTypeDef(c: *Context, typedef_decl: *const ZigClangTypedefNameDecl, top_level_visit: bool) Error!?*ast.Node {
if (c.decl_table.get(@ptrToInt(ZigClangTypedefNameDecl_getCanonicalDecl(typedef_decl)))) |kv|
return transCreateNodeIdentifier(c, kv.value); // Avoid processing this decl twice
if (c.decl_table.get(@ptrToInt(ZigClangTypedefNameDecl_getCanonicalDecl(typedef_decl)))) |name|
return transCreateNodeIdentifier(c, name); // Avoid processing this decl twice
const rp = makeRestorePoint(c);
const typedef_name = try c.str(ZigClangNamedDecl_getName_bytes_begin(@ptrCast(*const ZigClangNamedDecl, typedef_decl)));
@@ -818,8 +818,8 @@ fn transCreateNodeTypedef(rp: RestorePoint, typedef_decl: *const ZigClangTypedef
}
fn transRecordDecl(c: *Context, record_decl: *const ZigClangRecordDecl) Error!?*ast.Node {
if (c.decl_table.get(@ptrToInt(ZigClangRecordDecl_getCanonicalDecl(record_decl)))) |kv|
return try transCreateNodeIdentifier(c, kv.value); // Avoid processing this decl twice
if (c.decl_table.get(@ptrToInt(ZigClangRecordDecl_getCanonicalDecl(record_decl)))) |name|
return try transCreateNodeIdentifier(c, name); // Avoid processing this decl twice
const record_loc = ZigClangRecordDecl_getLocation(record_decl);
var bare_name = try c.str(ZigClangNamedDecl_getName_bytes_begin(@ptrCast(*const ZigClangNamedDecl, record_decl)));
@@ -969,7 +969,7 @@ fn transRecordDecl(c: *Context, record_decl: *const ZigClangRecordDecl) Error!?*
fn transEnumDecl(c: *Context, enum_decl: *const ZigClangEnumDecl) Error!?*ast.Node {
if (c.decl_table.get(@ptrToInt(ZigClangEnumDecl_getCanonicalDecl(enum_decl)))) |name|
return try transCreateNodeIdentifier(c, name.value); // Avoid processing this decl twice
return try transCreateNodeIdentifier(c, name); // Avoid processing this decl twice
const rp = makeRestorePoint(c);
const enum_loc = ZigClangEnumDecl_getLocation(enum_decl);
@@ -2130,7 +2130,7 @@ fn transInitListExprRecord(
var raw_name = try rp.c.str(ZigClangNamedDecl_getName_bytes_begin(@ptrCast(*const ZigClangNamedDecl, field_decl)));
if (ZigClangFieldDecl_isAnonymousStructOrUnion(field_decl)) {
const name = rp.c.decl_table.get(@ptrToInt(ZigClangFieldDecl_getCanonicalDecl(field_decl))).?;
raw_name = try mem.dupe(rp.c.arena, u8, name.value);
raw_name = try mem.dupe(rp.c.arena, u8, name);
}
const field_name_tok = try appendIdentifier(rp.c, raw_name);
@@ -2855,7 +2855,7 @@ fn transMemberExpr(rp: RestorePoint, scope: *Scope, stmt: *const ZigClangMemberE
const field_decl = @ptrCast(*const struct_ZigClangFieldDecl, member_decl);
if (ZigClangFieldDecl_isAnonymousStructOrUnion(field_decl)) {
const name = rp.c.decl_table.get(@ptrToInt(ZigClangFieldDecl_getCanonicalDecl(field_decl))).?;
break :blk try mem.dupe(rp.c.arena, u8, name.value);
break :blk try mem.dupe(rp.c.arena, u8, name);
}
}
const decl = @ptrCast(*const ZigClangNamedDecl, member_decl);
@@ -6040,8 +6040,8 @@ fn getContainer(c: *Context, node: *ast.Node) ?*ast.Node {
} else if (node.id == .PrefixOp) {
return node;
} else if (node.cast(ast.Node.Identifier)) |ident| {
if (c.global_scope.sym_table.get(tokenSlice(c, ident.token))) |kv| {
if (kv.value.cast(ast.Node.VarDecl)) |var_decl|
if (c.global_scope.sym_table.get(tokenSlice(c, ident.token))) |value| {
if (value.cast(ast.Node.VarDecl)) |var_decl|
return getContainer(c, var_decl.init_node.?);
}
} else if (node.cast(ast.Node.InfixOp)) |infix| {
@@ -6064,8 +6064,8 @@ fn getContainer(c: *Context, node: *ast.Node) ?*ast.Node {
fn getContainerTypeOf(c: *Context, ref: *ast.Node) ?*ast.Node {
if (ref.cast(ast.Node.Identifier)) |ident| {
if (c.global_scope.sym_table.get(tokenSlice(c, ident.token))) |kv| {
if (kv.value.cast(ast.Node.VarDecl)) |var_decl| {
if (c.global_scope.sym_table.get(tokenSlice(c, ident.token))) |value| {
if (value.cast(ast.Node.VarDecl)) |var_decl| {
if (var_decl.type_node) |ty|
return getContainer(c, ty);
}
@@ -6104,8 +6104,7 @@ fn getFnProto(c: *Context, ref: *ast.Node) ?*ast.Node.FnProto {
}
fn addMacros(c: *Context) !void {
var macro_it = c.global_scope.macro_table.iterator();
while (macro_it.next()) |kv| {
for (c.global_scope.macro_table.items()) |kv| {
if (getFnProto(c, kv.value)) |proto_node| {
// If a macro aliases a global variable which is a function pointer, we conclude that
// the macro is intended to represent a function that assumes the function pointer