diff --git a/js/ipc/JavaScriptShared.cpp b/js/ipc/JavaScriptShared.cpp
index 9786243f2..aba02252d 100644
--- a/js/ipc/JavaScriptShared.cpp
+++ b/js/ipc/JavaScriptShared.cpp
@@ -61,6 +61,15 @@ IdToObjectMap::find(ObjectId id)
     return p->value();
 }
 
+JSObject*
+IdToObjectMap::findPreserveColor(ObjectId id)
+{
+    Table::Ptr p = table_.lookup(id);
+    if (!p)
+        return nullptr;
+    return p->value().unbarrieredGet();
+}
+
 bool
 IdToObjectMap::add(ObjectId id, JSObject* obj)
 {
@@ -757,4 +766,4 @@ CPOWManager*
 mozilla::jsipc::CPOWManagerFor(PJavaScriptChild* aChild)
 {
     return static_cast<JavaScriptChild*>(aChild);
-}
+}
\ No newline at end of file
diff --git a/js/ipc/JavaScriptShared.h b/js/ipc/JavaScriptShared.h
index 4de153826..d0cd4615b 100644
--- a/js/ipc/JavaScriptShared.h
+++ b/js/ipc/JavaScriptShared.h
@@ -96,6 +96,7 @@ class IdToObjectMap
 
     bool add(ObjectId id, JSObject* obj);
     JSObject* find(ObjectId id);
+    JSObject* findPreserveColor(ObjectId id);
    void remove(ObjectId id);
    void clear();
 
@@ -233,4 +234,4 @@ class JavaScriptShared : public CPOWManager
 } // namespace jsipc
 } // namespace mozilla
 
-#endif
+#endif
\ No newline at end of file
diff --git a/js/ipc/WrapperAnswer.cpp b/js/ipc/WrapperAnswer.cpp
index fc342bbb6..6ae68e01f 100644
--- a/js/ipc/WrapperAnswer.cpp
+++ b/js/ipc/WrapperAnswer.cpp
@@ -789,10 +789,10 @@ WrapperAnswer::RecvDOMInstanceOf(const ObjectId& objId, const int& prototypeID,
 bool
 WrapperAnswer::RecvDropObject(const ObjectId& objId)
 {
-    JSObject* obj = objects_.find(objId);
+    JSObject* obj = objects_.findPreserveColor(objId);
     if (obj) {
         objectIdMap(objId.hasXrayWaiver()).remove(obj);
         objects_.remove(objId);
     }
     return true;
-}
+}
\ No newline at end of file
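The findPreserveColor() lookup added above lets RecvDropObject inspect an entry without triggering a read barrier: a barriered find() tells the GC the object has escaped to running JS, which can mark it (unmark it from gray) and keep alive a CPOW that is in fact being dropped. A minimal sketch of the pattern; ReadBarriered, IdMap and ExposeToActiveJS below are illustrative stand-ins, not the real SpiderMonkey types:

    // Barriered vs. unbarriered reads of a GC-managed table value.
    #include <cstdint>
    #include <unordered_map>

    template <typename T>
    struct ReadBarriered {
        T value;
        static void ExposeToActiveJS(T) { /* unmark-gray / keep-alive hook */ }
        T get() const {             // barriered read: value escapes to JS
            ExposeToActiveJS(value);
            return value;
        }
        T unbarrieredGet() const {  // peek: safe when the value cannot escape
            return value;
        }
    };

    struct IdMap {
        std::unordered_map<uint64_t, ReadBarriered<void*>> table;

        // Look up an entry without exposing it to the GC, e.g. just before
        // removing it from the table.
        void* findPreserveColor(uint64_t id) {
            auto p = table.find(id);
            return p == table.end() ? nullptr : p->second.unbarrieredGet();
        }
    };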
diff --git a/js/src/builtin/TypedObject.cpp b/js/src/builtin/TypedObject.cpp
index ae74f01bf..95704ee46 100644
--- a/js/src/builtin/TypedObject.cpp
+++ b/js/src/builtin/TypedObject.cpp
@@ -652,7 +652,7 @@ ArrayMetaTypeDescr::create(JSContext* cx,
     if (!CreateTraceList(cx, obj))
         return nullptr;
 
-    if (!cx->zone()->typeDescrObjects.put(obj)) {
+    if (!cx->zone()->addTypeDescrObject(cx, obj)) {
         ReportOutOfMemory(cx);
         return nullptr;
     }
@@ -993,8 +993,8 @@ StructMetaTypeDescr::create(JSContext* cx,
     if (!CreateTraceList(cx, descr))
         return nullptr;
 
-    if (!cx->zone()->typeDescrObjects.put(descr) ||
-        !cx->zone()->typeDescrObjects.put(fieldTypeVec))
+    if (!cx->zone()->addTypeDescrObject(cx, descr) ||
+        !cx->zone()->addTypeDescrObject(cx, fieldTypeVec))
     {
         ReportOutOfMemory(cx);
         return nullptr;
@@ -1165,10 +1165,8 @@ DefineSimpleTypeDescr(JSContext* cx,
     if (!CreateTraceList(cx, descr))
         return false;
 
-    if (!cx->zone()->typeDescrObjects.put(descr)) {
-        ReportOutOfMemory(cx);
+    if (!cx->zone()->addTypeDescrObject(cx, descr))
         return false;
-    }
 
     return true;
 }
@@ -3005,4 +3003,4 @@ TypeDescr::finalize(FreeOp* fop, JSObject* obj)
     TypeDescr& descr = obj->as<TypeDescr>();
     if (descr.hasTraceList())
         js_free(const_cast<int32_t*>(descr.traceList()));
-}
+}
\ No newline at end of file
diff --git a/js/src/gc/GCRuntime.h b/js/src/gc/GCRuntime.h
index 5c2576efd..16260a4e3 100644
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -900,7 +900,8 @@ class GCRuntime
     void requestMajorGC(JS::gcreason::Reason reason);
     SliceBudget defaultBudget(JS::gcreason::Reason reason, int64_t millis);
-    void budgetIncrementalGC(SliceBudget& budget, AutoLockForExclusiveAccess& lock);
+    void budgetIncrementalGC(JS::gcreason::Reason reason, SliceBudget& budget,
+                             AutoLockForExclusiveAccess& lock);
     void resetIncrementalGC(AbortReason reason, AutoLockForExclusiveAccess& lock);
 
     // Assert if the system state is such that we should never
@@ -915,6 +916,7 @@ class GCRuntime
     void collect(bool nonincrementalByAPI, SliceBudget budget, JS::gcreason::Reason reason) JS_HAZ_GC_CALL;
     MOZ_MUST_USE bool gcCycle(bool nonincrementalByAPI, SliceBudget& budget,
                               JS::gcreason::Reason reason);
+    bool shouldRepeatForDeadZone(JS::gcreason::Reason reason);
     void incrementalCollectSlice(SliceBudget& budget, JS::gcreason::Reason reason,
                                  AutoLockForExclusiveAccess& lock);
@@ -1348,4 +1350,4 @@ class MOZ_RAII AutoMaybeStartBackgroundAllocation
 
 } /* namespace js */
 
-#endif
+#endif
\ No newline at end of file
diff --git a/js/src/gc/RootMarking.cpp b/js/src/gc/RootMarking.cpp
index 93264084b..ed7b8fb6f 100644
--- a/js/src/gc/RootMarking.cpp
+++ b/js/src/gc/RootMarking.cpp
@@ -478,6 +478,7 @@ js::gc::GCRuntime::bufferGrayRoots()
     for (GCZonesIter zone(rt); !zone.done(); zone.next())
         MOZ_ASSERT(zone->gcGrayRoots.empty());
 
+    gcstats::AutoPhase ap(stats, gcstats::PHASE_BUFFER_GRAY_ROOTS);
     BufferGrayRootsTracer grayBufferer(rt);
     if (JSTraceDataOp op = grayRootTracer.op)
@@ -539,5 +540,4 @@ GCRuntime::resetBufferedGrayRoots() const
                "Do not clear the gray buffers unless we are Failed or becoming Unused");
     for (GCZonesIter zone(rt); !zone.done(); zone.next())
         zone->gcGrayRoots.clearAndFree();
-}
-
+}
\ No newline at end of file
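The RootMarking.cpp hunk moves the gcstats::AutoPhase into bufferGrayRoots() itself, so the PHASE_BUFFER_GRAY_ROOTS timer covers every caller rather than only the call site in beginMarkPhase(). A minimal RAII phase-accounting sketch; Stats, AutoPhase and the phase name here are illustrative, not the real gcstats interface:

    #include <cstdio>

    struct Stats {
        void beginPhase(const char* name) { std::printf("begin %s\n", name); }
        void endPhase(const char* name) { std::printf("end %s\n", name); }
    };

    class AutoPhase {
        Stats& stats_;
        const char* phase_;

      public:
        AutoPhase(Stats& stats, const char* phase)
          : stats_(stats), phase_(phase)
        {
            stats_.beginPhase(phase_);
        }
        ~AutoPhase() { stats_.endPhase(phase_); }
    };

    void bufferGrayRoots(Stats& stats) {
        AutoPhase ap(stats, "BUFFER_GRAY_ROOTS");  // scoped to the whole function
        // ... trace gray roots into per-zone buffers ...
    }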
diff --git a/js/src/gc/Zone.cpp b/js/src/gc/Zone.cpp
index ed099341c..ecfb9a38c 100644
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -370,6 +370,21 @@ Zone::fixupAfterMovingGC()
     fixupInitialShapeTable();
 }
 
+bool
+Zone::addTypeDescrObject(JSContext* cx, HandleObject obj)
+{
+    // Type descriptor objects are always tenured so we don't need post barriers
+    // on the set.
+    MOZ_ASSERT(!IsInsideNursery(obj));
+
+    if (!typeDescrObjects.put(obj)) {
+        ReportOutOfMemory(cx);
+        return false;
+    }
+
+    return true;
+}
+
 ZoneList::ZoneList()
   : head(nullptr), tail(nullptr)
 {}
@@ -468,4 +483,4 @@ JS_PUBLIC_API(void)
 JS::shadow::RegisterWeakCache(JS::Zone* zone, WeakCache<void*>* cachep)
 {
     zone->registerWeakCache(cachep);
-}
+}
\ No newline at end of file
diff --git a/js/src/gc/Zone.h b/js/src/gc/Zone.h
index 50d06319d..24f4648f7 100644
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -349,10 +349,17 @@ struct Zone : public JS::shadow::Zone,
 
     // Keep track of all TypeDescr and related objects in this compartment.
     // This is used by the GC to trace them all first when compacting, since the
    // TypedObject trace hook may access these objects.
-    using TypeDescrObjectSet = js::GCHashSet<js::HeapPtr<JSObject*>,
-                                             js::MovableCellHasher<js::HeapPtr<JSObject*>>,
+
+    //
+    // There are no barriers here - the set contains only tenured objects so no
+    // post-barrier is required, and these are weak references so no pre-barrier
+    // is required.
+    using TypeDescrObjectSet = js::GCHashSet<JSObject*,
+                                             js::MovableCellHasher<JSObject*>,
                                              js::SystemAllocPolicy>;
     JS::WeakCache<TypeDescrObjectSet> typeDescrObjects;
+
+    bool addTypeDescrObject(JSContext* cx, HandleObject obj);
 
     // Malloc counter to measure memory pressure for GC scheduling. It runs from
@@ -734,4 +741,4 @@ class ZoneAllocPolicy
 
 } // namespace js
 
-#endif // gc_Zone_h
+#endif // gc_Zone_h
\ No newline at end of file
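The Zone changes drop the HeapPtr wrappers from TypeDescrObjectSet and funnel every insertion through addTypeDescrObject(), which asserts the invariant that makes barrier-free storage sound: no post-barrier is needed if every entry is tenured, and no pre-barrier if the references are weak, provided the set is swept so dead entries are dropped rather than traced. A sketch of that invariant with stand-in types (Cell, WeakTenuredSet and IsInsideNursery here are illustrative):

    #include <cassert>
    #include <unordered_set>

    struct Cell { bool inNursery = false; };

    inline bool IsInsideNursery(const Cell* cell) { return cell->inNursery; }

    struct WeakTenuredSet {
        std::unordered_set<Cell*> set;

        bool put(Cell* obj) {
            // Barrier-free storage is only sound for tenured cells.
            assert(!IsInsideNursery(obj));
            return set.insert(obj).second;
        }

        // Called by the GC after marking: drop entries that died this cycle.
        template <typename IsDead>
        void sweep(IsDead isDead) {
            for (auto it = set.begin(); it != set.end();) {
                if (isDead(*it))
                    it = set.erase(it);
                else
                    ++it;
            }
        }
    };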
diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp
index 8cee9ec09..1e8e4fc8d 100644
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1524,20 +1524,11 @@ GCMarker::delayMarkingChildren(const void* thing)
 }
 
 inline void
-ArenaLists::prepareForIncrementalGC(JSRuntime* rt)
+ArenaLists::prepareForIncrementalGC()
 {
-    for (auto i : AllAllocKinds()) {
-        FreeSpan* span = freeLists[i];
-        if (span != &placeholder) {
-            if (!span->isEmpty()) {
-                Arena* arena = span->getArena();
-                arena->allocatedDuringIncremental = true;
-                rt->gc.marker.delayMarkingArena(arena);
-            } else {
-                freeLists[i] = &placeholder;
-            }
-        }
-    }
+    purge();
+    for (auto i : AllAllocKinds())
+        arenaLists[i].moveCursorToEnd();
 }
 
 /* Compacting GC */
@@ -2251,7 +2242,7 @@ GCRuntime::updateTypeDescrObjects(MovingTracer* trc, Zone* zone)
 {
     zone->typeDescrObjects.sweep();
     for (auto r = zone->typeDescrObjects.all(); !r.empty(); r.popFront())
-        UpdateCellPointers(trc, r.front().get());
+        UpdateCellPointers(trc, r.front());
 }
 
 void
@@ -3579,6 +3570,23 @@ RelazifyFunctions(Zone* zone, AllocKind kind)
     }
 }
 
+static bool
+ShouldCollectZone(Zone* zone, JS::gcreason::Reason reason)
+{
+    // Normally we collect all scheduled zones.
+    if (reason != JS::gcreason::COMPARTMENT_REVIVED)
+        return zone->isGCScheduled();
+
+    // If we are repeating a GC because we noticed dead compartments haven't
+    // been collected, then only collect zones containing those compartments.
+    for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
+        if (comp->scheduledForDestruction)
+            return true;
+    }
+
+    return false;
+}
+
 bool
 GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAccess& lock)
 {
@@ -3602,7 +3610,7 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAcces
 #endif
 
         /* Set up which zones will be collected. */
-        if (zone->isGCScheduled()) {
+        if (ShouldCollectZone(zone, reason)) {
             if (!zone->isAtomsZone()) {
                 any = true;
                 zone->setGCState(Zone::Mark);
@@ -3621,7 +3629,7 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAcces
     for (CompartmentsIter c(rt, WithAtoms); !c.done(); c.next()) {
         c->marked = false;
         c->scheduledForDestruction = false;
-        c->maybeAlive = false;
+        c->maybeAlive = c->hasBeenEntered() || !c->zone()->isGCScheduled();
         if (shouldPreserveJITCode(c, currentTime, reason, canAllocateMoreCode))
             c->zone()->setPreservingCode(true);
     }
@@ -3640,6 +3648,12 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAcces
      * keepAtoms() will only change on the main thread, which we are currently
      * on. If the value of keepAtoms() changes between GC slices, then we'll
     * cancel the incremental GC. See IsIncrementalGCSafe.
+
+
+
+
+
+
      */
     if (isFull && !rt->keepAtoms()) {
         Zone* atomsZone = rt->atomsCompartment(lock)->zone();
@@ -3655,15 +3669,12 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAcces
         return false;
 
     /*
-     * At the end of each incremental slice, we call prepareForIncrementalGC,
-     * which marks objects in all arenas that we're currently allocating
-     * into. This can cause leaks if unreachable objects are in these
-     * arenas. This purge call ensures that we only mark arenas that have had
-     * allocations after the incremental GC started.
+     * Ensure that after the start of a collection we don't allocate into any
+     * existing arenas, as this can cause unreachable things to be marked.
      */
     if (isIncremental) {
         for (GCZonesIter zone(rt); !zone.done(); zone.next())
-            zone->arenas.purge();
+            zone->arenas.prepareForIncrementalGC();
     }
 
     MemProfiler::MarkTenuredStart(rt);
@@ -3747,13 +3758,11 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAcces
 
     gcstats::AutoPhase ap2(stats, gcstats::PHASE_MARK_ROOTS);
 
-    if (isIncremental) {
-        gcstats::AutoPhase ap3(stats, gcstats::PHASE_BUFFER_GRAY_ROOTS);
-        bufferGrayRoots();
-    }
-
-    markCompartments();
-
+    if (isIncremental) {
+        bufferGrayRoots();
+        markCompartments();
+    }
+
     return true;
 }
 
@@ -3766,9 +3775,14 @@ GCRuntime::markCompartments()
      * This code ensures that if a compartment is "dead", then it will be
      * collected in this GC. A compartment is considered dead if its maybeAlive
     * flag is false. The maybeAlive flag is set if:
-     * (1) the compartment has incoming cross-compartment edges, or
-     * (2) an object in the compartment was marked during root marking, either
-     *     as a black root or a gray root.
+     * (1) the compartment has been entered (set in beginMarkPhase() above),
+     * (2) the compartment is not being collected (set in beginMarkPhase()
+     *     above),
+     * (3) an object in the compartment was marked during root marking, either
+     *     as a black root or a gray root (set in RootMarking.cpp), or
+     * (4) the compartment has incoming cross-compartment edges from another
+     *     compartment that has maybeAlive set (set by this method).
+     *
     * If the maybeAlive is false, then we set the scheduledForDestruction flag.
     * At the end of the GC, we look for compartments where
     * scheduledForDestruction is true. These are compartments that were somehow
@@ -3786,26 +3800,37 @@ GCRuntime::markCompartments()
      * allocation and read barriers during JS_TransplantObject and the like.
      */
 
-    /* Set the maybeAlive flag based on cross-compartment edges. */
-    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
-        for (JSCompartment::WrapperEnum e(c); !e.empty(); e.popFront()) {
+    /* Propagate the maybeAlive flag via cross-compartment edges. */
+
+    Vector<JSCompartment*, 0, js::SystemAllocPolicy> workList;
+
+    for (CompartmentsIter comp(rt, SkipAtoms); !comp.done(); comp.next()) {
+        if (comp->maybeAlive) {
+            if (!workList.append(comp))
+                return;
+        }
+    }
+
+    while (!workList.empty()) {
+        JSCompartment* comp = workList.popCopy();
+        for (JSCompartment::WrapperEnum e(comp); !e.empty(); e.popFront()) {
             if (e.front().key().is<JSString*>())
                 continue;
             JSCompartment* dest = e.front().mutableKey().compartment();
-            if (dest)
+            if (dest && !dest->maybeAlive) {
                 dest->maybeAlive = true;
+                if (!workList.append(dest))
+                    return;
+            }
         }
     }
 
-    /*
-     * For black roots, code in gc/Marking.cpp will already have set maybeAlive
-     * during MarkRuntime.
-     */
-
-    /* Propogate maybeAlive to scheduleForDestruction. */
-    for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
-        if (!c->maybeAlive && !rt->isAtomsCompartment(c))
-            c->scheduledForDestruction = true;
+    /* Set scheduledForDestruction based on maybeAlive. */
+
+    for (GCCompartmentsIter comp(rt); !comp.done(); comp.next()) {
+        MOZ_ASSERT(!comp->scheduledForDestruction);
+        if (!comp->maybeAlive && !rt->isAtomsCompartment(comp))
+            comp->scheduledForDestruction = true;
     }
 }
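The markCompartments() rewrite above replaces a single pass over cross-compartment edges with a transitive flood fill: liveness now propagates through chains of wrappers, so a compartment is only scheduled for destruction if no entered, uncollected, or root-marked compartment can reach it. The shape of the algorithm as a self-contained sketch (Compartment here is a stand-in struct, not the real JSCompartment):

    #include <vector>

    struct Compartment {
        bool maybeAlive = false;
        bool scheduledForDestruction = false;
        std::vector<Compartment*> wrapperTargets;  // outgoing cross-compartment edges
    };

    void PropagateMaybeAlive(std::vector<Compartment*>& all) {
        std::vector<Compartment*> workList;
        for (Compartment* c : all) {
            if (c->maybeAlive)
                workList.push_back(c);  // seeded by beginMarkPhase / root marking
        }
        while (!workList.empty()) {
            Compartment* c = workList.back();
            workList.pop_back();
            for (Compartment* dest : c->wrapperTargets) {
                if (!dest->maybeAlive) {     // newly reachable from a live one
                    dest->maybeAlive = true;
                    workList.push_back(dest);
                }
            }
        }
        for (Compartment* c : all) {
            if (!c->maybeAlive)              // unreachable: flag for collection
                c->scheduledForDestruction = true;
        }
    }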
@@ -5306,7 +5331,7 @@ AutoGCSlice::~AutoGCSlice()
     for (ZonesIter zone(runtime, WithAtoms); !zone.done(); zone.next()) {
         if (zone->isGCMarking()) {
             zone->setNeedsIncrementalBarrier(true, Zone::UpdateJit);
-            zone->arenas.prepareForIncrementalGC(runtime);
+            zone->arenas.purge();
         } else {
             zone->setNeedsIncrementalBarrier(false, Zone::UpdateJit);
         }
@@ -5487,9 +5512,9 @@ gc::AbortReason
 gc::IsIncrementalGCUnsafe(JSRuntime* rt)
 {
     MOZ_ASSERT(!rt->mainThread.suppressGC);
-
-    if (rt->keepAtoms())
-        return gc::AbortReason::KeepAtomsSet;
+
+    if (rt->keepAtoms())
+        return gc::AbortReason::KeepAtomsSet;
 
     if (!rt->gc.isIncrementalGCAllowed())
         return gc::AbortReason::IncrementalDisabled;
@@ -5498,9 +5523,17 @@ gc::IsIncrementalGCUnsafe(JSRuntime* rt)
 }
 
 void
-GCRuntime::budgetIncrementalGC(SliceBudget& budget, AutoLockForExclusiveAccess& lock)
+GCRuntime::budgetIncrementalGC(JS::gcreason::Reason reason, SliceBudget& budget,
+                               AutoLockForExclusiveAccess& lock)
 {
     AbortReason unsafeReason = IsIncrementalGCUnsafe(rt);
+    if (unsafeReason == AbortReason::None) {
+        if (reason == JS::gcreason::COMPARTMENT_REVIVED)
+            unsafeReason = gc::AbortReason::CompartmentRevived;
+        else if (mode != JSGC_MODE_INCREMENTAL)
+            unsafeReason = gc::AbortReason::ModeChange;
+    }
+
     if (unsafeReason != AbortReason::None) {
         resetIncrementalGC(unsafeReason, lock);
         budget.makeUnlimited();
@@ -5508,12 +5541,7 @@ GCRuntime::budgetIncrementalGC(SliceBudget& budget, AutoLockForExclusiveAccess&
         return;
     }
 
-    if (mode != JSGC_MODE_INCREMENTAL) {
-        resetIncrementalGC(AbortReason::ModeChange, lock);
-        budget.makeUnlimited();
-        stats.nonincremental(AbortReason::ModeChange);
-        return;
-    }
+
     if (isTooMuchMalloc()) {
         budget.makeUnlimited();
@@ -5660,6 +5688,10 @@ GCRuntime::gcCycle(bool nonincrementalByAPI, SliceBudget& budget, JS::gcreason::
     }
 
     State prevState = incrementalState;
+
+
+
+
     if (nonincrementalByAPI) {
         // Reset any in progress incremental GC if this was triggered via the
@@ -5672,7 +5704,7 @@ GCRuntime::gcCycle(bool nonincrementalByAPI, SliceBudget& budget, JS::gcreason::
         stats.nonincremental(gc::AbortReason::NonIncrementalRequested);
         budget.makeUnlimited();
     } else {
-        budgetIncrementalGC(budget, session.lock);
+        budgetIncrementalGC(reason, budget, session.lock);
     }
 
     /* The GC was reset, so we need a do-over. */
@@ -5764,6 +5796,22 @@ GCRuntime::checkIfGCAllowedInCurrentState(JS::gcreason::Reason reason)
     return true;
 }
 
+bool
+GCRuntime::shouldRepeatForDeadZone(JS::gcreason::Reason reason)
+{
+    MOZ_ASSERT_IF(reason == JS::gcreason::COMPARTMENT_REVIVED, !isIncremental);
+
+    if (!isIncremental || isIncrementalGCInProgress())
+        return false;
+
+    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
+        if (c->scheduledForDestruction)
+            return true;
+    }
+
+    return false;
+}
+
 void
 GCRuntime::collect(bool nonincrementalByAPI, SliceBudget budget, JS::gcreason::Reason reason)
 {
@@ -5782,27 +5830,23 @@ GCRuntime::collect(bool nonincrementalByAPI, SliceBudget budget, JS::gcreason::R
     do {
         poked = false;
         bool wasReset = gcCycle(nonincrementalByAPI, budget, reason);
-
-        /* Need to re-schedule all zones for GC. */
-        if (poked && cleanUpEverything)
+
+        bool repeatForDeadZone = false;
+        if (poked && cleanUpEverything) {
+            /* Need to re-schedule all zones for GC. */
             JS::PrepareForFullGC(rt->contextFromMainThread());
-
-        /*
-         * This code makes an extra effort to collect compartments that we
-         * thought were dead at the start of the GC. See the large comment in
-         * beginMarkPhase.
-         */
-        bool repeatForDeadZone = false;
-        if (!nonincrementalByAPI && !isIncrementalGCInProgress()) {
-            for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
-                if (c->scheduledForDestruction) {
-                    nonincrementalByAPI = true;
-                    repeatForDeadZone = true;
-                    reason = JS::gcreason::COMPARTMENT_REVIVED;
-                    c->zone()->scheduleGC();
-                }
-            }
-        }
+
+        } else if (shouldRepeatForDeadZone(reason) && !wasReset) {
+            /*
+             * This code makes an extra effort to collect compartments that we
+             * thought were dead at the start of the GC. See the large comment
+             * in beginMarkPhase.
+             */
+            repeatForDeadZone = true;
+            reason = JS::gcreason::COMPARTMENT_REVIVED;
+        }
 
         /*
          * If we reset an existing GC, we need to start a new one. Also, we
@@ -7070,4 +7114,4 @@ js::gc::detail::CellIsMarkedGrayIfKnown(const Cell* cell)
     }
 
     return detail::CellIsMarkedGray(tc);
-}
+}
\ No newline at end of file
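Taken together, the jsgc.cpp changes give dead-compartment collection a bounded retry: a finished GC that still has compartments flagged scheduledForDestruction re-runs once under COMPARTMENT_REVIVED, a reason budgetIncrementalGC() now refuses to run incrementally, while shouldRepeatForDeadZone() guarantees the repeat itself never triggers another. A sketch of that control flow with illustrative stand-ins, not the real driver:

    enum class Reason { Api, CompartmentRevived };

    struct MiniGC {
        bool deadCompartmentsLeft = false;

        // Runs one full GC cycle; returns true if an in-progress incremental
        // collection had to be reset (in which case we must not repeat yet).
        bool runCycle(Reason reason) {
            if (reason == Reason::CompartmentRevived)
                deadCompartmentsLeft = false;  // non-incremental pass collects them
            return false;
        }

        bool shouldRepeatForDeadZone(Reason reason) {
            // A COMPARTMENT_REVIVED GC never triggers a second repeat.
            return reason != Reason::CompartmentRevived && deadCompartmentsLeft;
        }

        void collect(Reason reason) {
            bool repeat = false;
            do {
                bool wasReset = runCycle(reason);
                repeat = !wasReset && shouldRepeatForDeadZone(reason);
                if (repeat)
                    reason = Reason::CompartmentRevived;
            } while (repeat);
        }
    };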
diff --git a/js/src/jsgc.h b/js/src/jsgc.h
index d3cf31fe7..aa42d474c 100644
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -61,7 +61,8 @@ enum class State {
     D(ModeChange) \
     D(MallocBytesTrigger) \
     D(GCBytesTrigger) \
-    D(ZoneChange)
+    D(ZoneChange) \
+    D(CompartmentRevived)
 
 enum class AbortReason {
 #define MAKE_REASON(name) name,
     GC_ABORT_REASONS(MAKE_REASON)
@@ -353,7 +354,6 @@ struct SortedArenaListSegment
  * be treated as an invariant, however, as the free lists may be cleared,
  * leaving arenas previously used for allocation partially full. Sorting order
  * is restored during sweeping.
- * Arenas following the cursor should not be full.
  */
 class ArenaList {
@@ -453,6 +453,11 @@ class ArenaList {
         check();
         return !*cursorp_;
     }
+
+    void moveCursorToEnd() {
+        while (!isCursorAtEnd())
+            cursorp_ = &(*cursorp_)->next;
+    }
 
     // This can return nullptr.
     Arena* arenaAfterCursor() const {
@@ -739,7 +744,7 @@ class ArenaLists
             freeLists[i] = &placeholder;
     }
 
-    inline void prepareForIncrementalGC(JSRuntime* rt);
+    inline void prepareForIncrementalGC();
 
     /* Check if this arena is in use. */
     bool arenaIsInUse(Arena* arena, AllocKind kind) const {
@@ -1504,4 +1509,4 @@ UninlinedIsInsideNursery(const gc::Cell* cell);
 
 } /* namespace js */
 
-#endif /* jsgc_h */
+#endif /* jsgc_h */
\ No newline at end of file
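The new moveCursorToEnd() is what lets the simplified ArenaLists::prepareForIncrementalGC() work: an ArenaList is a singly-linked list plus an insertion point, and pushing the cursor past every existing arena means allocation after the start of an incremental GC uses only fresh arenas, so free cells in existing arenas are not handed out and conservatively kept alive. A simplified version of the data structure (types reduced from the real js/src/jsgc.h):

    struct Arena { Arena* next = nullptr; };

    class ArenaList {
        Arena* head_ = nullptr;
        Arena** cursorp_ = &head_;  // the next allocatable arena is linked here

      public:
        bool isCursorAtEnd() const { return !*cursorp_; }

        // Walk the cursor past every arena so nothing is considered
        // allocatable until a brand-new arena is appended.
        void moveCursorToEnd() {
            while (!isCursorAtEnd())
                cursorp_ = &(*cursorp_)->next;
        }

        Arena* arenaAfterCursor() const { return *cursorp_; }  // may be nullptr
    };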
diff --git a/js/src/jswatchpoint.cpp b/js/src/jswatchpoint.cpp
index 3cf43e219..68afa4a59 100644
--- a/js/src/jswatchpoint.cpp
+++ b/js/src/jswatchpoint.cpp
@@ -185,17 +185,18 @@ WatchpointMap::markAll(JSTracer* trc)
 {
     for (Map::Enum e(map); !e.empty(); e.popFront()) {
         Map::Entry& entry = e.front();
-        WatchKey key = entry.key();
-        WatchKey prior = key;
-        MOZ_ASSERT(JSID_IS_STRING(prior.id) || JSID_IS_INT(prior.id) || JSID_IS_SYMBOL(prior.id));
+        JSObject* object = entry.key().object;
+        jsid id = entry.key().id;
+        JSObject* priorObject = object;
+        jsid priorId = id;
+        MOZ_ASSERT(JSID_IS_STRING(priorId) || JSID_IS_INT(priorId) || JSID_IS_SYMBOL(priorId));
 
-        TraceEdge(trc, const_cast<PreBarrieredObject*>(&key.object),
-                  "held Watchpoint object");
-        TraceEdge(trc, const_cast<PreBarrieredId*>(&key.id), "WatchKey::id");
+        TraceManuallyBarrieredEdge(trc, &object, "held Watchpoint object");
+        TraceManuallyBarrieredEdge(trc, &id, "WatchKey::id");
         TraceEdge(trc, &entry.value().closure, "Watchpoint::closure");
 
-        if (prior.object != key.object || prior.id != key.id)
-            e.rekeyFront(key);
+        if (priorObject != object || priorId != id)
+            e.rekeyFront(WatchKey(object, id));
     }
 }
 
@@ -242,4 +243,4 @@ WatchpointMap::trace(WeakMapTracer* trc)
                           JS::GCCellPtr(entry.key().object.get()),
                           JS::GCCellPtr(entry.value().closure.get()));
     }
-}
+}
\ No newline at end of file
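The markAll() rewrite traces plain copies of the key with manual barriers and rekeys the entry afterwards, instead of tracing (and mutating) the barriered key stored in the table through const_cast. The same copy/compare/rekey pattern over an ordinary map, with a moving tracer simulated by a function that may rewrite the edge (all names here are illustrative):

    #include <cstdint>
    #include <map>

    using Key = std::uintptr_t;

    void TraceManuallyBarrieredEdge(Key* edge) {
        // A moving GC might relocate the pointee and update *edge in place.
    }

    void MarkAll(std::map<Key, int>& table) {
        for (auto it = table.begin(); it != table.end();) {
            Key key = it->first;   // trace a copy, never mutate the live key
            Key prior = key;
            TraceManuallyBarrieredEdge(&key);
            if (key != prior) {
                int value = it->second;  // moved: erase and reinsert (rekey)
                it = table.erase(it);
                table.emplace(key, value);
            } else {
                ++it;
            }
        }
        // Note: the real HashMap::Enum::rekeyFront also guards against
        // revisiting a rekeyed entry during the same enumeration.
    }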