From 7a74bbaf96535b0e8695153af0b910610843f7f8 Mon Sep 17 00:00:00 2001
From: Fedor
Date: Mon, 8 Jul 2019 13:07:47 +0300
Subject: [PATCH] Convert tabs to spaces, add missing trailing newlines, and
 similar whitespace cleanup in js code

---
 js/ipc/JavaScriptShared.cpp    |  12 +--
 js/ipc/JavaScriptShared.h      |   4 +-
 js/ipc/WrapperAnswer.cpp       |   2 +-
 js/src/builtin/TypedObject.cpp |   2 +-
 js/src/gc/GCRuntime.h          |   6 +-
 js/src/gc/RootMarking.cpp      |   4 +-
 js/src/gc/Zone.cpp             |  24 +++---
 js/src/gc/Zone.h               |  14 +--
 js/src/jsgc.cpp                | 152 ++++++++++++++++-----------------
 js/src/jsgc.h                  |  14 +--
 js/src/jswatchpoint.cpp        |   6 +-
 11 files changed, 117 insertions(+), 123 deletions(-)

diff --git a/js/ipc/JavaScriptShared.cpp b/js/ipc/JavaScriptShared.cpp
index aba02252d..961a3f910 100644
--- a/js/ipc/JavaScriptShared.cpp
+++ b/js/ipc/JavaScriptShared.cpp
@@ -64,11 +64,11 @@ IdToObjectMap::find(ObjectId id)
 JSObject*
 IdToObjectMap::findPreserveColor(ObjectId id)
 {
-	Table::Ptr p = table_.lookup(id);
-	if (!p)
-		return nullptr;
-	return p->value().unbarrieredGet();
- }
+    Table::Ptr p = table_.lookup(id);
+    if (!p)
+        return nullptr;
+    return p->value().unbarrieredGet();
+}
 
 bool
 IdToObjectMap::add(ObjectId id, JSObject* obj)
@@ -766,4 +766,4 @@ CPOWManager*
 mozilla::jsipc::CPOWManagerFor(PJavaScriptChild* aChild)
 {
     return static_cast<JavaScriptChild*>(aChild);
-}
\ No newline at end of file
+}
diff --git a/js/ipc/JavaScriptShared.h b/js/ipc/JavaScriptShared.h
index d0cd4615b..5ecec7429 100644
--- a/js/ipc/JavaScriptShared.h
+++ b/js/ipc/JavaScriptShared.h
@@ -96,7 +96,7 @@ class IdToObjectMap
 
     bool add(ObjectId id, JSObject* obj);
     JSObject* find(ObjectId id);
-	JSObject* findPreserveColor(ObjectId id);
+    JSObject* findPreserveColor(ObjectId id);
     void remove(ObjectId id);
     void clear();
 
@@ -234,4 +234,4 @@ class JavaScriptShared : public CPOWManager
 } // namespace jsipc
 } // namespace mozilla
 
-#endif
\ No newline at end of file
+#endif
diff --git a/js/ipc/WrapperAnswer.cpp b/js/ipc/WrapperAnswer.cpp
index 6ae68e01f..563f8f90d 100644
--- a/js/ipc/WrapperAnswer.cpp
+++ b/js/ipc/WrapperAnswer.cpp
@@ -795,4 +795,4 @@ WrapperAnswer::RecvDropObject(const ObjectId& objId)
         objects_.remove(objId);
     }
     return true;
-}
\ No newline at end of file
+}
diff --git a/js/src/builtin/TypedObject.cpp b/js/src/builtin/TypedObject.cpp
index 95704ee46..0dfc1123a 100644
--- a/js/src/builtin/TypedObject.cpp
+++ b/js/src/builtin/TypedObject.cpp
@@ -3003,4 +3003,4 @@ TypeDescr::finalize(FreeOp* fop, JSObject* obj)
     TypeDescr& descr = obj->as<TypeDescr>();
     if (descr.hasTraceList())
         js_free(const_cast<int32_t*>(descr.traceList()));
-}
\ No newline at end of file
+}
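The interesting part of the findPreserveColor() hunk is unbarrieredGet(): find() reads through the normal barrier, which may turn a gray object black before handing it out, while findPreserveColor() deliberately leaves the mark color untouched, for callers that must not perturb GC state. Below is a minimal toy model of that contrast only; Color, Obj, and HeapPtr are invented stand-ins for illustration, not SpiderMonkey's real barrier types.

    #include <cassert>

    enum class Color { Gray, Black };          // simplified GC mark colors

    struct Obj { Color color = Color::Gray; };

    struct HeapPtr {
        Obj* ptr = nullptr;
        // Barriered read: an object handed to arbitrary callers must be
        // exposed first, which turns gray into black.
        Obj* get() {
            if (ptr && ptr->color == Color::Gray)
                ptr->color = Color::Black;
            return ptr;
        }
        // findPreserveColor()-style read: skip the barrier and preserve
        // the color; only safe when the pointer does not escape.
        Obj* unbarrieredGet() { return ptr; }
    };

    int main() {
        Obj o;
        HeapPtr p;
        p.ptr = &o;
        assert(p.unbarrieredGet()->color == Color::Gray);  // color untouched
        assert(p.get()->color == Color::Black);            // read barrier fired
    }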
diff --git a/js/src/gc/GCRuntime.h b/js/src/gc/GCRuntime.h
index 16260a4e3..f102e9ef0 100644
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -901,7 +901,7 @@ class GCRuntime
     void requestMajorGC(JS::gcreason::Reason reason);
     SliceBudget defaultBudget(JS::gcreason::Reason reason, int64_t millis);
     void budgetIncrementalGC(JS::gcreason::Reason reason, SliceBudget& budget,
-			     AutoLockForExclusiveAccess& lock);
+                             AutoLockForExclusiveAccess& lock);
     void resetIncrementalGC(AbortReason reason, AutoLockForExclusiveAccess& lock);
 
     // Assert if the system state is such that we should never
@@ -916,7 +916,7 @@ class GCRuntime
     void collect(bool nonincrementalByAPI, SliceBudget budget, JS::gcreason::Reason reason) JS_HAZ_GC_CALL;
     MOZ_MUST_USE bool gcCycle(bool nonincrementalByAPI, SliceBudget& budget, JS::gcreason::Reason reason);
-	bool shouldRepeatForDeadZone(JS::gcreason::Reason reason);
+    bool shouldRepeatForDeadZone(JS::gcreason::Reason reason);
     void incrementalCollectSlice(SliceBudget& budget, JS::gcreason::Reason reason,
                                  AutoLockForExclusiveAccess& lock);
 
@@ -1350,4 +1350,4 @@ class MOZ_RAII AutoMaybeStartBackgroundAllocation
 
 } /* namespace js */
 
-#endif
\ No newline at end of file
+#endif
diff --git a/js/src/gc/RootMarking.cpp b/js/src/gc/RootMarking.cpp
index ed7b8fb6f..f5969bc1f 100644
--- a/js/src/gc/RootMarking.cpp
+++ b/js/src/gc/RootMarking.cpp
@@ -478,7 +478,7 @@ js::gc::GCRuntime::bufferGrayRoots()
     for (GCZonesIter zone(rt); !zone.done(); zone.next())
         MOZ_ASSERT(zone->gcGrayRoots.empty());
 
-	gcstats::AutoPhase ap(stats, gcstats::PHASE_BUFFER_GRAY_ROOTS);
+    gcstats::AutoPhase ap(stats, gcstats::PHASE_BUFFER_GRAY_ROOTS);
 
     BufferGrayRootsTracer grayBufferer(rt);
     if (JSTraceDataOp op = grayRootTracer.op)
@@ -540,4 +540,4 @@ GCRuntime::resetBufferedGrayRoots() const
            "Do not clear the gray buffers unless we are Failed or becoming Unused");
     for (GCZonesIter zone(rt); !zone.done(); zone.next())
         zone->gcGrayRoots.clearAndFree();
-}
\ No newline at end of file
+}
diff --git a/js/src/gc/Zone.cpp b/js/src/gc/Zone.cpp
index ecfb9a38c..f0cdde012 100644
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -373,18 +373,18 @@ Zone::fixupAfterMovingGC()
 bool
 Zone::addTypeDescrObject(JSContext* cx, HandleObject obj)
 {
-	// Type descriptor objects are always tenured so we don't need post barriers
-	// on the set.
-	MOZ_ASSERT(!IsInsideNursery(obj));
-
-	if (!typeDescrObjects.put(obj)) {
-		ReportOutOfMemory(cx);
-		return false;
-	}
-
-	return true;
+    // Type descriptor objects are always tenured so we don't need post barriers
+    // on the set.
+    MOZ_ASSERT(!IsInsideNursery(obj));
+
+    if (!typeDescrObjects.put(obj)) {
+        ReportOutOfMemory(cx);
+        return false;
+    }
+
+    return true;
 }
- 
+
 ZoneList::ZoneList()
   : head(nullptr), tail(nullptr)
 {}
@@ -483,4 +483,4 @@ JS_PUBLIC_API(void)
 JS::shadow::RegisterWeakCache(JS::Zone* zone, WeakCache<void*>* cachep)
 {
     zone->registerWeakCache(cachep);
-}
\ No newline at end of file
+}
diff --git a/js/src/gc/Zone.h b/js/src/gc/Zone.h
index 24f4648f7..c8520ed55 100644
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -350,16 +350,16 @@ struct Zone : public JS::shadow::Zone,
     // This is used by the GC to trace them all first when compacting, since the
     // TypedObject trace hook may access these objects.
-	//
-	// There are no barriers here - the set contains only tenured objects so no
-	// post-barrier is required, and these are weak references so no pre-barrier
-	// is required.
-	using TypeDescrObjectSet = js::GCHashSet<JSObject*,
-						 js::MovableCellHasher<JSObject*>,
-						 js::SystemAllocPolicy>;
+    //
+    // There are no barriers here - the set contains only tenured objects so no
+    // post-barrier is required, and these are weak references so no pre-barrier
+    // is required.
+    using TypeDescrObjectSet = js::GCHashSet<JSObject*,
+                                             js::MovableCellHasher<JSObject*>,
+                                             js::SystemAllocPolicy>;
     JS::WeakCache<TypeDescrObjectSet> typeDescrObjects;
 
-	bool addTypeDescrObject(JSContext* cx, HandleObject obj);
+    bool addTypeDescrObject(JSContext* cx, HandleObject obj);
 
 
     // Malloc counter to measure memory pressure for GC scheduling. It runs from
@@ -741,4 +741,4 @@ class ZoneAllocPolicy
 
 } // namespace js
 
-#endif // gc_Zone_h
\ No newline at end of file
+#endif // gc_Zone_h
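The comment re-indented in the Zone.h hunk carries the design argument for typeDescrObjects: entries are weak references, so no pre-barrier is needed, and only tenured objects are inserted (addTypeDescrObject asserts this), so no post-barrier is needed either; the one GC hook such a set needs is a sweep that drops dead entries. A minimal sketch of that sweep contract, where Cell, marked, and WeakCellSet are invented stand-ins rather than the real GCHashSet/WeakCache machinery:

    #include <unordered_set>

    struct Cell { bool marked = false; };  // stand-in for a tenured GC thing

    struct WeakCellSet {
        std::unordered_set<Cell*> set;

        bool put(Cell* c) { return set.insert(c).second; }

        // Run by the collector after marking: weak entries keep nothing
        // alive, so unmarked cells are simply dropped from the set.
        void sweep() {
            for (auto it = set.begin(); it != set.end(); ) {
                if (!(*it)->marked)
                    it = set.erase(it);
                else
                    ++it;
            }
        }
    };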
diff --git a/js/src/jsgc.cpp b/js/src/jsgc.cpp
index 1e8e4fc8d..194468c5d 100644
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1527,8 +1527,9 @@ inline void
 ArenaLists::prepareForIncrementalGC()
 {
     purge();
-    for (auto i : AllAllocKinds())
+    for (auto i : AllAllocKinds()) {
         arenaLists[i].moveCursorToEnd();
+    }
 }
 
 /* Compacting GC */
@@ -3573,18 +3574,18 @@ RelazifyFunctions(Zone* zone, AllocKind kind)
 static bool
 ShouldCollectZone(Zone* zone, JS::gcreason::Reason reason)
 {
-	// Normally we collect all scheduled zones.
-	if (reason != JS::gcreason::COMPARTMENT_REVIVED)
-		return zone->isGCScheduled();
-
-	// If we are repeating a GC becuase we noticed dead compartments haven't
-	// been collected, then only collect zones contianing those compartments.
-	for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
-		if (comp->scheduledForDestruction)
-			return true;
-	}
-
-	return false;
+    // Normally we collect all scheduled zones.
+    if (reason != JS::gcreason::COMPARTMENT_REVIVED)
+        return zone->isGCScheduled();
+
+    // If we are repeating a GC because we noticed dead compartments haven't
+    // been collected, then only collect zones containing those compartments.
+    for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
+        if (comp->scheduledForDestruction)
+            return true;
+    }
+
+    return false;
 }
 
 bool
@@ -3648,13 +3649,8 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAcces
      * keepAtoms() will only change on the main thread, which we are currently
      * on. If the value of keepAtoms() changes between GC slices, then we'll
      * cancel the incremental GC. See IsIncrementalGCSafe.
-
-
-
-
-
      */
+
     if (isFull && !rt->keepAtoms()) {
         Zone* atomsZone = rt->atomsCompartment(lock)->zone();
         if (atomsZone->isGCScheduled()) {
@@ -3758,11 +3754,11 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAcces
 
     gcstats::AutoPhase ap2(stats, gcstats::PHASE_MARK_ROOTS);
 
-	if (isIncremental) {
-		bufferGrayRoots();
-		markCompartments();
-	}
-
+    if (isIncremental) {
+        bufferGrayRoots();
+        markCompartments();
+    }
+
     return true;
 }
@@ -3778,11 +3774,11 @@ GCRuntime::markCompartments()
      * (1) the compartment has been entered (set in beginMarkPhase() above)
      * (2) the compartment is not being collected (set in beginMarkPhase()
      *     above)
-     * (3) an object in the compartment was marked during root marking, either 
-     *     as a black root or a gray root (set in RootMarking.cpp), or 
+     * (3) an object in the compartment was marked during root marking, either
+     *     as a black root or a gray root (set in RootMarking.cpp), or
      * (4) the compartment has incoming cross-compartment edges from another
-     *     compartment that has maybeAlive set (set by this method). 
-     * 
+     *     compartment that has maybeAlive set (set by this method).
+     *
      * If the maybeAlive is false, then we set the scheduledForDestruction flag.
      * At the end of the GC, we look for compartments where
     * scheduledForDestruction is true. These are compartments that were somehow
@@ -3802,35 +3798,35 @@ GCRuntime::markCompartments()
 
     /* Propagate the maybeAlive flag via cross-compartment edges. */
 
-	Vector<JSCompartment*, 0, js::SystemAllocPolicy> workList;
-
-	for (CompartmentsIter comp(rt, SkipAtoms); !comp.done(); comp.next()) {
-		if (comp->maybeAlive) {
-			if (!workList.append(comp))
-				return;
-		}
-	}
-	while (!workList.empty()) {
-		JSCompartment* comp = workList.popCopy();
-		for (JSCompartment::WrapperEnum e(comp); !e.empty(); e.popFront()) {
+    Vector<JSCompartment*, 0, js::SystemAllocPolicy> workList;
+
+    for (CompartmentsIter comp(rt, SkipAtoms); !comp.done(); comp.next()) {
+        if (comp->maybeAlive) {
+            if (!workList.append(comp))
+                return;
+        }
+    }
+    while (!workList.empty()) {
+        JSCompartment* comp = workList.popCopy();
+        for (JSCompartment::WrapperEnum e(comp); !e.empty(); e.popFront()) {
             if (e.front().key().is<JSString*>())
                 continue;
             JSCompartment* dest = e.front().mutableKey().compartment();
             if (dest && !dest->maybeAlive) {
                 dest->maybeAlive = true;
-				if (!workList.append(dest))
-					return;
-			}
+                if (!workList.append(dest))
+                    return;
+            }
         }
     }
 
     /* Set scheduleForDestruction based on maybeAlive. */
-	for (GCCompartmentsIter comp(rt); !comp.done(); comp.next()) {
-		MOZ_ASSERT(!comp->scheduledForDestruction);
-		if (!comp->maybeAlive && !rt->isAtomsCompartment(comp))
-			comp->scheduledForDestruction = true;
+    for (GCCompartmentsIter comp(rt); !comp.done(); comp.next()) {
+        MOZ_ASSERT(!comp->scheduledForDestruction);
+        if (!comp->maybeAlive && !rt->isAtomsCompartment(comp))
+            comp->scheduledForDestruction = true;
     }
 }
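Stripped of GC details, the workList code in the markCompartments() hunk above is a standard reachability pass: seed the set with compartments already known to be alive, propagate maybeAlive across cross-compartment edges until a fixed point, then schedule everything still unmarked for destruction. A self-contained sketch of that shape, with Node and edges as invented stand-ins for JSCompartment and its wrapper map:

    #include <vector>

    struct Node {
        bool maybeAlive = false;
        bool scheduledForDestruction = false;
        std::vector<Node*> edges;  // outgoing cross-compartment edges
    };

    void markCompartmentsSketch(std::vector<Node*>& all) {
        // Seed: nodes already known reachable (entered, rooted, ...).
        std::vector<Node*> workList;
        for (Node* n : all)
            if (n->maybeAlive)
                workList.push_back(n);

        // Propagate liveness across edges until a fixed point.
        while (!workList.empty()) {
            Node* n = workList.back();
            workList.pop_back();
            for (Node* dest : n->edges) {
                if (!dest->maybeAlive) {
                    dest->maybeAlive = true;
                    workList.push_back(dest);
                }
            }
        }

        // Anything never reached is only kept alive by dead references.
        for (Node* n : all)
            if (!n->maybeAlive)
                n->scheduledForDestruction = true;
    }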
@@ -5513,8 +5509,8 @@ gc::IsIncrementalGCUnsafe(JSRuntime* rt)
 {
     MOZ_ASSERT(!rt->mainThread.suppressGC);
 
-	if (rt->keepAtoms())
-		return gc::AbortReason::KeepAtomsSet;
+    if (rt->keepAtoms())
+        return gc::AbortReason::KeepAtomsSet;
 
     if (!rt->gc.isIncrementalGCAllowed())
         return gc::AbortReason::IncrementalDisabled;
@@ -5524,15 +5520,15 @@ gc::IsIncrementalGCUnsafe(JSRuntime* rt)
 
 void
 GCRuntime::budgetIncrementalGC(JS::gcreason::Reason reason, SliceBudget& budget,
-			       AutoLockForExclusiveAccess& lock)
+                               AutoLockForExclusiveAccess& lock)
 {
     AbortReason unsafeReason = IsIncrementalGCUnsafe(rt);
-    if (unsafeReason == AbortReason::None) {
-	if (reason == JS::gcreason::COMPARTMENT_REVIVED)
-	    unsafeReason = gc::AbortReason::CompartmentRevived;
-	else if (mode != JSGC_MODE_INCREMENTAL)
-	    unsafeReason = gc::AbortReason::ModeChange;
-    }
+    if (unsafeReason == AbortReason::None) {
+        if (reason == JS::gcreason::COMPARTMENT_REVIVED)
+            unsafeReason = gc::AbortReason::CompartmentRevived;
+        else if (mode != JSGC_MODE_INCREMENTAL)
+            unsafeReason = gc::AbortReason::ModeChange;
+    }
 
     if (unsafeReason != AbortReason::None) {
         resetIncrementalGC(unsafeReason, lock);
@@ -5688,10 +5684,6 @@ GCRuntime::gcCycle(bool nonincrementalByAPI, SliceBudget& budget, JS::gcreason::
     }
 
     State prevState = incrementalState;
-
-
-
-
     if (nonincrementalByAPI) {
         // Reset any in progress incremental GC if this was triggered via the
@@ -5799,17 +5791,17 @@ GCRuntime::checkIfGCAllowedInCurrentState(JS::gcreason::Reason reason)
 bool
 GCRuntime::shouldRepeatForDeadZone(JS::gcreason::Reason reason)
 {
-	MOZ_ASSERT_IF(reason == JS::gcreason::COMPARTMENT_REVIVED, !isIncremental);
-
-	if (!isIncremental || isIncrementalGCInProgress())
-		return false;
-
-	for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
-		if (c->scheduledForDestruction)
-			return true;
-	}
-
-	return false;
+    MOZ_ASSERT_IF(reason == JS::gcreason::COMPARTMENT_REVIVED, !isIncremental);
+
+    if (!isIncremental || isIncrementalGCInProgress())
+        return false;
+
+    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
+        if (c->scheduledForDestruction)
+            return true;
+    }
+
+    return false;
 }
 
 void
@@ -5831,21 +5823,21 @@ GCRuntime::collect(bool nonincrementalByAPI, SliceBudget budget, JS::gcreason::R
         poked = false;
         bool wasReset = gcCycle(nonincrementalByAPI, budget, reason);
 
-	bool repeatForDeadZone = false;
+        bool repeatForDeadZone = false;
         if (poked && cleanUpEverything) {
-	    /* Need to re-schedule all zones for GC. */
+            /* Need to re-schedule all zones for GC. */
             JS::PrepareForFullGC(rt->contextFromMainThread());
         } else if (shouldRepeatForDeadZone(reason) && !wasReset) {
-	    /*
-	     * This code makes an extra effort to collect compartments that we
-	     * thought were dead at the start of the GC. See the large comment
-	     * in beginMarkPhase.
-	     */
-	    repeatForDeadZone = true;
-	    reason = JS::gcreason::COMPARTMENT_REVIVED;
-	}
+            /*
+             * This code makes an extra effort to collect compartments that we
+             * thought were dead at the start of the GC. See the large comment
+             * in beginMarkPhase.
+             */
+            repeatForDeadZone = true;
+            reason = JS::gcreason::COMPARTMENT_REVIVED;
+        }
 
     /*
@@ -7114,4 +7106,4 @@ js::gc::detail::CellIsMarkedGrayIfKnown(const Cell* cell)
     }
 
     return detail::CellIsMarkedGray(tc);
-}
\ No newline at end of file
+}
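The jsgc.cpp hunks above cooperate in one mechanism: shouldRepeatForDeadZone() notices compartments that were expected to die but survived, collect() then repeats the cycle with reason COMPARTMENT_REVIVED, budgetIncrementalGC() maps that reason to AbortReason::CompartmentRevived so the repeat is forced non-incremental, and ShouldCollectZone() limits it to zones containing the revived compartments. A rough sketch of that driver loop; ToyGC and its members are simplified stand-ins, not the real GCRuntime API:

    enum class Reason { API, COMPARTMENT_REVIVED };

    struct ToyGC {
        bool poked = false;
        bool cleanUpEverything = false;

        bool gcCycle(Reason) { return false; }                 // returns wasReset
        bool shouldRepeatForDeadZone(Reason) { return false; } // dead compartments left?
        void prepareForFullGC() {}

        void collect(Reason reason) {
            bool repeat = false;
            do {
                poked = false;
                bool wasReset = gcCycle(reason);

                bool repeatForDeadZone = false;
                if (poked && cleanUpEverything) {
                    // a zone was touched mid-GC: schedule everything again
                    prepareForFullGC();
                } else if (shouldRepeatForDeadZone(reason) && !wasReset) {
                    // one extra cycle for compartments revived during this GC
                    repeatForDeadZone = true;
                    reason = Reason::COMPARTMENT_REVIVED;
                }
                repeat = (poked && cleanUpEverything) || repeatForDeadZone;
            } while (repeat);
        }
    };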
diff --git a/js/src/jsgc.h b/js/src/jsgc.h
index aa42d474c..952fd6bae 100644
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -62,7 +62,7 @@ enum class State {
     D(MallocBytesTrigger)                       \
     D(GCBytesTrigger)                           \
     D(ZoneChange)                               \
-    D(CompartmentRevived) 
+    D(CompartmentRevived)
 enum class AbortReason {
 #define MAKE_REASON(name) name,
     GC_ABORT_REASONS(MAKE_REASON)
@@ -354,6 +354,7 @@ struct SortedArenaListSegment
  * be treated as an invariant, however, as the free lists may be cleared,
  * leaving arenas previously used for allocation partially full. Sorting order
  * is restored during sweeping.
+ * Arenas following the cursor should not be full.
  */
 class ArenaList
 {
@@ -454,10 +455,11 @@ class ArenaList
     {
         return !*cursorp_;
     }
-	void moveCursorToEnd() {
-		while (!isCursorAtEnd())
-			cursorp_ = &(*cursorp_)->next;
-	}
+    void moveCursorToEnd() {
+        while (!isCursorAtEnd()) {
+            cursorp_ = &(*cursorp_)->next;
+        }
+    }
 
     // This can return nullptr.
     Arena* arenaAfterCursor() const {
@@ -1509,4 +1511,4 @@ UninlinedIsInsideNursery(const gc::Cell* cell);
 
 } /* namespace js */
 
-#endif /* jsgc_h */
\ No newline at end of file
+#endif /* jsgc_h */
diff --git a/js/src/jswatchpoint.cpp b/js/src/jswatchpoint.cpp
index 68afa4a59..e37323555 100644
--- a/js/src/jswatchpoint.cpp
+++ b/js/src/jswatchpoint.cpp
@@ -187,8 +187,8 @@ WatchpointMap::markAll(JSTracer* trc)
         Map::Entry& entry = e.front();
         JSObject* object = entry.key().object;
         jsid id = entry.key().id;
-	JSObject* priorObject = object;
-	jsid priorId = id;
+        JSObject* priorObject = object;
+        jsid priorId = id;
         MOZ_ASSERT(JSID_IS_STRING(priorId) || JSID_IS_INT(priorId) || JSID_IS_SYMBOL(priorId));
 
         TraceManuallyBarrieredEdge(trc, &object, "held Watchpoint object");
@@ -243,4 +243,4 @@ WatchpointMap::trace(WeakMapTracer* trc)
                        JS::GCCellPtr(entry.key().object.get()),
                        JS::GCCellPtr(entry.value().closure.get()));
     }
-}
\ No newline at end of file
+}
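Finally, the jsgc.h hunks document the ArenaList cursor invariant that prepareForIncrementalGC() (first jsgc.cpp hunk) relies on: arenas before the cursor are full, arenas after it have free space, and the cursor is a pointer-to-pointer so insertion at the cursor is O(1). Moving the cursor to the end makes every arena count as full, so the allocator will not hand out free cells from existing arenas while an incremental GC is marking. A toy version of that walk; Arena and ToyArenaList are illustrative stand-ins for the real types:

    #include <cassert>

    struct Arena {
        Arena* next = nullptr;
    };

    class ToyArenaList {
        Arena* head_ = nullptr;
        Arena** cursorp_ = &head_;  // link before the first arena with free space

      public:
        explicit ToyArenaList(Arena* head) : head_(head), cursorp_(&head_) {}

        bool isCursorAtEnd() const { return !*cursorp_; }

        // Same loop as the patched moveCursorToEnd(): walk the links until
        // the cursor sits on the null terminator, so every arena is treated
        // as "before the cursor", i.e. unavailable for allocation.
        void moveCursorToEnd() {
            while (!isCursorAtEnd()) {
                cursorp_ = &(*cursorp_)->next;
            }
        }

        Arena* arenaAfterCursor() const { return *cursorp_; }
    };

    int main() {
        Arena a, b;
        a.next = &b;
        ToyArenaList list(&a);
        assert(list.arenaAfterCursor() == &a);  // arenas after cursor have space
        list.moveCursorToEnd();
        assert(list.isCursorAtEnd());           // all arenas now on the "full" side
        assert(list.arenaAfterCursor() == nullptr);
    }

The pointer-to-pointer representation is also why moveCursorToEnd() needs no special case for an empty list: cursorp_ starts at &head_, which is already the null terminator when the list holds no arenas.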