Tab to spaces newline and other such in js code

master
Fedor 2019-07-08 13:07:47 +03:00
parent ca72ef1006
commit 7a74bbaf96
11 changed files with 117 additions and 123 deletions

View File

@ -64,11 +64,11 @@ IdToObjectMap::find(ObjectId id)
JSObject*
IdToObjectMap::findPreserveColor(ObjectId id)
{
- Table::Ptr p = table_.lookup(id);
- if (!p)
- return nullptr;
- return p->value().unbarrieredGet();
- }
+ Table::Ptr p = table_.lookup(id);
+ if (!p)
+ return nullptr;
+ return p->value().unbarrieredGet();
+ }
bool
IdToObjectMap::add(ObjectId id, JSObject* obj)
@ -766,4 +766,4 @@ CPOWManager*
mozilla::jsipc::CPOWManagerFor(PJavaScriptChild* aChild)
{
return static_cast<JavaScriptChild*>(aChild);
- }
+ }

View File

@ -96,7 +96,7 @@ class IdToObjectMap
bool add(ObjectId id, JSObject* obj);
JSObject* find(ObjectId id);
- JSObject* findPreserveColor(ObjectId id);
+ JSObject* findPreserveColor(ObjectId id);
void remove(ObjectId id);
void clear();
@ -234,4 +234,4 @@ class JavaScriptShared : public CPOWManager
} // namespace jsipc
} // namespace mozilla
- #endif
+ #endif

View File

@ -795,4 +795,4 @@ WrapperAnswer::RecvDropObject(const ObjectId& objId)
objects_.remove(objId);
}
return true;
- }
+ }

View File

@ -3003,4 +3003,4 @@ TypeDescr::finalize(FreeOp* fop, JSObject* obj)
TypeDescr& descr = obj->as<TypeDescr>();
if (descr.hasTraceList())
js_free(const_cast<int32_t*>(descr.traceList()));
- }
+ }

View File

@ -901,7 +901,7 @@ class GCRuntime
void requestMajorGC(JS::gcreason::Reason reason);
SliceBudget defaultBudget(JS::gcreason::Reason reason, int64_t millis);
void budgetIncrementalGC(JS::gcreason::Reason reason, SliceBudget& budget,
- AutoLockForExclusiveAccess& lock);
+ AutoLockForExclusiveAccess& lock);
void resetIncrementalGC(AbortReason reason, AutoLockForExclusiveAccess& lock);
// Assert if the system state is such that we should never
@ -916,7 +916,7 @@ class GCRuntime
void collect(bool nonincrementalByAPI, SliceBudget budget, JS::gcreason::Reason reason) JS_HAZ_GC_CALL;
MOZ_MUST_USE bool gcCycle(bool nonincrementalByAPI, SliceBudget& budget,
JS::gcreason::Reason reason);
- bool shouldRepeatForDeadZone(JS::gcreason::Reason reason);
+ bool shouldRepeatForDeadZone(JS::gcreason::Reason reason);
void incrementalCollectSlice(SliceBudget& budget, JS::gcreason::Reason reason,
AutoLockForExclusiveAccess& lock);
@ -1350,4 +1350,4 @@ class MOZ_RAII AutoMaybeStartBackgroundAllocation
} /* namespace js */
#endif
#endif

View File

@ -478,7 +478,7 @@ js::gc::GCRuntime::bufferGrayRoots()
for (GCZonesIter zone(rt); !zone.done(); zone.next())
MOZ_ASSERT(zone->gcGrayRoots.empty());
- gcstats::AutoPhase ap(stats, gcstats::PHASE_BUFFER_GRAY_ROOTS);
+ gcstats::AutoPhase ap(stats, gcstats::PHASE_BUFFER_GRAY_ROOTS);
BufferGrayRootsTracer grayBufferer(rt);
if (JSTraceDataOp op = grayRootTracer.op)
@ -540,4 +540,4 @@ GCRuntime::resetBufferedGrayRoots() const
"Do not clear the gray buffers unless we are Failed or becoming Unused");
for (GCZonesIter zone(rt); !zone.done(); zone.next())
zone->gcGrayRoots.clearAndFree();
- }
+ }

View File

@ -373,18 +373,18 @@ Zone::fixupAfterMovingGC()
bool
Zone::addTypeDescrObject(JSContext* cx, HandleObject obj)
{
- // Type descriptor objects are always tenured so we don't need post barriers
- // on the set.
- MOZ_ASSERT(!IsInsideNursery(obj));
- if (!typeDescrObjects.put(obj)) {
- ReportOutOfMemory(cx);
- return false;
- }
- return true;
+ // Type descriptor objects are always tenured so we don't need post barriers
+ // on the set.
+ MOZ_ASSERT(!IsInsideNursery(obj));
+ if (!typeDescrObjects.put(obj)) {
+ ReportOutOfMemory(cx);
+ return false;
+ }
+ return true;
}
ZoneList::ZoneList()
: head(nullptr), tail(nullptr)
{}
@ -483,4 +483,4 @@ JS_PUBLIC_API(void)
JS::shadow::RegisterWeakCache(JS::Zone* zone, WeakCache<void*>* cachep)
{
zone->registerWeakCache(cachep);
- }
+ }
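
Aside on the addTypeDescrObject hunk above: it follows the engine's fallible-allocation convention, where a failed put() reports the OOM on the context and returns false so the caller can unwind. A minimal sketch of that convention, assuming an invented Context/ObjectRegistry pair and a plain std::unordered_set in place of the real JSContext, GCHashSet and WeakCache machinery:

#include <cstdio>
#include <new>
#include <unordered_set>

// Hypothetical stand-ins, used only to show the report-and-return-false shape;
// the real JSContext and ReportOutOfMemory behave differently.
struct Context { bool oomReported = false; };
static void ReportOutOfMemory(Context* cx) { cx->oomReported = true; }

class ObjectRegistry {
    std::unordered_set<const void*> objects_;
  public:
    // Fallible insertion: on allocation failure, report OOM on the context and
    // return false so the caller can propagate the failure.
    bool put(Context* cx, const void* obj) {
        try {
            objects_.insert(obj);
            return true;
        } catch (const std::bad_alloc&) {
            ReportOutOfMemory(cx);
            return false;
        }
    }
};

int main() {
    Context cx;
    ObjectRegistry registry;
    int dummy = 0;
    std::puts(registry.put(&cx, &dummy) ? "object registered" : "allocation failed");
}
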

View File

@ -350,16 +350,16 @@ struct Zone : public JS::shadow::Zone,
// This is used by the GC to trace them all first when compacting, since the
// TypedObject trace hook may access these objects.
- //
- // There are no barriers here - the set contains only tenured objects so no
- // post-barrier is required, and these are weak references so no pre-barrier
- // is required.
- using TypeDescrObjectSet = js::GCHashSet<JSObject*,
+ //
+ // There are no barriers here - the set contains only tenured objects so no
+ // post-barrier is required, and these are weak references so no pre-barrier
+ // is required.
+ using TypeDescrObjectSet = js::GCHashSet<JSObject*,
js::MovableCellHasher<JSObject*>,
js::SystemAllocPolicy>;
JS::WeakCache<TypeDescrObjectSet> typeDescrObjects;
- bool addTypeDescrObject(JSContext* cx, HandleObject obj);
+ bool addTypeDescrObject(JSContext* cx, HandleObject obj);
// Malloc counter to measure memory pressure for GC scheduling. It runs from
@ -741,4 +741,4 @@ class ZoneAllocPolicy
} // namespace js
- #endif // gc_Zone_h
+ #endif // gc_Zone_h

View File

@ -1527,8 +1527,9 @@ inline void
ArenaLists::prepareForIncrementalGC()
{
purge();
- for (auto i : AllAllocKinds())
+ for (auto i : AllAllocKinds()) {
arenaLists[i].moveCursorToEnd();
+ }
}
/* Compacting GC */
@ -3573,18 +3574,18 @@ RelazifyFunctions(Zone* zone, AllocKind kind)
static bool
ShouldCollectZone(Zone* zone, JS::gcreason::Reason reason)
{
- // Normally we collect all scheduled zones.
- if (reason != JS::gcreason::COMPARTMENT_REVIVED)
- return zone->isGCScheduled();
+ // Normally we collect all scheduled zones.
+ if (reason != JS::gcreason::COMPARTMENT_REVIVED)
+ return zone->isGCScheduled();
- // If we are repeating a GC becuase we noticed dead compartments haven't
- // been collected, then only collect zones contianing those compartments.
- for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
- if (comp->scheduledForDestruction)
- return true;
- }
- return false;
+ // If we are repeating a GC because we noticed dead compartments haven't
+ // been collected, then only collect zones containing those compartments.
+ for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
+ if (comp->scheduledForDestruction)
+ return true;
+ }
+ return false;
}
bool
@ -3648,13 +3649,8 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAcces
* keepAtoms() will only change on the main thread, which we are currently
* on. If the value of keepAtoms() changes between GC slices, then we'll
* cancel the incremental GC. See IsIncrementalGCSafe.
*/
if (isFull && !rt->keepAtoms()) {
Zone* atomsZone = rt->atomsCompartment(lock)->zone();
if (atomsZone->isGCScheduled()) {
@ -3758,11 +3754,11 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAcces
gcstats::AutoPhase ap2(stats, gcstats::PHASE_MARK_ROOTS);
- if (isIncremental) {
- bufferGrayRoots();
- markCompartments();
- }
+ if (isIncremental) {
+ bufferGrayRoots();
+ markCompartments();
+ }
return true;
}
@ -3778,11 +3774,11 @@ GCRuntime::markCompartments()
* (1) the compartment has been entered (set in beginMarkPhase() above)
* (2) the compartment is not being collected (set in beginMarkPhase()
* above)
- * (3) an object in the compartment was marked during root marking, either
- * as a black root or a gray root (set in RootMarking.cpp), or
+ * (3) an object in the compartment was marked during root marking, either
+ * as a black root or a gray root (set in RootMarking.cpp), or
* (4) the compartment has incoming cross-compartment edges from another
- * compartment that has maybeAlive set (set by this method).
- *
+ * compartment that has maybeAlive set (set by this method).
+ *
* If the maybeAlive is false, then we set the scheduledForDestruction flag.
* At the end of the GC, we look for compartments where
* scheduledForDestruction is true. These are compartments that were somehow
@ -3802,35 +3798,35 @@ GCRuntime::markCompartments()
/* Propagate the maybeAlive flag via cross-compartment edges. */
- Vector<JSCompartment*, 0, js::SystemAllocPolicy> workList;
- for (CompartmentsIter comp(rt, SkipAtoms); !comp.done(); comp.next()) {
- if (comp->maybeAlive) {
- if (!workList.append(comp))
- return;
- }
- }
- while (!workList.empty()) {
- JSCompartment* comp = workList.popCopy();
- for (JSCompartment::WrapperEnum e(comp); !e.empty(); e.popFront()) {
+ Vector<JSCompartment*, 0, js::SystemAllocPolicy> workList;
+ for (CompartmentsIter comp(rt, SkipAtoms); !comp.done(); comp.next()) {
+ if (comp->maybeAlive) {
+ if (!workList.append(comp))
+ return;
+ }
+ }
+ while (!workList.empty()) {
+ JSCompartment* comp = workList.popCopy();
+ for (JSCompartment::WrapperEnum e(comp); !e.empty(); e.popFront()) {
if (e.front().key().is<JSString*>())
continue;
JSCompartment* dest = e.front().mutableKey().compartment();
if (dest && !dest->maybeAlive) {
dest->maybeAlive = true;
- if (!workList.append(dest))
- return;
- }
+ if (!workList.append(dest))
+ return;
+ }
}
}
/* Set scheduleForDestruction based on maybeAlive. */
- for (GCCompartmentsIter comp(rt); !comp.done(); comp.next()) {
- MOZ_ASSERT(!comp->scheduledForDestruction);
- if (!comp->maybeAlive && !rt->isAtomsCompartment(comp))
- comp->scheduledForDestruction = true;
+ for (GCCompartmentsIter comp(rt); !comp.done(); comp.next()) {
+ MOZ_ASSERT(!comp->scheduledForDestruction);
+ if (!comp->maybeAlive && !rt->isAtomsCompartment(comp))
+ comp->scheduledForDestruction = true;
}
}
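
The markCompartments() hunks above implement a reachability pass: compartments whose maybeAlive flag was set during root marking seed a worklist, the flag is propagated along cross-compartment edges, and whatever is still not maybeAlive afterwards gets scheduledForDestruction. A self-contained sketch of the same worklist propagation over a toy graph; the Compartment struct and edge vector here are illustrative stand-ins, not the real JSCompartment types:

#include <cstdio>
#include <vector>

// Toy stand-in for a compartment: a liveness flag plus outgoing edges.
struct Compartment {
    bool maybeAlive = false;
    bool scheduledForDestruction = false;
    std::vector<Compartment*> edges;   // wrappers pointing into other compartments
};

// Propagate maybeAlive along edges with an explicit worklist, then schedule
// anything left unreached for destruction -- the shape of GCRuntime::markCompartments.
static void markCompartments(std::vector<Compartment*>& all) {
    std::vector<Compartment*> workList;
    for (Compartment* comp : all) {
        if (comp->maybeAlive)
            workList.push_back(comp);
    }
    while (!workList.empty()) {
        Compartment* comp = workList.back();
        workList.pop_back();
        for (Compartment* dest : comp->edges) {
            if (dest && !dest->maybeAlive) {
                dest->maybeAlive = true;
                workList.push_back(dest);
            }
        }
    }
    for (Compartment* comp : all) {
        if (!comp->maybeAlive)
            comp->scheduledForDestruction = true;
    }
}

int main() {
    Compartment a, b, c;          // a -> b, while c has no incoming edges
    a.maybeAlive = true;          // marked during root marking
    a.edges.push_back(&b);
    std::vector<Compartment*> all{&a, &b, &c};
    markCompartments(all);
    std::printf("b alive: %d, c scheduled for destruction: %d\n",
                b.maybeAlive, c.scheduledForDestruction);
}
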
@ -5513,8 +5509,8 @@ gc::IsIncrementalGCUnsafe(JSRuntime* rt)
{
MOZ_ASSERT(!rt->mainThread.suppressGC);
- if (rt->keepAtoms())
- return gc::AbortReason::KeepAtomsSet;
+ if (rt->keepAtoms())
+ return gc::AbortReason::KeepAtomsSet;
if (!rt->gc.isIncrementalGCAllowed())
return gc::AbortReason::IncrementalDisabled;
@ -5524,15 +5520,15 @@ gc::IsIncrementalGCUnsafe(JSRuntime* rt)
void
GCRuntime::budgetIncrementalGC(JS::gcreason::Reason reason, SliceBudget& budget,
- AutoLockForExclusiveAccess& lock)
+ AutoLockForExclusiveAccess& lock)
{
AbortReason unsafeReason = IsIncrementalGCUnsafe(rt);
- if (unsafeReason == AbortReason::None) {
- if (reason == JS::gcreason::COMPARTMENT_REVIVED)
- unsafeReason = gc::AbortReason::CompartmentRevived;
- else if (mode != JSGC_MODE_INCREMENTAL)
- unsafeReason = gc::AbortReason::ModeChange;
- }
+ if (unsafeReason == AbortReason::None) {
+ if (reason == JS::gcreason::COMPARTMENT_REVIVED)
+ unsafeReason = gc::AbortReason::CompartmentRevived;
+ else if (mode != JSGC_MODE_INCREMENTAL)
+ unsafeReason = gc::AbortReason::ModeChange;
+ }
if (unsafeReason != AbortReason::None) {
resetIncrementalGC(unsafeReason, lock);
@ -5688,10 +5684,6 @@ GCRuntime::gcCycle(bool nonincrementalByAPI, SliceBudget& budget, JS::gcreason::
}
State prevState = incrementalState;
if (nonincrementalByAPI) {
// Reset any in progress incremental GC if this was triggered via the
@ -5799,17 +5791,17 @@ GCRuntime::checkIfGCAllowedInCurrentState(JS::gcreason::Reason reason)
bool
GCRuntime::shouldRepeatForDeadZone(JS::gcreason::Reason reason)
{
- MOZ_ASSERT_IF(reason == JS::gcreason::COMPARTMENT_REVIVED, !isIncremental);
- if (!isIncremental || isIncrementalGCInProgress())
- return false;
- for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
- if (c->scheduledForDestruction)
- return true;
- }
- return false;
+ MOZ_ASSERT_IF(reason == JS::gcreason::COMPARTMENT_REVIVED, !isIncremental);
+ if (!isIncremental || isIncrementalGCInProgress())
+ return false;
+ for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
+ if (c->scheduledForDestruction)
+ return true;
+ }
+ return false;
}
void
@ -5831,21 +5823,21 @@ GCRuntime::collect(bool nonincrementalByAPI, SliceBudget budget, JS::gcreason::R
poked = false;
bool wasReset = gcCycle(nonincrementalByAPI, budget, reason);
- bool repeatForDeadZone = false;
+ bool repeatForDeadZone = false;
if (poked && cleanUpEverything) {
- /* Need to re-schedule all zones for GC. */
+ /* Need to re-schedule all zones for GC. */
JS::PrepareForFullGC(rt->contextFromMainThread());
} else if (shouldRepeatForDeadZone(reason) && !wasReset) {
- /*
- * This code makes an extra effort to collect compartments that we
- * thought were dead at the start of the GC. See the large comment
- * in beginMarkPhase.
- */
- repeatForDeadZone = true;
- reason = JS::gcreason::COMPARTMENT_REVIVED;
- }
+ /*
+ * This code makes an extra effort to collect compartments that we
+ * thought were dead at the start of the GC. See the large comment
+ * in beginMarkPhase.
+ */
+ repeatForDeadZone = true;
+ reason = JS::gcreason::COMPARTMENT_REVIVED;
+ }
/*
@ -7114,4 +7106,4 @@ js::gc::detail::CellIsMarkedGrayIfKnown(const Cell* cell)
}
return detail::CellIsMarkedGray(tc);
- }
+ }
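
The collect() and shouldRepeatForDeadZone() hunks above drive the repeat logic: after a GC cycle finishes, the runtime may run one more pass with the reason switched to COMPARTMENT_REVIVED so compartments that still looked dead can actually be collected. A stripped-down sketch of that control flow, with an invented Reason enum and a stubbed-out cycle in place of the real GCRuntime code:

#include <cstdio>

// Illustrative stand-ins only; the real reasons, budgets and cycle logic live
// in GCRuntime and are far more involved.
enum class Reason { API, COMPARTMENT_REVIVED };

static bool runCycle(Reason reason) {
    std::printf("running GC cycle, reason=%d\n", static_cast<int>(reason));
    return false;                       // corresponds to "wasReset" above
}

// Stub: pretend the first pass noticed compartments that looked dead.
static bool shouldRepeatForDeadZone(Reason reason) {
    return reason != Reason::COMPARTMENT_REVIVED;
}

static void collect(Reason reason) {
    bool repeat = false;
    do {
        bool wasReset = runCycle(reason);
        bool repeatForDeadZone = false;
        if (shouldRepeatForDeadZone(reason) && !wasReset) {
            // Make an extra effort to collect compartments that still looked
            // dead after the first pass, as the comment in the diff describes.
            repeatForDeadZone = true;
            reason = Reason::COMPARTMENT_REVIVED;
        }
        repeat = repeatForDeadZone;
    } while (repeat);
}

int main() { collect(Reason::API); }
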

View File

@ -62,7 +62,7 @@ enum class State {
D(MallocBytesTrigger) \
D(GCBytesTrigger) \
D(ZoneChange) \
- D(CompartmentRevived)
+ D(CompartmentRevived)
enum class AbortReason {
#define MAKE_REASON(name) name,
GC_ABORT_REASONS(MAKE_REASON)
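
The D(CompartmentRevived) line above is an entry in the GC_ABORT_REASONS X-macro list, which the header expands into enum class AbortReason via MAKE_REASON. A small self-contained sketch of the same X-macro technique follows; the entries are limited to reasons that appear in this diff rather than the full list, and the name table is an extra illustration, not something the header defines:

#include <cstdio>

// One central list of reasons; each expansion supplies its own D().
#define GC_ABORT_REASONS(D)    \
    D(None)                    \
    D(MallocBytesTrigger)      \
    D(GCBytesTrigger)          \
    D(ZoneChange)              \
    D(CompartmentRevived)

// Expansion 1: the enum, in the same shape as the header's expansion.
enum class AbortReason {
#define MAKE_REASON(name) name,
    GC_ABORT_REASONS(MAKE_REASON)
#undef MAKE_REASON
};

// Expansion 2: a parallel table of names, handy for logging.
static const char* const abortReasonNames[] = {
#define MAKE_NAME(name) #name,
    GC_ABORT_REASONS(MAKE_NAME)
#undef MAKE_NAME
};

int main() {
    AbortReason r = AbortReason::CompartmentRevived;
    std::printf("abort reason: %s\n", abortReasonNames[static_cast<int>(r)]);
}
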
@ -354,6 +354,7 @@ struct SortedArenaListSegment
* be treated as an invariant, however, as the free lists may be cleared,
* leaving arenas previously used for allocation partially full. Sorting order
* is restored during sweeping.
+ * Arenas following the cursor should not be full.
*/
class ArenaList {
@ -454,10 +455,11 @@ class ArenaList {
return !*cursorp_;
}
- void moveCursorToEnd() {
- while (!isCursorAtEnd())
- cursorp_ = &(*cursorp_)->next;
- }
+ void moveCursorToEnd() {
+ while (!isCursorAtEnd()) {
+ cursorp_ = &(*cursorp_)->next;
+ }
+ }
// This can return nullptr.
Arena* arenaAfterCursor() const {
@ -1509,4 +1511,4 @@ UninlinedIsInsideNursery(const gc::Cell* cell);
} /* namespace js */
- #endif /* jsgc_h */
+ #endif /* jsgc_h */
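
The moveCursorToEnd() change above (braces added around the loop body) walks ArenaList's pointer-to-pointer cursor: cursorp_ points at a next link, and cursorp_ = &(*cursorp_)->next advances it until it reaches the terminating null link. A minimal sketch of that idiom on a toy singly linked list; the Node and CursorList types are illustrative, not the real Arena/ArenaList:

#include <cstdio>

struct Node {
    int value;
    Node* next;
};

// A cursor is a pointer to a next-field (or to the head pointer). Inserting
// through it splices a node in without special-casing the empty list, which is
// the same trick ArenaList uses for its allocation cursor.
struct CursorList {
    Node* head = nullptr;
    Node** cursorp = &head;            // insertion point: initially the head link

    bool isCursorAtEnd() const { return !*cursorp; }

    void moveCursorToEnd() {
        while (!isCursorAtEnd()) {
            cursorp = &(*cursorp)->next;   // step to the following next-link
        }
    }

    void insertAtCursor(Node* n) {
        n->next = *cursorp;
        *cursorp = n;
        cursorp = &n->next;
    }
};

int main() {
    Node a{1, nullptr}, b{2, nullptr}, c{3, nullptr};
    CursorList list;
    list.insertAtCursor(&a);
    list.insertAtCursor(&b);
    list.cursorp = &list.head;         // rewind, then walk back to the end
    list.moveCursorToEnd();
    list.insertAtCursor(&c);
    for (Node* n = list.head; n; n = n->next)
        std::printf("%d ", n->value);
    std::printf("\n");                 // prints: 1 2 3
}
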

View File

@ -187,8 +187,8 @@ WatchpointMap::markAll(JSTracer* trc)
Map::Entry& entry = e.front();
JSObject* object = entry.key().object;
jsid id = entry.key().id;
- JSObject* priorObject = object;
- jsid priorId = id;
+ JSObject* priorObject = object;
+ jsid priorId = id;
MOZ_ASSERT(JSID_IS_STRING(priorId) || JSID_IS_INT(priorId) || JSID_IS_SYMBOL(priorId));
TraceManuallyBarrieredEdge(trc, &object, "held Watchpoint object");
@ -243,4 +243,4 @@ WatchpointMap::trace(WeakMapTracer* trc)
JS::GCCellPtr(entry.key().object.get()),
JS::GCCellPtr(entry.value().closure.get()));
}
- }
+ }