path: root/Source/JavaScriptCore/runtime/Heap.cpp
Diffstat (limited to 'Source/JavaScriptCore/runtime/Heap.cpp')
-rw-r--r--  Source/JavaScriptCore/runtime/Heap.cpp  66
1 file changed, 37 insertions, 29 deletions
diff --git a/Source/JavaScriptCore/runtime/Heap.cpp b/Source/JavaScriptCore/runtime/Heap.cpp
index c52bcfe..e3ef8bb 100644
--- a/Source/JavaScriptCore/runtime/Heap.cpp
+++ b/Source/JavaScriptCore/runtime/Heap.cpp
@@ -46,7 +46,7 @@ Heap::Heap(JSGlobalData* globalData)
, m_markListSet(0)
, m_activityCallback(DefaultGCActivityCallback::create(this))
, m_globalData(globalData)
- , m_machineStackMarker(this)
+ , m_machineThreads(this)
, m_markStack(globalData->jsArrayVPtr)
, m_handleHeap(globalData)
, m_extraCost(0)
@@ -75,6 +75,10 @@ void Heap::destroy()
// (and thus the global data) before other objects that may use the global data.
RefPtr<JSGlobalData> protect(m_globalData);
+#if ENABLE(JIT)
+ m_globalData->jitStubs->clearHostFunctionStubs();
+#endif
+
delete m_markListSet;
m_markListSet = 0;
m_markedSpace.clearMarks();
@@ -97,7 +101,7 @@ void Heap::reportExtraMemoryCostSlowCase(size_t cost)
// if a large value survives one garbage collection, there is not much point to
// collecting more frequently as long as it stays alive.
- if (m_extraCost > maxExtraCost && m_extraCost > m_markedSpace.capacity() / 2)
+ if (m_extraCost > maxExtraCost && m_extraCost > m_markedSpace.highWaterMark() / 2)
collectAllGarbage();
m_extraCost += cost;
}
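
Note: the hunk above rebases the extra-cost trigger on the heap's high water mark rather than its capacity, so a full collection is forced only when the reported extra cost is large both in absolute terms and relative to the heap. A minimal standalone sketch of that heuristic, using assumed names and an assumed threshold rather than the actual Heap interface:

#include <cstddef>

// Sketch only: maxExtraCost and the member names are illustrative, not JSC's.
struct ExtraCostPolicy {
    static const size_t maxExtraCost = 1024 * 1024; // assumed 1 MB floor
    size_t extraCost = 0;

    // True when the caller should run a full collection before accounting
    // for a newly reported cost, mirroring the condition in the hunk above.
    bool shouldCollect(size_t highWaterMark) const
    {
        return extraCost > maxExtraCost && extraCost > highWaterMark / 2;
    }

    void report(size_t cost) { extraCost += cost; }
};
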
@@ -147,11 +151,11 @@ bool Heap::unprotect(JSValue k)
return m_protectedValues.remove(k.asCell());
}
-void Heap::markProtectedObjects(MarkStack& markStack)
+void Heap::markProtectedObjects(HeapRootMarker& heapRootMarker)
{
ProtectCountSet::iterator end = m_protectedValues.end();
for (ProtectCountSet::iterator it = m_protectedValues.begin(); it != end; ++it)
- markStack.deprecatedAppend(&it->first);
+ heapRootMarker.mark(&it->first);
}
void Heap::pushTempSortVector(Vector<ValueStringPair>* tempVector)
@@ -165,7 +169,7 @@ void Heap::popTempSortVector(Vector<ValueStringPair>* tempVector)
m_tempSortingVectors.removeLast();
}
-void Heap::markTempSortVectors(MarkStack& markStack)
+void Heap::markTempSortVectors(HeapRootMarker& heapRootMarker)
{
typedef Vector<Vector<ValueStringPair>* > VectorOfValueStringVectors;
@@ -176,7 +180,7 @@ void Heap::markTempSortVectors(MarkStack& markStack)
Vector<ValueStringPair>::iterator vectorEnd = tempSortingVector->end();
for (Vector<ValueStringPair>::iterator vectorIt = tempSortingVector->begin(); vectorIt != vectorEnd; ++vectorIt) {
if (vectorIt->first)
- markStack.deprecatedAppend(&vectorIt->first);
+ heapRootMarker.mark(&vectorIt->first);
}
}
}
@@ -195,52 +199,52 @@ void Heap::markRoots()
}
#endif
+ void* dummy;
+
ASSERT(m_operationInProgress == NoOperation);
if (m_operationInProgress != NoOperation)
CRASH();
m_operationInProgress = Collection;
- // We gather the conservative set before clearing mark bits, because
+ MarkStack& markStack = m_markStack;
+ HeapRootMarker heapRootMarker(markStack);
+
+ // We gather conservative roots before clearing mark bits because
// conservative gathering uses the mark bits from our last mark pass to
// determine whether a reference is valid.
- ConservativeSet conservativeSet(this);
- m_machineStackMarker.markMachineStackConservatively(conservativeSet);
- conservativeSet.add(registerFile().start(), registerFile().end());
+ ConservativeRoots machineThreadRoots(this);
+ m_machineThreads.gatherConservativeRoots(machineThreadRoots, &dummy);
+
+ ConservativeRoots registerFileRoots(this);
+ registerFile().gatherConservativeRoots(registerFileRoots);
m_markedSpace.clearMarks();
- MarkStack& markStack = m_markStack;
- conservativeSet.mark(markStack);
+ markStack.append(machineThreadRoots);
markStack.drain();
- // Mark explicitly registered roots.
- markProtectedObjects(markStack);
+ markStack.append(registerFileRoots);
markStack.drain();
-
- // Mark temporary vector for Array sorting
- markTempSortVectors(markStack);
+
+ markProtectedObjects(heapRootMarker);
markStack.drain();
- HashSet<GlobalCodeBlock*>::const_iterator end = m_codeBlocks.end();
- for (HashSet<GlobalCodeBlock*>::const_iterator it = m_codeBlocks.begin(); it != end; ++it)
- (*it)->markAggregate(markStack);
+ markTempSortVectors(heapRootMarker);
markStack.drain();
- // Mark misc. other roots.
if (m_markListSet && m_markListSet->size())
- MarkedArgumentBuffer::markLists(markStack, *m_markListSet);
+ MarkedArgumentBuffer::markLists(heapRootMarker, *m_markListSet);
if (m_globalData->exception)
- markStack.append(&m_globalData->exception);
- if (m_globalData->firstStringifierToMark)
- JSONObject::markStringifiers(markStack, m_globalData->firstStringifierToMark);
+ heapRootMarker.mark(&m_globalData->exception);
markStack.drain();
- m_handleHeap.markStrongHandles(markStack);
+ m_handleHeap.markStrongHandles(heapRootMarker);
+ m_handleStack.mark(heapRootMarker);
// Mark the small strings cache last, since it will clear itself if nothing
// else has marked it.
- m_globalData->smallStrings.markChildren(markStack);
+ m_globalData->smallStrings.markChildren(heapRootMarker);
markStack.drain();
markStack.compact();
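
Note: the reordering in this hunk preserves the invariant called out in the comment: conservative roots from the machine threads and register file are gathered before m_markedSpace.clearMarks(), because candidate pointers are validated against the mark bits left by the previous collection. A rough, self-contained sketch of that idea; the type and function names here are illustrative, not the real ConservativeRoots or MachineThreads API:

#include <cstdint>
#include <set>
#include <vector>

// Assumed stand-in for the heap: a storage range plus the mark bits that the
// previous collection left behind.
struct SketchHeap {
    const void* storageBegin;
    const void* storageEnd;
    std::set<const void*> markedLastCycle;

    bool looksLikeLiveCell(const void* p) const
    {
        uintptr_t a = reinterpret_cast<uintptr_t>(p);
        return a >= reinterpret_cast<uintptr_t>(storageBegin)
            && a < reinterpret_cast<uintptr_t>(storageEnd)
            && markedLastCycle.count(p) != 0;
    }
};

// Treat every word in [rangeBegin, rangeEnd) as a candidate pointer and keep
// only the ones that point at a cell the last mark pass considered live.
void gatherConservativeRoots(const SketchHeap& heap, void* const* rangeBegin,
                             void* const* rangeEnd, std::vector<void*>& roots)
{
    for (void* const* it = rangeBegin; it != rangeEnd; ++it) {
        if (heap.looksLikeLiveCell(*it))
            roots.push_back(*it); // conservatively treated as a GC root
    }
}
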
@@ -371,7 +375,7 @@ void Heap::reset(SweepToggle sweepToggle)
m_extraCost = 0;
#if ENABLE(JSC_ZOMBIES)
- sweep();
+ sweepToggle = DoSweep;
#endif
if (sweepToggle == DoSweep) {
@@ -379,7 +383,11 @@ void Heap::reset(SweepToggle sweepToggle)
m_markedSpace.shrink();
}
- size_t proportionalBytes = static_cast<size_t>(1.5 * m_markedSpace.size());
+ // To avoid pathological GC churn in large heaps, we set the allocation high
+ // water mark to be proportional to the current size of the heap. The exact
+ // proportion is a bit arbitrary. A 2X multiplier gives a 1:1 (heap size :
+ // new bytes allocated) proportion, and seems to work well in benchmarks.
+ size_t proportionalBytes = 2 * m_markedSpace.size();
m_markedSpace.setHighWaterMark(max(proportionalBytes, minBytesPerCycle));
JAVASCRIPTCORE_GC_END();
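
Note: the new comment in the last hunk describes the growth policy: after a collection, the allocation high water mark is set to twice the live heap size, so the heap can allocate roughly as many new bytes as it currently retains before the next cycle. A small sketch of that arithmetic, with names assumed for illustration rather than taken from the MarkedSpace interface:

#include <algorithm>
#include <cstddef>

// Sketch of the 2X policy: with 10 MB live after a collection, the high water
// mark becomes 20 MB, leaving room for 10 MB of new allocation, a 1:1 ratio
// of live bytes to newly allocated bytes per cycle.
size_t nextHighWaterMark(size_t liveBytesAfterCollection, size_t minBytesPerCycle)
{
    size_t proportionalBytes = 2 * liveBytesAfterCollection;
    return std::max(proportionalBytes, minBytesPerCycle);
}
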