diff options
author | Ben Murdoch <benm@google.com> | 2011-06-02 12:07:03 +0100 |
---|---|---|
committer | Ben Murdoch <benm@google.com> | 2011-06-10 10:47:21 +0100 |
commit | 2daae5fd11344eaa88a0d92b0f6d65f8d2255c00 (patch) | |
tree | e4964fbd1cb70599f7718ff03e50ea1dab33890b /Source/JavaScriptCore/heap | |
parent | 87bdf0060a247bfbe668342b87e0874182e0ffa9 (diff) | |
download | external_webkit-2daae5fd11344eaa88a0d92b0f6d65f8d2255c00.zip external_webkit-2daae5fd11344eaa88a0d92b0f6d65f8d2255c00.tar.gz external_webkit-2daae5fd11344eaa88a0d92b0f6d65f8d2255c00.tar.bz2 |
Merge WebKit at r84325: Initial merge by git.
Change-Id: Ic1a909300ecc0a13ddc6b4e784371d2ac6e3d59b
Diffstat (limited to 'Source/JavaScriptCore/heap')
24 files changed, 3951 insertions, 0 deletions
diff --git a/Source/JavaScriptCore/heap/ConservativeRoots.cpp b/Source/JavaScriptCore/heap/ConservativeRoots.cpp new file mode 100644 index 0000000..1aad779 --- /dev/null +++ b/Source/JavaScriptCore/heap/ConservativeRoots.cpp @@ -0,0 +1,58 @@ +/* + * Copyright (C) 2011 Apple Inc. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS + * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF + * THE POSSIBILITY OF SUCH DAMAGE. + */ + +#include "config.h" +#include "ConservativeRoots.h" + +namespace JSC { + +inline bool isPointerAligned(void* p) +{ + return !((intptr_t)(p) & (sizeof(char*) - 1)); +} + +void ConservativeRoots::grow() +{ + size_t newCapacity = m_capacity == inlineCapacity ? 
nonInlineCapacity : m_capacity * 2; + JSCell** newRoots = static_cast<JSCell**>(OSAllocator::reserveAndCommit(newCapacity * sizeof(JSCell*))); + memcpy(newRoots, m_roots, m_size * sizeof(JSCell*)); + if (m_roots != m_inlineRoots) + OSAllocator::decommitAndRelease(m_roots, m_capacity * sizeof(JSCell*)); + m_capacity = newCapacity; + m_roots = newRoots; +} + +void ConservativeRoots::add(void* begin, void* end) +{ + ASSERT(begin <= end); + ASSERT((static_cast<char*>(end) - static_cast<char*>(begin)) < 0x1000000); + ASSERT(isPointerAligned(begin)); + ASSERT(isPointerAligned(end)); + + for (char** it = static_cast<char**>(begin); it != static_cast<char**>(end); ++it) + add(*it); +} + +} // namespace JSC diff --git a/Source/JavaScriptCore/heap/ConservativeRoots.h b/Source/JavaScriptCore/heap/ConservativeRoots.h new file mode 100644 index 0000000..d078606 --- /dev/null +++ b/Source/JavaScriptCore/heap/ConservativeRoots.h @@ -0,0 +1,101 @@ +/* + * Copyright (C) 2009 Apple Inc. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY + * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. 
OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY + * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +#ifndef ConservativeRoots_h +#define ConservativeRoots_h + +#include "Heap.h" +#include <wtf/OSAllocator.h> +#include <wtf/Vector.h> + +namespace JSC { + +class JSCell; +class Heap; + +// May contain duplicates. + +class ConservativeRoots { +public: + ConservativeRoots(Heap*); + ~ConservativeRoots(); + + void add(void*); + void add(void* begin, void* end); + + size_t size(); + JSCell** roots(); + +private: + static const size_t inlineCapacity = 128; + static const size_t nonInlineCapacity = 8192 / sizeof(JSCell*); + + void grow(); + + Heap* m_heap; + JSCell** m_roots; + size_t m_size; + size_t m_capacity; + JSCell* m_inlineRoots[inlineCapacity]; +}; + +inline ConservativeRoots::ConservativeRoots(Heap* heap) + : m_heap(heap) + , m_roots(m_inlineRoots) + , m_size(0) + , m_capacity(inlineCapacity) +{ +} + +inline ConservativeRoots::~ConservativeRoots() +{ + if (m_roots != m_inlineRoots) + OSAllocator::decommitAndRelease(m_roots, m_capacity * sizeof(JSCell*)); +} + +inline void ConservativeRoots::add(void* p) +{ + if (!m_heap->contains(p)) + return; + + if (m_size == m_capacity) + grow(); + + m_roots[m_size++] = reinterpret_cast<JSCell*>(p); +} + +inline size_t ConservativeRoots::size() +{ + return m_size; +} + +inline JSCell** ConservativeRoots::roots() +{ + return m_roots; +} + +} // namespace JSC + +#endif // ConservativeRoots_h diff --git a/Source/JavaScriptCore/heap/Handle.h b/Source/JavaScriptCore/heap/Handle.h new file mode 100644 index 0000000..531d535 --- 
/dev/null +++ b/Source/JavaScriptCore/heap/Handle.h @@ -0,0 +1,219 @@ +/* + * Copyright (C) 2011 Apple Inc. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS + * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF + * THE POSSIBILITY OF SUCH DAMAGE. + */ + +#ifndef Handle_h +#define Handle_h + +#include "WriteBarrier.h" +#include <wtf/Assertions.h> + +namespace JSC { + +/* + A Handle is a smart pointer that updates automatically when the garbage + collector moves the object to which it points. + + The base Handle class represents a temporary reference to a pointer whose + lifetime is guaranteed by something else. 
+*/ + +template <class T> class Handle; + +// Creating a JSValue Handle is invalid +template <> class Handle<JSValue>; + +// Forward declare WeakGCMap +template<typename KeyType, typename MappedType, typename FinalizerCallback, typename HashArg, typename KeyTraitsArg> class WeakGCMap; + +class HandleBase { + friend class HandleHeap; + friend struct JSCallbackObjectData; + template <typename KeyType, typename MappedType, typename FinalizerCallback, typename HashArg, typename KeyTraitsArg> friend class WeakGCMap; + +public: + bool operator!() const { return !m_slot || !*m_slot; } + + // This conversion operator allows implicit conversion to bool but not to other integer types. + typedef JSValue (HandleBase::*UnspecifiedBoolType); + operator UnspecifiedBoolType*() const { return (m_slot && *m_slot) ? reinterpret_cast<UnspecifiedBoolType*>(1) : 0; } + +protected: + HandleBase(HandleSlot slot) + : m_slot(slot) + { + } + + void swap(HandleBase& other) { std::swap(m_slot, other.m_slot); } + + HandleSlot slot() const { return m_slot; } + void setSlot(HandleSlot slot) + { + m_slot = slot; + } + +private: + HandleSlot m_slot; +}; + +template <typename T> struct HandleTypes { + typedef T* ExternalType; + static ExternalType getFromSlot(HandleSlot slot) { return (slot && *slot) ? reinterpret_cast<ExternalType>(slot->asCell()) : 0; } + static JSValue toJSValue(T* cell) { return reinterpret_cast<JSCell*>(cell); } + template <typename U> static void validateUpcast() { T* temp; temp = (U*)0; } +}; + +template <> struct HandleTypes<Unknown> { + typedef JSValue ExternalType; + static ExternalType getFromSlot(HandleSlot slot) { return slot ? 
*slot : JSValue(); } + static JSValue toJSValue(const JSValue& v) { return v; } + template <typename U> static void validateUpcast() {} +}; + +template <typename Base, typename T> struct HandleConverter { + T* operator->() + { +#if ENABLE(JSC_ZOMBIES) + ASSERT(!static_cast<const Base*>(this)->get() || !static_cast<const Base*>(this)->get()->isZombie()); +#endif + return static_cast<Base*>(this)->get(); + } + const T* operator->() const + { +#if ENABLE(JSC_ZOMBIES) + ASSERT(!static_cast<const Base*>(this)->get() || !static_cast<const Base*>(this)->get()->isZombie()); +#endif + return static_cast<const Base*>(this)->get(); + } + + T* operator*() + { +#if ENABLE(JSC_ZOMBIES) + ASSERT(!static_cast<const Base*>(this)->get() || !static_cast<const Base*>(this)->get()->isZombie()); +#endif + return static_cast<Base*>(this)->get(); + } + const T* operator*() const + { +#if ENABLE(JSC_ZOMBIES) + ASSERT(!static_cast<const Base*>(this)->get() || !static_cast<const Base*>(this)->get()->isZombie()); +#endif + return static_cast<const Base*>(this)->get(); + } +}; + +template <typename Base> struct HandleConverter<Base, Unknown> { + Handle<JSObject> asObject() const; + bool isObject() const { return jsValue().isObject(); } + bool getNumber(double number) const { return jsValue().getNumber(number); } + UString getString(ExecState*) const; + bool isUndefinedOrNull() const { return jsValue().isUndefinedOrNull(); } + +private: + JSValue jsValue() const + { +#if ENABLE(JSC_ZOMBIES) + ASSERT(!static_cast<const Base*>(this)->get() || !static_cast<const Base*>(this)->get().isZombie()); +#endif + return static_cast<const Base*>(this)->get(); + } +}; + +template <typename T> class Handle : public HandleBase, public HandleConverter<Handle<T>, T> { +public: + template <typename A, typename B> friend class HandleConverter; + typedef typename HandleTypes<T>::ExternalType ExternalType; + template <typename U> Handle(Handle<U> o) + { + typename HandleTypes<T>::template validateUpcast<U>(); + 
setSlot(o.slot()); + } + + void swap(Handle& other) { HandleBase::swap(other); } + + ExternalType get() const { return HandleTypes<T>::getFromSlot(this->slot()); } + +protected: + Handle(HandleSlot slot = 0) + : HandleBase(slot) + { + } + +private: + friend class HandleHeap; + + static Handle<T> wrapSlot(HandleSlot slot) + { + return Handle<T>(slot); + } +}; + +template <typename Base> Handle<JSObject> HandleConverter<Base, Unknown>::asObject() const +{ + return Handle<JSObject>::wrapSlot(static_cast<const Base*>(this)->slot()); +} + +template <typename T, typename U> inline bool operator==(const Handle<T>& a, const Handle<U>& b) +{ + return a.get() == b.get(); +} + +template <typename T, typename U> inline bool operator==(const Handle<T>& a, U* b) +{ + return a.get() == b; +} + +template <typename T, typename U> inline bool operator==(T* a, const Handle<U>& b) +{ + return a == b.get(); +} + +template <typename T, typename U> inline bool operator!=(const Handle<T>& a, const Handle<U>& b) +{ + return a.get() != b.get(); +} + +template <typename T, typename U> inline bool operator!=(const Handle<T>& a, U* b) +{ + return a.get() != b; +} + +template <typename T, typename U> inline bool operator!=(T* a, const Handle<U>& b) +{ + return a != b.get(); +} + +template <typename T, typename U> inline bool operator!=(const Handle<T>& a, JSValue b) +{ + return a.get() != b; +} + +template <typename T, typename U> inline bool operator!=(JSValue a, const Handle<U>& b) +{ + return a != b.get(); +} + +} + +#endif diff --git a/Source/JavaScriptCore/heap/HandleHeap.cpp b/Source/JavaScriptCore/heap/HandleHeap.cpp new file mode 100644 index 0000000..1a0051e --- /dev/null +++ b/Source/JavaScriptCore/heap/HandleHeap.cpp @@ -0,0 +1,174 @@ +/* + * Copyright (C) 2011 Apple Inc. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. 
Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS + * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF + * THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +#include "config.h" +#include "HandleHeap.h" + +#include "JSObject.h" + +namespace JSC { + +WeakHandleOwner::~WeakHandleOwner() +{ +} + +bool WeakHandleOwner::isReachableFromOpaqueRoots(Handle<Unknown>, void*, MarkStack&) +{ + return false; +} + +void WeakHandleOwner::finalize(Handle<Unknown>, void*) +{ +} + +HandleHeap::HandleHeap(JSGlobalData* globalData) + : m_globalData(globalData) + , m_nextToFinalize(0) +{ + grow(); +} + +void HandleHeap::grow() +{ + Node* block = m_blockStack.grow(); + for (int i = m_blockStack.blockLength - 1; i >= 0; --i) { + Node* node = &block[i]; + new (node) Node(this); + m_freeList.push(node); + } +} + +void HandleHeap::markStrongHandles(HeapRootMarker& heapRootMarker) +{ + Node* end = m_strongList.end(); + for (Node* node = m_strongList.begin(); node != end; node = node->next()) + heapRootMarker.mark(node->slot()); +} + +void HandleHeap::markWeakHandles(HeapRootMarker& heapRootMarker) +{ + MarkStack& markStack = heapRootMarker.markStack(); + + Node* end = m_weakList.end(); + for (Node* node = m_weakList.begin(); node != end; node = node->next()) { + ASSERT(isValidWeakNode(node)); + JSCell* cell = node->slot()->asCell(); + if (Heap::isMarked(cell)) + continue; + + WeakHandleOwner* weakOwner = node->weakOwner(); + if (!weakOwner) + continue; + + if (!weakOwner->isReachableFromOpaqueRoots(Handle<Unknown>::wrapSlot(node->slot()), node->weakOwnerContext(), markStack)) + continue; + + heapRootMarker.mark(node->slot()); + } +} + +void HandleHeap::finalizeWeakHandles() +{ + Node* end = m_weakList.end(); + for (Node* node = m_weakList.begin(); node != end; node = m_nextToFinalize) { + m_nextToFinalize = node->next(); + + ASSERT(isValidWeakNode(node)); + JSCell* cell = node->slot()->asCell(); + if (Heap::isMarked(cell)) + continue; + + if (WeakHandleOwner* weakOwner = node->weakOwner()) { + weakOwner->finalize(Handle<Unknown>::wrapSlot(node->slot()), node->weakOwnerContext()); + if (m_nextToFinalize != node->next()) // Owner 
deallocated node. + continue; + } + + *node->slot() = JSValue(); + SentinelLinkedList<Node>::remove(node); + m_immediateList.push(node); + } + + m_nextToFinalize = 0; +} + +void HandleHeap::writeBarrier(HandleSlot slot, const JSValue& value) +{ + ASSERT(!m_nextToFinalize); // Forbid assignment to handles during the finalization phase, since it would violate many GC invariants. + + if (!value == !*slot && slot->isCell() == value.isCell()) + return; + + Node* node = toNode(slot); + SentinelLinkedList<Node>::remove(node); + if (!value || !value.isCell()) { + m_immediateList.push(node); + return; + } + + if (node->isWeak()) { + m_weakList.push(node); + return; + } + + m_strongList.push(node); +} + +unsigned HandleHeap::protectedGlobalObjectCount() +{ + unsigned count = 0; + Node* end = m_strongList.end(); + for (Node* node = m_strongList.begin(); node != end; node = node->next()) { + JSValue value = *node->slot(); + if (value.isObject() && asObject(value.asCell())->isGlobalObject()) + count++; + } + return count; +} + +#if !ASSERT_DISABLED +bool HandleHeap::isValidWeakNode(Node* node) +{ + if (!node->isWeak()) + return false; + + JSValue value = *node->slot(); + if (!value || !value.isCell()) + return false; + + JSCell* cell = value.asCell(); + if (!cell || !cell->structure()) + return false; + +#if ENABLE(JSC_ZOMBIES) + if (cell->isZombie()) + return false; +#endif + + return true; +} +#endif + +} // namespace JSC diff --git a/Source/JavaScriptCore/heap/HandleHeap.h b/Source/JavaScriptCore/heap/HandleHeap.h new file mode 100644 index 0000000..886c94a --- /dev/null +++ b/Source/JavaScriptCore/heap/HandleHeap.h @@ -0,0 +1,276 @@ +/* + * Copyright (C) 2011 Apple Inc. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. 
Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS + * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF + * THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +#ifndef HandleHeap_h +#define HandleHeap_h + +#include "BlockStack.h" +#include "Handle.h" +#include "SentinelLinkedList.h" +#include "SinglyLinkedList.h" + +namespace JSC { + +class HandleHeap; +class HeapRootMarker; +class JSGlobalData; +class JSValue; +class MarkStack; +class TypeCounter; + +class WeakHandleOwner { +public: + virtual ~WeakHandleOwner(); + virtual bool isReachableFromOpaqueRoots(Handle<Unknown>, void* context, MarkStack&); + virtual void finalize(Handle<Unknown>, void* context); +}; + +class HandleHeap { +public: + static HandleHeap* heapFor(HandleSlot); + + HandleHeap(JSGlobalData*); + + JSGlobalData* globalData(); + + HandleSlot allocate(); + void deallocate(HandleSlot); + + void makeWeak(HandleSlot, WeakHandleOwner* = 0, void* context = 0); + HandleSlot copyWeak(HandleSlot); + + void markStrongHandles(HeapRootMarker&); + void markWeakHandles(HeapRootMarker&); + void finalizeWeakHandles(); + + void writeBarrier(HandleSlot, const JSValue&); + +#if !ASSERT_DISABLED + bool hasWeakOwner(HandleSlot, WeakHandleOwner*); +#endif + + unsigned protectedGlobalObjectCount(); + void protectedObjectTypeCounts(TypeCounter&); + +private: + class Node { + public: + Node(WTF::SentinelTag); + Node(HandleHeap*); + + HandleSlot slot(); + HandleHeap* handleHeap(); + + void makeWeak(WeakHandleOwner*, void* context); + bool isWeak(); + + WeakHandleOwner* weakOwner(); + void* weakOwnerContext(); + + void setPrev(Node*); + Node* prev(); + + void setNext(Node*); + Node* next(); + + private: + WeakHandleOwner* emptyWeakOwner(); + + JSValue m_value; + HandleHeap* m_handleHeap; + WeakHandleOwner* m_weakOwner; + void* m_weakOwnerContext; + Node* m_prev; + Node* m_next; + }; + + static HandleSlot toHandle(Node*); + static Node* toNode(HandleSlot); + + void grow(); + +#if !ASSERT_DISABLED + bool isValidWeakNode(Node*); +#endif + + JSGlobalData* m_globalData; + BlockStack<Node> m_blockStack; + + SentinelLinkedList<Node> m_strongList; + SentinelLinkedList<Node> 
m_weakList; + SentinelLinkedList<Node> m_immediateList; + SinglyLinkedList<Node> m_freeList; + Node* m_nextToFinalize; +}; + +inline HandleHeap* HandleHeap::heapFor(HandleSlot handle) +{ + return toNode(handle)->handleHeap(); +} + +inline JSGlobalData* HandleHeap::globalData() +{ + return m_globalData; +} + +inline HandleSlot HandleHeap::toHandle(Node* node) +{ + return reinterpret_cast<HandleSlot>(node); +} + +inline HandleHeap::Node* HandleHeap::toNode(HandleSlot handle) +{ + return reinterpret_cast<Node*>(handle); +} + +inline HandleSlot HandleHeap::allocate() +{ + if (m_freeList.isEmpty()) + grow(); + + Node* node = m_freeList.pop(); + new (node) Node(this); + m_immediateList.push(node); + return toHandle(node); +} + +inline void HandleHeap::deallocate(HandleSlot handle) +{ + Node* node = toNode(handle); + if (node == m_nextToFinalize) { + m_nextToFinalize = node->next(); + ASSERT(m_nextToFinalize->next()); + } + + SentinelLinkedList<Node>::remove(node); + m_freeList.push(node); +} + +inline HandleSlot HandleHeap::copyWeak(HandleSlot other) +{ + Node* node = toNode(allocate()); + node->makeWeak(toNode(other)->weakOwner(), toNode(other)->weakOwnerContext()); + writeBarrier(node->slot(), *other); + *node->slot() = *other; + return toHandle(node); +} + +inline void HandleHeap::makeWeak(HandleSlot handle, WeakHandleOwner* weakOwner, void* context) +{ + Node* node = toNode(handle); + node->makeWeak(weakOwner, context); + + SentinelLinkedList<Node>::remove(node); + if (!*handle || !handle->isCell()) { + m_immediateList.push(node); + return; + } + + m_weakList.push(node); +} + +#if !ASSERT_DISABLED +inline bool HandleHeap::hasWeakOwner(HandleSlot handle, WeakHandleOwner* weakOwner) +{ + return toNode(handle)->weakOwner() == weakOwner; +} +#endif + +inline HandleHeap::Node::Node(HandleHeap* handleHeap) + : m_handleHeap(handleHeap) + , m_weakOwner(0) + , m_weakOwnerContext(0) +{ +} + +inline HandleHeap::Node::Node(WTF::SentinelTag) + : m_handleHeap(0) + , m_weakOwner(0) 
+ , m_weakOwnerContext(0) +{ +} + +inline HandleSlot HandleHeap::Node::slot() +{ + return &m_value; +} + +inline HandleHeap* HandleHeap::Node::handleHeap() +{ + return m_handleHeap; +} + +inline void HandleHeap::Node::makeWeak(WeakHandleOwner* weakOwner, void* context) +{ + m_weakOwner = weakOwner ? weakOwner : emptyWeakOwner(); + m_weakOwnerContext = context; +} + +inline bool HandleHeap::Node::isWeak() +{ + return m_weakOwner; // True for emptyWeakOwner(). +} + +inline WeakHandleOwner* HandleHeap::Node::weakOwner() +{ + return m_weakOwner == emptyWeakOwner() ? 0 : m_weakOwner; // 0 for emptyWeakOwner(). +} + +inline void* HandleHeap::Node::weakOwnerContext() +{ + ASSERT(weakOwner()); + return m_weakOwnerContext; +} + +inline void HandleHeap::Node::setPrev(Node* prev) +{ + m_prev = prev; +} + +inline HandleHeap::Node* HandleHeap::Node::prev() +{ + return m_prev; +} + +inline void HandleHeap::Node::setNext(Node* next) +{ + m_next = next; +} + +inline HandleHeap::Node* HandleHeap::Node::next() +{ + return m_next; +} + +// Sentinel to indicate that a node is weak, but its owner has no meaningful +// callbacks. This allows us to optimize by skipping such nodes. +inline WeakHandleOwner* HandleHeap::Node::emptyWeakOwner() +{ + return reinterpret_cast<WeakHandleOwner*>(-1); +} + +} + +#endif diff --git a/Source/JavaScriptCore/heap/HandleStack.cpp b/Source/JavaScriptCore/heap/HandleStack.cpp new file mode 100644 index 0000000..1c151af --- /dev/null +++ b/Source/JavaScriptCore/heap/HandleStack.cpp @@ -0,0 +1,63 @@ +/* + * Copyright (C) 2010 Apple Inc. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. 
Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS + * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF + * THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +#include "config.h" + +#include "HandleStack.h" + +#include "MarkStack.h" + +namespace JSC { + +HandleStack::HandleStack() +#ifndef NDEBUG + : m_scopeDepth(0) +#endif +{ + grow(); +} + +void HandleStack::mark(HeapRootMarker& heapRootMarker) +{ + const Vector<HandleSlot>& blocks = m_blockStack.blocks(); + size_t blockLength = m_blockStack.blockLength; + + int end = blocks.size() - 1; + for (int i = 0; i < end; ++i) { + HandleSlot block = blocks[i]; + heapRootMarker.mark(block, blockLength); + } + HandleSlot block = blocks[end]; + heapRootMarker.mark(block, m_frame.m_next - block); +} + +void HandleStack::grow() +{ + HandleSlot block = m_blockStack.grow(); + m_frame.m_next = block; + m_frame.m_end = block + m_blockStack.blockLength; +} + +} diff --git a/Source/JavaScriptCore/heap/HandleStack.h b/Source/JavaScriptCore/heap/HandleStack.h new file mode 100644 index 0000000..54e6c60 --- /dev/null +++ b/Source/JavaScriptCore/heap/HandleStack.h @@ -0,0 +1,129 @@ +/* + * Copyright (C) 2010 Apple Inc. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. 
OR ITS CONTRIBUTORS + * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF + * THE POSSIBILITY OF SUCH DAMAGE. + */ + +#ifndef HandleStack_h +#define HandleStack_h + +#include "Assertions.h" +#include "BlockStack.h" +#include "Handle.h" + +#include <wtf/UnusedParam.h> + +namespace JSC { + +class LocalScope; +class HeapRootMarker; + +class HandleStack { +public: + class Frame { + public: + HandleSlot m_next; + HandleSlot m_end; + }; + + HandleStack(); + + void enterScope(Frame&); + void leaveScope(Frame&); + + HandleSlot push(); + + void mark(HeapRootMarker&); + +private: + void grow(); + void zapTo(Frame&); + HandleSlot findFirstAfter(HandleSlot); + +#ifndef NDEBUG + size_t m_scopeDepth; +#endif + BlockStack<JSValue> m_blockStack; + Frame m_frame; +}; + +inline void HandleStack::enterScope(Frame& lastFrame) +{ +#ifndef NDEBUG + ++m_scopeDepth; +#endif + + lastFrame = m_frame; +} + + + +inline void HandleStack::zapTo(Frame& lastFrame) +{ +#ifdef NDEBUG + UNUSED_PARAM(lastFrame); +#else + const Vector<HandleSlot>& blocks = m_blockStack.blocks(); + + if (lastFrame.m_end != m_frame.m_end) { // Zapping to a frame in a different block. 
+ int i = blocks.size() - 1; + for ( ; blocks[i] + m_blockStack.blockLength != lastFrame.m_end; --i) { + for (int j = m_blockStack.blockLength - 1; j >= 0; --j) + blocks[i][j] = JSValue(); + } + + for (HandleSlot it = blocks[i] + m_blockStack.blockLength - 1; it != lastFrame.m_next - 1; --it) + *it = JSValue(); + + return; + } + + for (HandleSlot it = m_frame.m_next - 1; it != lastFrame.m_next - 1; --it) + *it = JSValue(); +#endif +} + +inline void HandleStack::leaveScope(Frame& lastFrame) +{ +#ifndef NDEBUG + --m_scopeDepth; +#endif + + zapTo(lastFrame); + + if (lastFrame.m_end != m_frame.m_end) // Popping to a frame in a different block. + m_blockStack.shrink(lastFrame.m_end); + + m_frame = lastFrame; +} + +inline HandleSlot HandleStack::push() +{ + ASSERT(m_scopeDepth); // Creating a Local outside of a LocalScope is a memory leak. + if (m_frame.m_next == m_frame.m_end) + grow(); + return m_frame.m_next++; +} + +} + +#endif diff --git a/Source/JavaScriptCore/heap/Heap.cpp b/Source/JavaScriptCore/heap/Heap.cpp new file mode 100644 index 0000000..11999b7 --- /dev/null +++ b/Source/JavaScriptCore/heap/Heap.cpp @@ -0,0 +1,433 @@ +/* + * Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved. + * Copyright (C) 2007 Eric Seidel <eric@webkit.org> + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + * + */ + +#include "config.h" +#include "Heap.h" + +#include "CodeBlock.h" +#include "ConservativeRoots.h" +#include "GCActivityCallback.h" +#include "Interpreter.h" +#include "JSGlobalData.h" +#include "JSGlobalObject.h" +#include "JSLock.h" +#include "JSONObject.h" +#include "Tracing.h" +#include <algorithm> + +#define COLLECT_ON_EVERY_SLOW_ALLOCATION 0 + +using namespace std; + +namespace JSC { + +const size_t minBytesPerCycle = 512 * 1024; + +Heap::Heap(JSGlobalData* globalData) + : m_operationInProgress(NoOperation) + , m_markedSpace(globalData) + , m_markListSet(0) + , m_activityCallback(DefaultGCActivityCallback::create(this)) + , m_globalData(globalData) + , m_machineThreads(this) + , m_markStack(globalData->jsArrayVPtr) + , m_handleHeap(globalData) + , m_extraCost(0) +{ + m_markedSpace.setHighWaterMark(minBytesPerCycle); + (*m_activityCallback)(); +} + +Heap::~Heap() +{ + // The destroy function must already have been called, so assert this. + ASSERT(!m_globalData); +} + +void Heap::destroy() +{ + JSLock lock(SilenceAssertionsOnly); + + if (!m_globalData) + return; + + ASSERT(!m_globalData->dynamicGlobalObject); + ASSERT(m_operationInProgress == NoOperation); + + // The global object is not GC protected at this point, so sweeping may delete it + // (and thus the global data) before other objects that may use the global data. 
+ RefPtr<JSGlobalData> protect(m_globalData); + +#if ENABLE(JIT) + m_globalData->jitStubs->clearHostFunctionStubs(); +#endif + + delete m_markListSet; + m_markListSet = 0; + m_markedSpace.clearMarks(); + m_handleHeap.finalizeWeakHandles(); + m_markedSpace.destroy(); + + m_globalData = 0; +} + +void Heap::reportExtraMemoryCostSlowCase(size_t cost) +{ + // Our frequency of garbage collection tries to balance memory use against speed + // by collecting based on the number of newly created values. However, for values + // that hold on to a great deal of memory that's not in the form of other JS values, + // that is not good enough - in some cases a lot of those objects can pile up and + // use crazy amounts of memory without a GC happening. So we track these extra + // memory costs. Only unusually large objects are noted, and we only keep track + // of this extra cost until the next GC. In garbage collected languages, most values + // are either very short lived temporaries, or have extremely long lifetimes. So + // if a large value survives one garbage collection, there is not much point to + // collecting more frequently as long as it stays alive. 
+ + if (m_extraCost > maxExtraCost && m_extraCost > m_markedSpace.highWaterMark() / 2) + collectAllGarbage(); + m_extraCost += cost; +} + +void* Heap::allocateSlowCase(size_t bytes) +{ + ASSERT(globalData()->identifierTable == wtfThreadData().currentIdentifierTable()); + ASSERT(JSLock::lockCount() > 0); + ASSERT(JSLock::currentThreadIsHoldingLock()); + ASSERT(bytes <= MarkedSpace::maxCellSize); + ASSERT(m_operationInProgress == NoOperation); + +#if COLLECT_ON_EVERY_SLOW_ALLOCATION + collectAllGarbage(); + ASSERT(m_operationInProgress == NoOperation); +#endif + + reset(DoNotSweep); + + m_operationInProgress = Allocation; + void* result = m_markedSpace.allocate(bytes); + m_operationInProgress = NoOperation; + + ASSERT(result); + return result; +} + +void Heap::protect(JSValue k) +{ + ASSERT(k); + ASSERT(JSLock::currentThreadIsHoldingLock() || !m_globalData->isSharedInstance()); + + if (!k.isCell()) + return; + + m_protectedValues.add(k.asCell()); +} + +bool Heap::unprotect(JSValue k) +{ + ASSERT(k); + ASSERT(JSLock::currentThreadIsHoldingLock() || !m_globalData->isSharedInstance()); + + if (!k.isCell()) + return false; + + return m_protectedValues.remove(k.asCell()); +} + +void Heap::markProtectedObjects(HeapRootMarker& heapRootMarker) +{ + ProtectCountSet::iterator end = m_protectedValues.end(); + for (ProtectCountSet::iterator it = m_protectedValues.begin(); it != end; ++it) + heapRootMarker.mark(&it->first); +} + +void Heap::pushTempSortVector(Vector<ValueStringPair>* tempVector) +{ + m_tempSortingVectors.append(tempVector); +} + +void Heap::popTempSortVector(Vector<ValueStringPair>* tempVector) +{ + ASSERT_UNUSED(tempVector, tempVector == m_tempSortingVectors.last()); + m_tempSortingVectors.removeLast(); +} + +void Heap::markTempSortVectors(HeapRootMarker& heapRootMarker) +{ + typedef Vector<Vector<ValueStringPair>* > VectorOfValueStringVectors; + + VectorOfValueStringVectors::iterator end = m_tempSortingVectors.end(); + for (VectorOfValueStringVectors::iterator 
it = m_tempSortingVectors.begin(); it != end; ++it) { + Vector<ValueStringPair>* tempSortingVector = *it; + + Vector<ValueStringPair>::iterator vectorEnd = tempSortingVector->end(); + for (Vector<ValueStringPair>::iterator vectorIt = tempSortingVector->begin(); vectorIt != vectorEnd; ++vectorIt) { + if (vectorIt->first) + heapRootMarker.mark(&vectorIt->first); + } + } +} + +inline RegisterFile& Heap::registerFile() +{ + return m_globalData->interpreter->registerFile(); +} + +void Heap::markRoots() +{ +#ifndef NDEBUG + if (m_globalData->isSharedInstance()) { + ASSERT(JSLock::lockCount() > 0); + ASSERT(JSLock::currentThreadIsHoldingLock()); + } +#endif + + void* dummy; + + ASSERT(m_operationInProgress == NoOperation); + if (m_operationInProgress != NoOperation) + CRASH(); + + m_operationInProgress = Collection; + + MarkStack& markStack = m_markStack; + HeapRootMarker heapRootMarker(markStack); + + // We gather conservative roots before clearing mark bits because + // conservative gathering uses the mark bits from our last mark pass to + // determine whether a reference is valid. 
+ ConservativeRoots machineThreadRoots(this); + m_machineThreads.gatherConservativeRoots(machineThreadRoots, &dummy); + + ConservativeRoots registerFileRoots(this); + registerFile().gatherConservativeRoots(registerFileRoots); + + m_markedSpace.clearMarks(); + + markStack.append(machineThreadRoots); + markStack.drain(); + + markStack.append(registerFileRoots); + markStack.drain(); + + markProtectedObjects(heapRootMarker); + markStack.drain(); + + markTempSortVectors(heapRootMarker); + markStack.drain(); + + if (m_markListSet && m_markListSet->size()) + MarkedArgumentBuffer::markLists(heapRootMarker, *m_markListSet); + if (m_globalData->exception) + heapRootMarker.mark(&m_globalData->exception); + markStack.drain(); + + m_handleHeap.markStrongHandles(heapRootMarker); + markStack.drain(); + + m_handleStack.mark(heapRootMarker); + markStack.drain(); + + // Mark the small strings cache as late as possible, since it will clear + // itself if nothing else has marked it. + // FIXME: Change the small strings cache to use Weak<T>. + m_globalData->smallStrings.markChildren(heapRootMarker); + markStack.drain(); + + // Weak handles must be marked last, because their owners use the set of + // opaque roots to determine reachability. + int lastOpaqueRootCount; + do { + lastOpaqueRootCount = markStack.opaqueRootCount(); + m_handleHeap.markWeakHandles(heapRootMarker); + markStack.drain(); + // If the set of opaque roots has grown, more weak handles may have become reachable. 
+ } while (lastOpaqueRootCount != markStack.opaqueRootCount()); + + markStack.reset(); + + m_operationInProgress = NoOperation; +} + +size_t Heap::objectCount() const +{ + return m_markedSpace.objectCount(); +} + +size_t Heap::size() const +{ + return m_markedSpace.size(); +} + +size_t Heap::capacity() const +{ + return m_markedSpace.capacity(); +} + +size_t Heap::globalObjectCount() +{ + return m_globalData->globalObjectCount; +} + +size_t Heap::protectedGlobalObjectCount() +{ + size_t count = m_handleHeap.protectedGlobalObjectCount(); + + ProtectCountSet::iterator end = m_protectedValues.end(); + for (ProtectCountSet::iterator it = m_protectedValues.begin(); it != end; ++it) { + if (it->first->isObject() && asObject(it->first)->isGlobalObject()) + count++; + } + + return count; +} + +size_t Heap::protectedObjectCount() +{ + return m_protectedValues.size(); +} + +class TypeCounter { +public: + TypeCounter(); + void operator()(JSCell*); + PassOwnPtr<TypeCountSet> take(); + +private: + const char* typeName(JSCell*); + OwnPtr<TypeCountSet> m_typeCountSet; +}; + +inline TypeCounter::TypeCounter() + : m_typeCountSet(new TypeCountSet) +{ +} + +inline const char* TypeCounter::typeName(JSCell* cell) +{ + if (cell->isString()) + return "string"; + if (cell->isGetterSetter()) + return "Getter-Setter"; + if (cell->isAPIValueWrapper()) + return "API wrapper"; + if (cell->isPropertyNameIterator()) + return "For-in iterator"; + if (const ClassInfo* info = cell->classInfo()) + return info->className; + if (!cell->isObject()) + return "[empty cell]"; + return "Object"; +} + +inline void TypeCounter::operator()(JSCell* cell) +{ + m_typeCountSet->add(typeName(cell)); +} + +inline PassOwnPtr<TypeCountSet> TypeCounter::take() +{ + return m_typeCountSet.release(); +} + +PassOwnPtr<TypeCountSet> Heap::protectedObjectTypeCounts() +{ + TypeCounter typeCounter; + + ProtectCountSet::iterator end = m_protectedValues.end(); + for (ProtectCountSet::iterator it = m_protectedValues.begin(); it 
!= end; ++it) + typeCounter(it->first); + m_handleHeap.protectedObjectTypeCounts(typeCounter); + + return typeCounter.take(); +} + +void HandleHeap::protectedObjectTypeCounts(TypeCounter& typeCounter) +{ + Node* end = m_strongList.end(); + for (Node* node = m_strongList.begin(); node != end; node = node->next()) { + JSValue value = *node->slot(); + if (value && value.isCell()) + typeCounter(value.asCell()); + } +} + +PassOwnPtr<TypeCountSet> Heap::objectTypeCounts() +{ + TypeCounter typeCounter; + forEach(typeCounter); + return typeCounter.take(); +} + +bool Heap::isBusy() +{ + return m_operationInProgress != NoOperation; +} + +void Heap::collectAllGarbage() +{ + reset(DoSweep); +} + +void Heap::reset(SweepToggle sweepToggle) +{ + ASSERT(globalData()->identifierTable == wtfThreadData().currentIdentifierTable()); + JAVASCRIPTCORE_GC_BEGIN(); + + markRoots(); + m_handleHeap.finalizeWeakHandles(); + + JAVASCRIPTCORE_GC_MARKED(); + + m_markedSpace.reset(); + m_extraCost = 0; + +#if ENABLE(JSC_ZOMBIES) + sweepToggle = DoSweep; +#endif + + if (sweepToggle == DoSweep) { + m_markedSpace.sweep(); + m_markedSpace.shrink(); + } + + // To avoid pathological GC churn in large heaps, we set the allocation high + // water mark to be proportional to the current size of the heap. The exact + // proportion is a bit arbitrary. A 2X multiplier gives a 1:1 (heap size : + // new bytes allocated) proportion, and seems to work well in benchmarks. 
+ size_t proportionalBytes = 2 * m_markedSpace.size(); + m_markedSpace.setHighWaterMark(max(proportionalBytes, minBytesPerCycle)); + + JAVASCRIPTCORE_GC_END(); + + (*m_activityCallback)(); +} + +void Heap::setActivityCallback(PassOwnPtr<GCActivityCallback> activityCallback) +{ + m_activityCallback = activityCallback; +} + +GCActivityCallback* Heap::activityCallback() +{ + return m_activityCallback.get(); +} + +} // namespace JSC diff --git a/Source/JavaScriptCore/heap/Heap.h b/Source/JavaScriptCore/heap/Heap.h new file mode 100644 index 0000000..c2d3bac --- /dev/null +++ b/Source/JavaScriptCore/heap/Heap.h @@ -0,0 +1,178 @@ +/* + * Copyright (C) 1999-2000 Harri Porten (porten@kde.org) + * Copyright (C) 2001 Peter Kelly (pmk@post.com) + * Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved. + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + * + */ + +#ifndef Heap_h +#define Heap_h + +#include "HandleHeap.h" +#include "HandleStack.h" +#include "MarkStack.h" +#include "MarkedSpace.h" +#include <wtf/Forward.h> +#include <wtf/HashCountedSet.h> +#include <wtf/HashSet.h> + +namespace JSC { + + class GCActivityCallback; + class GlobalCodeBlock; + class HeapRootMarker; + class JSCell; + class JSGlobalData; + class JSValue; + class LiveObjectIterator; + class MarkStack; + class MarkedArgumentBuffer; + class RegisterFile; + class UString; + class WeakGCHandlePool; + + typedef std::pair<JSValue, UString> ValueStringPair; + typedef HashCountedSet<JSCell*> ProtectCountSet; + typedef HashCountedSet<const char*> TypeCountSet; + + enum OperationInProgress { NoOperation, Allocation, Collection }; + + class Heap { + WTF_MAKE_NONCOPYABLE(Heap); + public: + static Heap* heap(JSValue); // 0 for immediate values + static Heap* heap(JSCell*); + + static bool isMarked(const JSCell*); + static bool testAndSetMarked(const JSCell*); + static void setMarked(JSCell*); + + Heap(JSGlobalData*); + ~Heap(); + void destroy(); // JSGlobalData must call destroy() before ~Heap(). + + JSGlobalData* globalData() const { return m_globalData; } + MarkedSpace& markedSpace() { return m_markedSpace; } + MachineThreads& machineThreads() { return m_machineThreads; } + + GCActivityCallback* activityCallback(); + void setActivityCallback(PassOwnPtr<GCActivityCallback>); + + bool isBusy(); // true if an allocation or collection is in progress + void* allocate(size_t); + void collectAllGarbage(); + + void reportExtraMemoryCost(size_t cost); + + void protect(JSValue); + bool unprotect(JSValue); // True when the protect count drops to 0. 
+ + bool contains(void*); + + size_t size() const; + size_t capacity() const; + size_t objectCount() const; + size_t globalObjectCount(); + size_t protectedObjectCount(); + size_t protectedGlobalObjectCount(); + PassOwnPtr<TypeCountSet> protectedObjectTypeCounts(); + PassOwnPtr<TypeCountSet> objectTypeCounts(); + + void pushTempSortVector(Vector<ValueStringPair>*); + void popTempSortVector(Vector<ValueStringPair>*); + + HashSet<MarkedArgumentBuffer*>& markListSet() { if (!m_markListSet) m_markListSet = new HashSet<MarkedArgumentBuffer*>; return *m_markListSet; } + + template <typename Functor> void forEach(Functor&); + + HandleSlot allocateGlobalHandle() { return m_handleHeap.allocate(); } + HandleSlot allocateLocalHandle() { return m_handleStack.push(); } + + HandleStack* handleStack() { return &m_handleStack; } + + private: + friend class JSGlobalData; + + static const size_t minExtraCost = 256; + static const size_t maxExtraCost = 1024 * 1024; + + void* allocateSlowCase(size_t); + void reportExtraMemoryCostSlowCase(size_t); + + void markRoots(); + void markProtectedObjects(HeapRootMarker&); + void markTempSortVectors(HeapRootMarker&); + + enum SweepToggle { DoNotSweep, DoSweep }; + void reset(SweepToggle); + + RegisterFile& registerFile(); + + OperationInProgress m_operationInProgress; + MarkedSpace m_markedSpace; + + ProtectCountSet m_protectedValues; + Vector<Vector<ValueStringPair>* > m_tempSortingVectors; + + HashSet<MarkedArgumentBuffer*>* m_markListSet; + + OwnPtr<GCActivityCallback> m_activityCallback; + + JSGlobalData* m_globalData; + + MachineThreads m_machineThreads; + MarkStack m_markStack; + HandleHeap m_handleHeap; + HandleStack m_handleStack; + + size_t m_extraCost; + }; + + inline bool Heap::isMarked(const JSCell* cell) + { + return MarkedSpace::isMarked(cell); + } + + inline bool Heap::testAndSetMarked(const JSCell* cell) + { + return MarkedSpace::testAndSetMarked(cell); + } + + inline void Heap::setMarked(JSCell* cell) + { + 
MarkedSpace::setMarked(cell); + } + + inline bool Heap::contains(void* p) + { + return m_markedSpace.contains(p); + } + + inline void Heap::reportExtraMemoryCost(size_t cost) + { + if (cost > minExtraCost) + reportExtraMemoryCostSlowCase(cost); + } + + template <typename Functor> inline void Heap::forEach(Functor& functor) + { + m_markedSpace.forEach(functor); + } + +} // namespace JSC + +#endif // Heap_h diff --git a/Source/JavaScriptCore/heap/Local.h b/Source/JavaScriptCore/heap/Local.h new file mode 100644 index 0000000..ac7d136 --- /dev/null +++ b/Source/JavaScriptCore/heap/Local.h @@ -0,0 +1,153 @@ +/* + * Copyright (C) 2010 Apple Inc. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS + * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF + * THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +#ifndef Local_h +#define Local_h + +#include "Handle.h" +#include "JSGlobalData.h" + +/* + A strongly referenced handle whose lifetime is temporary, limited to a given + LocalScope. Use Locals for local values on the stack. It is an error to + create a Local outside of any LocalScope. +*/ + +namespace JSC { + +template <typename T> class Local : public Handle<T> { + friend class LocalScope; + using Handle<T>::slot; + +public: + typedef typename Handle<T>::ExternalType ExternalType; + + Local(JSGlobalData&, ExternalType = ExternalType()); + Local(JSGlobalData&, Handle<T>); + Local(const Local<T>&); // Adopting constructor. Used to return a Local to a calling function. + + Local& operator=(ExternalType); + Local& operator=(Handle<T>); + +private: + Local(HandleSlot, ExternalType); // Used by LocalScope::release() to move a Local to a containing scope. + void set(ExternalType); +}; + +template <typename T> inline Local<T>::Local(JSGlobalData& globalData, ExternalType value) + : Handle<T>(globalData.allocateLocalHandle()) +{ + set(value); +} + +template <typename T> inline Local<T>::Local(JSGlobalData& globalData, Handle<T> other) + : Handle<T>(globalData.allocateLocalHandle()) +{ + set(other.get()); +} + +template <typename T> inline Local<T>::Local(const Local<T>& other) + : Handle<T>(other.slot()) +{ + const_cast<Local<T>&>(other).setSlot(0); // Prevent accidental sharing. 
+} + +template <typename T> inline Local<T>::Local(HandleSlot slot, ExternalType value) + : Handle<T>(slot, value) +{ +} + +template <typename T> inline Local<T>& Local<T>::operator=(ExternalType value) +{ + set(value); + return *this; +} + +template <typename T> inline Local<T>& Local<T>::operator=(Handle<T> other) +{ + set(other.get()); + return *this; +} + +template <typename T> inline void Local<T>::set(ExternalType externalType) +{ + ASSERT(slot()); + ASSERT(!HandleTypes<T>::toJSValue(externalType) || !HandleTypes<T>::toJSValue(externalType).isCell() || Heap::isMarked(HandleTypes<T>::toJSValue(externalType).asCell())); + *slot() = externalType; +} + + +template <typename T, unsigned inlineCapacity = 0> class LocalStack { + typedef typename Handle<T>::ExternalType ExternalType; +public: + LocalStack(JSGlobalData& globalData) + : m_globalData(&globalData) + , m_count(0) + { + } + + ExternalType peek() const + { + ASSERT(m_count > 0); + return m_stack[m_count - 1].get(); + } + + ExternalType pop() + { + ASSERT(m_count > 0); + return m_stack[--m_count].get(); + } + + void push(ExternalType value) + { + if (m_count == m_stack.size()) + m_stack.append(Local<T>(*m_globalData, value)); + else + m_stack[m_count] = value; + m_count++; + } + + bool isEmpty() const { return !m_count; } + unsigned size() const { return m_count; } + +private: + RefPtr<JSGlobalData> m_globalData; + Vector<Local<T>, inlineCapacity> m_stack; + unsigned m_count; +}; + +} + +namespace WTF { + +template<typename T> struct VectorTraits<JSC::Local<T> > : SimpleClassVectorTraits { + static const bool needsDestruction = false; + static const bool canInitializeWithMemset = false; + static const bool canCompareWithMemcmp = false; +}; + +} + +#endif diff --git a/Source/JavaScriptCore/heap/LocalScope.h b/Source/JavaScriptCore/heap/LocalScope.h new file mode 100644 index 0000000..cd27b32 --- /dev/null +++ b/Source/JavaScriptCore/heap/LocalScope.h @@ -0,0 +1,78 @@ +/* + * Copyright (C) 2010 Apple Inc. 
All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS + * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF + * THE POSSIBILITY OF SUCH DAMAGE. + */ + +#ifndef LocalScope_h +#define LocalScope_h + +#include "HandleStack.h" +#include "Local.h" + +namespace JSC { +/* + A LocalScope is a temporary scope in which Locals are allocated. When a + LocalScope goes out of scope, all the Locals created in it are destroyed. + + LocalScope is similar in concept to NSAutoreleasePool. +*/ + +class JSGlobalData; + +class LocalScope { +public: + explicit LocalScope(JSGlobalData&); + ~LocalScope(); + + template <typename T> Local<T> release(Local<T>); // Destroys all other locals in the scope. 
+ +private: + HandleStack* m_handleStack; + HandleStack::Frame m_lastFrame; +}; + +inline LocalScope::LocalScope(JSGlobalData& globalData) + : m_handleStack(globalData.heap.handleStack()) +{ + m_handleStack->enterScope(m_lastFrame); +} + +inline LocalScope::~LocalScope() +{ + m_handleStack->leaveScope(m_lastFrame); +} + +template <typename T> Local<T> LocalScope::release(Local<T> local) +{ + typename Local<T>::ExternalType ptr = local.get(); + + m_handleStack->leaveScope(m_lastFrame); + HandleSlot slot = m_handleStack->push(); + m_handleStack->enterScope(m_lastFrame); + + return Local<T>(slot, ptr); +} + +} + +#endif diff --git a/Source/JavaScriptCore/heap/MachineStackMarker.cpp b/Source/JavaScriptCore/heap/MachineStackMarker.cpp new file mode 100644 index 0000000..1f4f3ce --- /dev/null +++ b/Source/JavaScriptCore/heap/MachineStackMarker.cpp @@ -0,0 +1,491 @@ +/* + * Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved. + * Copyright (C) 2007 Eric Seidel <eric@webkit.org> + * Copyright (C) 2009 Acision BV. All rights reserved. + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + * + */ + +#include "config.h" +#include "MachineStackMarker.h" + +#include "ConservativeRoots.h" +#include "Heap.h" +#include "JSArray.h" +#include "JSGlobalData.h" +#include <setjmp.h> +#include <stdlib.h> +#include <wtf/StdLibExtras.h> + +#if USE(PTHREAD_BASED_QT) && !defined(WTF_USE_PTHREADS) +#define WTF_USE_PTHREADS 1 +#endif + +#if OS(DARWIN) + +#include <mach/mach_init.h> +#include <mach/mach_port.h> +#include <mach/task.h> +#include <mach/thread_act.h> +#include <mach/vm_map.h> + +#elif OS(WINDOWS) + +#include <windows.h> +#include <malloc.h> + +#elif OS(HAIKU) + +#include <OS.h> + +#elif OS(UNIX) + +#include <stdlib.h> +#if !OS(HAIKU) +#include <sys/mman.h> +#endif +#include <unistd.h> + +#if OS(SOLARIS) +#include <thread.h> +#else +#include <pthread.h> +#endif + +#if HAVE(PTHREAD_NP_H) +#include <pthread_np.h> +#endif + +#if OS(QNX) +#include <fcntl.h> +#include <sys/procfs.h> +#include <stdio.h> +#include <errno.h> +#endif + +#if USE(PTHREADS) && !OS(WINDOWS) && !OS(DARWIN) +#include <signal.h> +#ifndef SA_RESTART +#error MachineThreads requires SA_RESTART +#endif +#endif + +#endif + +using namespace WTF; + +namespace JSC { + +static inline void swapIfBackwards(void*& begin, void*& end) +{ +#if OS(WINCE) + if (begin <= end) + return; + std::swap(begin, end); +#else +UNUSED_PARAM(begin); +UNUSED_PARAM(end); +#endif +} + +#if ENABLE(JSC_MULTIPLE_THREADS) + +#if OS(DARWIN) +typedef mach_port_t PlatformThread; +#elif OS(WINDOWS) +typedef HANDLE PlatformThread; +#elif USE(PTHREADS) +typedef pthread_t PlatformThread; +static const int SigThreadSuspendResume = SIGUSR2; + +static void pthreadSignalHandlerSuspendResume(int signo) +{ + sigset_t signalSet; + sigemptyset(&signalSet); + sigaddset(&signalSet, SigThreadSuspendResume); + 
sigsuspend(&signalSet); +} +#endif + +class MachineThreads::Thread { +public: + Thread(pthread_t pthread, const PlatformThread& platThread, void* base) + : posixThread(pthread) + , platformThread(platThread) + , stackBase(base) + { +#if USE(PTHREADS) && !OS(WINDOWS) && !OS(DARWIN) + struct sigaction action; + action.sa_handler = pthreadSignalHandlerSuspendResume; + sigemptyset(&action.sa_mask); + action.sa_flags = SA_RESTART; + sigaction(SigThreadSuspendResume, &action, 0); + + sigset_t mask; + sigemptyset(&mask); + sigaddset(&mask, SigThreadSuspendResume); + pthread_sigmask(SIG_UNBLOCK, &mask, 0); +#endif + } + + Thread* next; + pthread_t posixThread; + PlatformThread platformThread; + void* stackBase; +}; + +#endif + +MachineThreads::MachineThreads(Heap* heap) + : m_heap(heap) +#if ENABLE(JSC_MULTIPLE_THREADS) + , m_registeredThreads(0) + , m_threadSpecific(0) +#endif +{ +} + +MachineThreads::~MachineThreads() +{ +#if ENABLE(JSC_MULTIPLE_THREADS) + if (m_threadSpecific) { + int error = pthread_key_delete(m_threadSpecific); + ASSERT_UNUSED(error, !error); + } + + MutexLocker registeredThreadsLock(m_registeredThreadsMutex); + for (Thread* t = m_registeredThreads; t;) { + Thread* next = t->next; + delete t; + t = next; + } +#endif +} + +#if ENABLE(JSC_MULTIPLE_THREADS) + +static inline PlatformThread getCurrentPlatformThread() +{ +#if OS(DARWIN) + return pthread_mach_thread_np(pthread_self()); +#elif OS(WINDOWS) + return pthread_getw32threadhandle_np(pthread_self()); +#elif USE(PTHREADS) + return pthread_self(); +#endif +} + +void MachineThreads::makeUsableFromMultipleThreads() +{ + if (m_threadSpecific) + return; + + int error = pthread_key_create(&m_threadSpecific, removeThread); + if (error) + CRASH(); +} + +void MachineThreads::addCurrentThread() +{ + ASSERT(!m_heap->globalData()->exclusiveThread || m_heap->globalData()->exclusiveThread == currentThread()); + + if (!m_threadSpecific || pthread_getspecific(m_threadSpecific)) + return; + + 
pthread_setspecific(m_threadSpecific, this); + Thread* thread = new Thread(pthread_self(), getCurrentPlatformThread(), m_heap->globalData()->stack().origin()); + + MutexLocker lock(m_registeredThreadsMutex); + + thread->next = m_registeredThreads; + m_registeredThreads = thread; +} + +void MachineThreads::removeThread(void* p) +{ + if (p) + static_cast<MachineThreads*>(p)->removeCurrentThread(); +} + +void MachineThreads::removeCurrentThread() +{ + pthread_t currentPosixThread = pthread_self(); + + MutexLocker lock(m_registeredThreadsMutex); + + if (pthread_equal(currentPosixThread, m_registeredThreads->posixThread)) { + Thread* t = m_registeredThreads; + m_registeredThreads = m_registeredThreads->next; + delete t; + } else { + Thread* last = m_registeredThreads; + Thread* t; + for (t = m_registeredThreads->next; t; t = t->next) { + if (pthread_equal(t->posixThread, currentPosixThread)) { + last->next = t->next; + break; + } + last = t; + } + ASSERT(t); // If t is NULL, we never found ourselves in the list. 
+ delete t; + } +} + +#endif + +#if COMPILER(GCC) +#define REGISTER_BUFFER_ALIGNMENT __attribute__ ((aligned (sizeof(void*)))) +#else +#define REGISTER_BUFFER_ALIGNMENT +#endif + +void MachineThreads::gatherFromCurrentThread(ConservativeRoots& conservativeRoots, void* stackCurrent) +{ + // setjmp forces volatile registers onto the stack + jmp_buf registers REGISTER_BUFFER_ALIGNMENT; +#if COMPILER(MSVC) +#pragma warning(push) +#pragma warning(disable: 4611) +#endif + setjmp(registers); +#if COMPILER(MSVC) +#pragma warning(pop) +#endif + + void* registersBegin = ®isters; + void* registersEnd = reinterpret_cast<void*>(roundUpToMultipleOf<sizeof(void*)>(reinterpret_cast<uintptr_t>(®isters + 1))); + swapIfBackwards(registersBegin, registersEnd); + conservativeRoots.add(registersBegin, registersEnd); + + void* stackBegin = stackCurrent; + void* stackEnd = m_heap->globalData()->stack().origin(); + swapIfBackwards(stackBegin, stackEnd); + conservativeRoots.add(stackBegin, stackEnd); +} + +#if ENABLE(JSC_MULTIPLE_THREADS) + +static inline void suspendThread(const PlatformThread& platformThread) +{ +#if OS(DARWIN) + thread_suspend(platformThread); +#elif OS(WINDOWS) + SuspendThread(platformThread); +#elif USE(PTHREADS) + pthread_kill(platformThread, SigThreadSuspendResume); +#else +#error Need a way to suspend threads on this platform +#endif +} + +static inline void resumeThread(const PlatformThread& platformThread) +{ +#if OS(DARWIN) + thread_resume(platformThread); +#elif OS(WINDOWS) + ResumeThread(platformThread); +#elif USE(PTHREADS) + pthread_kill(platformThread, SigThreadSuspendResume); +#else +#error Need a way to resume threads on this platform +#endif +} + +typedef unsigned long usword_t; // word size, assumed to be either 32 or 64 bit + +#if OS(DARWIN) + +#if CPU(X86) +typedef i386_thread_state_t PlatformThreadRegisters; +#elif CPU(X86_64) +typedef x86_thread_state64_t PlatformThreadRegisters; +#elif CPU(PPC) +typedef ppc_thread_state_t PlatformThreadRegisters; 
+#elif CPU(PPC64) +typedef ppc_thread_state64_t PlatformThreadRegisters; +#elif CPU(ARM) +typedef arm_thread_state_t PlatformThreadRegisters; +#else +#error Unknown Architecture +#endif + +#elif OS(WINDOWS) && CPU(X86) +typedef CONTEXT PlatformThreadRegisters; +#elif USE(PTHREADS) +typedef pthread_attr_t PlatformThreadRegisters; +#else +#error Need a thread register struct for this platform +#endif + +static size_t getPlatformThreadRegisters(const PlatformThread& platformThread, PlatformThreadRegisters& regs) +{ +#if OS(DARWIN) + +#if CPU(X86) + unsigned user_count = sizeof(regs)/sizeof(int); + thread_state_flavor_t flavor = i386_THREAD_STATE; +#elif CPU(X86_64) + unsigned user_count = x86_THREAD_STATE64_COUNT; + thread_state_flavor_t flavor = x86_THREAD_STATE64; +#elif CPU(PPC) + unsigned user_count = PPC_THREAD_STATE_COUNT; + thread_state_flavor_t flavor = PPC_THREAD_STATE; +#elif CPU(PPC64) + unsigned user_count = PPC_THREAD_STATE64_COUNT; + thread_state_flavor_t flavor = PPC_THREAD_STATE64; +#elif CPU(ARM) + unsigned user_count = ARM_THREAD_STATE_COUNT; + thread_state_flavor_t flavor = ARM_THREAD_STATE; +#else +#error Unknown Architecture +#endif + + kern_return_t result = thread_get_state(platformThread, flavor, (thread_state_t)®s, &user_count); + if (result != KERN_SUCCESS) { + WTFReportFatalError(__FILE__, __LINE__, WTF_PRETTY_FUNCTION, + "JavaScript garbage collection failed because thread_get_state returned an error (%d). This is probably the result of running inside Rosetta, which is not supported.", result); + CRASH(); + } + return user_count * sizeof(usword_t); +// end OS(DARWIN) + +#elif OS(WINDOWS) && CPU(X86) + regs.ContextFlags = CONTEXT_INTEGER | CONTEXT_CONTROL | CONTEXT_SEGMENTS; + GetThreadContext(platformThread, ®s); + return sizeof(CONTEXT); +#elif USE(PTHREADS) + pthread_attr_init(®s); +#if HAVE(PTHREAD_NP_H) || OS(NETBSD) + // e.g. 
on FreeBSD 5.4, neundorf@kde.org + pthread_attr_get_np(platformThread, ®s); +#else + // FIXME: this function is non-portable; other POSIX systems may have different np alternatives + pthread_getattr_np(platformThread, ®s); +#endif + return 0; +#else +#error Need a way to get thread registers on this platform +#endif +} + +static inline void* otherThreadStackPointer(const PlatformThreadRegisters& regs) +{ +#if OS(DARWIN) + +#if __DARWIN_UNIX03 + +#if CPU(X86) + return reinterpret_cast<void*>(regs.__esp); +#elif CPU(X86_64) + return reinterpret_cast<void*>(regs.__rsp); +#elif CPU(PPC) || CPU(PPC64) + return reinterpret_cast<void*>(regs.__r1); +#elif CPU(ARM) + return reinterpret_cast<void*>(regs.__sp); +#else +#error Unknown Architecture +#endif + +#else // !__DARWIN_UNIX03 + +#if CPU(X86) + return reinterpret_cast<void*>(regs.esp); +#elif CPU(X86_64) + return reinterpret_cast<void*>(regs.rsp); +#elif CPU(PPC) || CPU(PPC64) + return reinterpret_cast<void*>(regs.r1); +#else +#error Unknown Architecture +#endif + +#endif // __DARWIN_UNIX03 + +// end OS(DARWIN) +#elif CPU(X86) && OS(WINDOWS) + return reinterpret_cast<void*>((uintptr_t) regs.Esp); +#elif USE(PTHREADS) + void* stackBase = 0; + size_t stackSize = 0; + int rc = pthread_attr_getstack(®s, &stackBase, &stackSize); + (void)rc; // FIXME: Deal with error code somehow? Seems fatal. 
+ ASSERT(stackBase); + return static_cast<char*>(stackBase) + stackSize; +#else +#error Need a way to get the stack pointer for another thread on this platform +#endif +} + +static void freePlatformThreadRegisters(PlatformThreadRegisters& regs) +{ +#if USE(PTHREADS) && !OS(WINDOWS) && !OS(DARWIN) + pthread_attr_destroy(®s); +#else + UNUSED_PARAM(regs); +#endif +} + +void MachineThreads::gatherFromOtherThread(ConservativeRoots& conservativeRoots, Thread* thread) +{ + suspendThread(thread->platformThread); + + PlatformThreadRegisters regs; + size_t regSize = getPlatformThreadRegisters(thread->platformThread, regs); + + conservativeRoots.add(static_cast<void*>(®s), static_cast<void*>(reinterpret_cast<char*>(®s) + regSize)); + + void* stackPointer = otherThreadStackPointer(regs); + void* stackBase = thread->stackBase; + swapIfBackwards(stackPointer, stackBase); + conservativeRoots.add(stackPointer, stackBase); + + resumeThread(thread->platformThread); + + freePlatformThreadRegisters(regs); +} + +#endif + +void MachineThreads::gatherConservativeRoots(ConservativeRoots& conservativeRoots, void* stackCurrent) +{ + gatherFromCurrentThread(conservativeRoots, stackCurrent); + +#if ENABLE(JSC_MULTIPLE_THREADS) + + if (m_threadSpecific) { + + MutexLocker lock(m_registeredThreadsMutex); + +#ifndef NDEBUG + // Forbid malloc during the gather phase. The gather phase suspends + // threads, so a malloc during gather would risk a deadlock with a + // thread that had been suspended while holding the malloc lock. + fastMallocForbid(); +#endif + // It is safe to access the registeredThreads list, because we earlier asserted that locks are being held, + // and since this is a shared heap, they are real locks. 
+ for (Thread* thread = m_registeredThreads; thread; thread = thread->next) { + if (!pthread_equal(thread->posixThread, pthread_self())) + gatherFromOtherThread(conservativeRoots, thread); + } +#ifndef NDEBUG + fastMallocAllow(); +#endif + } +#endif +} + +} // namespace JSC diff --git a/Source/JavaScriptCore/heap/MachineStackMarker.h b/Source/JavaScriptCore/heap/MachineStackMarker.h new file mode 100644 index 0000000..c814ac5 --- /dev/null +++ b/Source/JavaScriptCore/heap/MachineStackMarker.h @@ -0,0 +1,73 @@ +/* + * Copyright (C) 1999-2000 Harri Porten (porten@kde.org) + * Copyright (C) 2001 Peter Kelly (pmk@post.com) + * Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved. + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + * + */ + +#ifndef MachineThreads_h +#define MachineThreads_h + +#include <wtf/Noncopyable.h> +#include <wtf/ThreadingPrimitives.h> + +#if ENABLE(JSC_MULTIPLE_THREADS) +#include <pthread.h> +#endif + +namespace JSC { + + class Heap; + class ConservativeRoots; + + class MachineThreads { + WTF_MAKE_NONCOPYABLE(MachineThreads); + public: + MachineThreads(Heap*); + ~MachineThreads(); + + void gatherConservativeRoots(ConservativeRoots&, void* stackCurrent); + +#if ENABLE(JSC_MULTIPLE_THREADS) + void makeUsableFromMultipleThreads(); + void addCurrentThread(); // Only needs to be called by clients that can use the same heap from multiple threads. +#endif + + private: + void gatherFromCurrentThread(ConservativeRoots&, void* stackCurrent); + +#if ENABLE(JSC_MULTIPLE_THREADS) + class Thread; + + static void removeThread(void*); + void removeCurrentThread(); + + void gatherFromOtherThread(ConservativeRoots&, Thread*); +#endif + + Heap* m_heap; + +#if ENABLE(JSC_MULTIPLE_THREADS) + Mutex m_registeredThreadsMutex; + Thread* m_registeredThreads; + pthread_key_t m_threadSpecific; +#endif + }; + +} // namespace JSC + +#endif // MachineThreads_h diff --git a/Source/JavaScriptCore/heap/MarkStack.cpp b/Source/JavaScriptCore/heap/MarkStack.cpp new file mode 100644 index 0000000..3fb1a98 --- /dev/null +++ b/Source/JavaScriptCore/heap/MarkStack.cpp @@ -0,0 +1,135 @@ +/* + * Copyright (C) 2009, 2011 Apple Inc. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. 
Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY + * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY + * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +#include "config.h" +#include "MarkStack.h" + +#include "ConservativeRoots.h" +#include "Heap.h" +#include "JSArray.h" +#include "JSCell.h" +#include "JSObject.h" +#include "ScopeChain.h" +#include "Structure.h" + +namespace JSC { + +size_t MarkStack::s_pageSize = 0; + +void MarkStack::reset() +{ + ASSERT(s_pageSize); + m_values.shrinkAllocation(s_pageSize); + m_markSets.shrinkAllocation(s_pageSize); + m_opaqueRoots.clear(); +} + +void MarkStack::append(ConservativeRoots& conservativeRoots) +{ + JSCell** roots = conservativeRoots.roots(); + size_t size = conservativeRoots.size(); + for (size_t i = 0; i < size; ++i) + internalAppend(roots[i]); +} + +inline void MarkStack::markChildren(JSCell* cell) +{ + ASSERT(Heap::isMarked(cell)); + if (cell->structure()->typeInfo().type() < CompoundType) { + cell->JSCell::markChildren(*this); + return; + } + + if (!cell->structure()->typeInfo().overridesMarkChildren()) { + ASSERT(cell->isObject()); +#ifdef NDEBUG + asObject(cell)->markChildrenDirect(*this); 
+#else + ASSERT(!m_isCheckingForDefaultMarkViolation); + m_isCheckingForDefaultMarkViolation = true; + cell->markChildren(*this); + ASSERT(m_isCheckingForDefaultMarkViolation); + m_isCheckingForDefaultMarkViolation = false; +#endif + return; + } + if (cell->vptr() == m_jsArrayVPtr) { + asArray(cell)->markChildrenDirect(*this); + return; + } + cell->markChildren(*this); +} + +void MarkStack::drain() +{ +#if !ASSERT_DISABLED + ASSERT(!m_isDraining); + m_isDraining = true; +#endif + while (!m_markSets.isEmpty() || !m_values.isEmpty()) { + while (!m_markSets.isEmpty() && m_values.size() < 50) { + ASSERT(!m_markSets.isEmpty()); + MarkSet& current = m_markSets.last(); + ASSERT(current.m_values); + JSValue* end = current.m_end; + ASSERT(current.m_values); + ASSERT(current.m_values != end); + findNextUnmarkedNullValue: + ASSERT(current.m_values != end); + JSValue value = *current.m_values; + current.m_values++; + + JSCell* cell; + if (!value || !value.isCell() || Heap::testAndSetMarked(cell = value.asCell())) { + if (current.m_values == end) { + m_markSets.removeLast(); + continue; + } + goto findNextUnmarkedNullValue; + } + + if (cell->structure()->typeInfo().type() < CompoundType) { + cell->JSCell::markChildren(*this); + if (current.m_values == end) { + m_markSets.removeLast(); + continue; + } + goto findNextUnmarkedNullValue; + } + + if (current.m_values == end) + m_markSets.removeLast(); + + markChildren(cell); + } + while (!m_values.isEmpty()) + markChildren(m_values.removeLast()); + } +#if !ASSERT_DISABLED + m_isDraining = false; +#endif +} + +} // namespace JSC diff --git a/Source/JavaScriptCore/heap/MarkStack.h b/Source/JavaScriptCore/heap/MarkStack.h new file mode 100644 index 0000000..aba7647 --- /dev/null +++ b/Source/JavaScriptCore/heap/MarkStack.h @@ -0,0 +1,295 @@ +/* + * Copyright (C) 2009, 2011 Apple Inc. All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY + * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY + * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +#ifndef MarkStack_h +#define MarkStack_h + +#include "JSValue.h" +#include "Register.h" +#include "WriteBarrier.h" +#include <wtf/HashSet.h> +#include <wtf/Vector.h> +#include <wtf/Noncopyable.h> +#include <wtf/OSAllocator.h> + +namespace JSC { + + class ConservativeRoots; + class JSGlobalData; + class Register; + + enum MarkSetProperties { MayContainNullValues, NoNullValues }; + + class MarkStack { + WTF_MAKE_NONCOPYABLE(MarkStack); + public: + MarkStack(void* jsArrayVPtr) + : m_jsArrayVPtr(jsArrayVPtr) +#if !ASSERT_DISABLED + , m_isCheckingForDefaultMarkViolation(false) + , m_isDraining(false) +#endif + { + } + + ~MarkStack() + { + ASSERT(m_markSets.isEmpty()); + ASSERT(m_values.isEmpty()); + } + + void deprecatedAppend(JSCell**); + template <typename T> void append(WriteBarrierBase<T>*); + + void appendValues(WriteBarrierBase<Unknown>* barriers, size_t count, MarkSetProperties properties = NoNullValues) + { + JSValue* values = barriers->slot(); + if (count) + m_markSets.append(MarkSet(values, values + count, properties)); + } + + void append(ConservativeRoots&); + + bool addOpaqueRoot(void* root) { return m_opaqueRoots.add(root).second; } + bool containsOpaqueRoot(void* root) { return m_opaqueRoots.contains(root); } + int opaqueRootCount() { return m_opaqueRoots.size(); } + + void drain(); + void reset(); + + private: + friend class HeapRootMarker; // Allowed to mark a JSValue* or JSCell** directly. 
+ void append(JSValue*); + void append(JSValue*, size_t count); + void append(JSCell**); + + void internalAppend(JSCell*); + void internalAppend(JSValue); + void markChildren(JSCell*); + + struct MarkSet { + MarkSet(JSValue* values, JSValue* end, MarkSetProperties properties) + : m_values(values) + , m_end(end) + , m_properties(properties) + { + ASSERT(values); + } + JSValue* m_values; + JSValue* m_end; + MarkSetProperties m_properties; + }; + + static void* allocateStack(size_t size) { return OSAllocator::reserveAndCommit(size); } + static void releaseStack(void* addr, size_t size) { OSAllocator::decommitAndRelease(addr, size); } + + static void initializePagesize(); + static size_t pageSize() + { + if (!s_pageSize) + initializePagesize(); + return s_pageSize; + } + + template <typename T> struct MarkStackArray { + MarkStackArray() + : m_top(0) + , m_allocated(MarkStack::pageSize()) + , m_capacity(m_allocated / sizeof(T)) + { + m_data = reinterpret_cast<T*>(allocateStack(m_allocated)); + } + + ~MarkStackArray() + { + releaseStack(m_data, m_allocated); + } + + void expand() + { + size_t oldAllocation = m_allocated; + m_allocated *= 2; + m_capacity = m_allocated / sizeof(T); + void* newData = allocateStack(m_allocated); + memcpy(newData, m_data, oldAllocation); + releaseStack(m_data, oldAllocation); + m_data = reinterpret_cast<T*>(newData); + } + + inline void append(const T& v) + { + if (m_top == m_capacity) + expand(); + m_data[m_top++] = v; + } + + inline T removeLast() + { + ASSERT(m_top); + return m_data[--m_top]; + } + + inline T& last() + { + ASSERT(m_top); + return m_data[m_top - 1]; + } + + inline bool isEmpty() + { + return m_top == 0; + } + + inline size_t size() { return m_top; } + + inline void shrinkAllocation(size_t size) + { + ASSERT(size <= m_allocated); + ASSERT(0 == (size % MarkStack::pageSize())); + if (size == m_allocated) + return; +#if OS(WINDOWS) || OS(SYMBIAN) || PLATFORM(BREWMP) + // We cannot release a part of a region with VirtualFree. 
To get around this, + // we'll release the entire region and reallocate the size that we want. + releaseStack(m_data, m_allocated); + m_data = reinterpret_cast<T*>(allocateStack(size)); +#else + releaseStack(reinterpret_cast<char*>(m_data) + size, m_allocated - size); +#endif + m_allocated = size; + m_capacity = m_allocated / sizeof(T); + } + + private: + size_t m_top; + size_t m_allocated; + size_t m_capacity; + T* m_data; + }; + + void* m_jsArrayVPtr; + MarkStackArray<MarkSet> m_markSets; + MarkStackArray<JSCell*> m_values; + static size_t s_pageSize; + HashSet<void*> m_opaqueRoots; // Handle-owning data structures not visible to the garbage collector. + +#if !ASSERT_DISABLED + public: + bool m_isCheckingForDefaultMarkViolation; + bool m_isDraining; +#endif + }; + + inline void MarkStack::append(JSValue* slot, size_t count) + { + if (!count) + return; + m_markSets.append(MarkSet(slot, slot + count, NoNullValues)); + } + + template <typename T> inline void MarkStack::append(WriteBarrierBase<T>* slot) + { + internalAppend(*slot->slot()); + } + + ALWAYS_INLINE void MarkStack::deprecatedAppend(JSCell** value) + { + ASSERT(value); + internalAppend(*value); + } + + ALWAYS_INLINE void MarkStack::append(JSValue* value) + { + ASSERT(value); + internalAppend(*value); + } + + ALWAYS_INLINE void MarkStack::append(JSCell** value) + { + ASSERT(value); + internalAppend(*value); + } + + ALWAYS_INLINE void MarkStack::internalAppend(JSValue value) + { + ASSERT(value); + if (value.isCell()) + internalAppend(value.asCell()); + } + + // Privileged class for marking JSValues directly. It is only safe to use + // this class to mark direct heap roots that are marked during every GC pass. + // All other references should be wrapped in WriteBarriers and marked through + // the MarkStack. 
+ class HeapRootMarker { + private: + friend class Heap; + HeapRootMarker(MarkStack&); + + public: + void mark(JSValue*); + void mark(JSValue*, size_t); + void mark(JSString**); + void mark(JSCell**); + + MarkStack& markStack(); + + private: + MarkStack& m_markStack; + }; + + inline HeapRootMarker::HeapRootMarker(MarkStack& markStack) + : m_markStack(markStack) + { + } + + inline void HeapRootMarker::mark(JSValue* slot) + { + m_markStack.append(slot); + } + + inline void HeapRootMarker::mark(JSValue* slot, size_t count) + { + m_markStack.append(slot, count); + } + + inline void HeapRootMarker::mark(JSString** slot) + { + m_markStack.append(reinterpret_cast<JSCell**>(slot)); + } + + inline void HeapRootMarker::mark(JSCell** slot) + { + m_markStack.append(slot); + } + + inline MarkStack& HeapRootMarker::markStack() + { + return m_markStack; + } + +} // namespace JSC + +#endif diff --git a/Source/JavaScriptCore/heap/MarkStackPosix.cpp b/Source/JavaScriptCore/heap/MarkStackPosix.cpp new file mode 100644 index 0000000..2a5b298 --- /dev/null +++ b/Source/JavaScriptCore/heap/MarkStackPosix.cpp @@ -0,0 +1,43 @@ +/* + * Copyright (C) 2009 Apple Inc. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY + * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. 
OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY + * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +#include "config.h" +#include "MarkStack.h" + +#if OS(UNIX) && !OS(SYMBIAN) + +#include <unistd.h> +#include <sys/mman.h> + +namespace JSC { + +void MarkStack::initializePagesize() +{ + MarkStack::s_pageSize = getpagesize(); +} + +} + +#endif diff --git a/Source/JavaScriptCore/heap/MarkStackSymbian.cpp b/Source/JavaScriptCore/heap/MarkStackSymbian.cpp new file mode 100644 index 0000000..a3893d7 --- /dev/null +++ b/Source/JavaScriptCore/heap/MarkStackSymbian.cpp @@ -0,0 +1,38 @@ +/* + Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Library General Public + License as published by the Free Software Foundation; either + version 2 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Library General Public License for more details. + + You should have received a copy of the GNU Library General Public License + along with this library; see the file COPYING.LIB. If not, write to + the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, + Boston, MA 02110-1301, USA. 
+*/ + +#include "config.h" +#include "MarkStack.h" + +#if OS(SYMBIAN) + +#include <e32hal.h> + +namespace JSC { + +void MarkStack::initializePagesize() +{ + TInt page_size; + UserHal::PageSizeInBytes(page_size); + MarkStack::s_pageSize = page_size; +} + +} + +#endif diff --git a/Source/JavaScriptCore/heap/MarkStackWin.cpp b/Source/JavaScriptCore/heap/MarkStackWin.cpp new file mode 100644 index 0000000..2d2a1b3 --- /dev/null +++ b/Source/JavaScriptCore/heap/MarkStackWin.cpp @@ -0,0 +1,44 @@ +/* + * Copyright (C) 2009 Apple Inc. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY + * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY + * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +#include "config.h" +#include "MarkStack.h" + +#if OS(WINDOWS) + +#include "windows.h" + +namespace JSC { + +void MarkStack::initializePagesize() +{ + SYSTEM_INFO system_info; + GetSystemInfo(&system_info); + MarkStack::s_pageSize = system_info.dwPageSize; +} + +} + +#endif diff --git a/Source/JavaScriptCore/heap/MarkedBlock.cpp b/Source/JavaScriptCore/heap/MarkedBlock.cpp new file mode 100644 index 0000000..a10a778 --- /dev/null +++ b/Source/JavaScriptCore/heap/MarkedBlock.cpp @@ -0,0 +1,89 @@ +/* + * Copyright (C) 2011 Apple Inc. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS + * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF + * THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +#include "config.h" +#include "MarkedBlock.h" + +#include "JSCell.h" +#include "JSObject.h" +#include "JSZombie.h" +#include "ScopeChain.h" + +namespace JSC { + +MarkedBlock* MarkedBlock::create(JSGlobalData* globalData, size_t cellSize) +{ + PageAllocationAligned allocation = PageAllocationAligned::allocate(blockSize, blockSize, OSAllocator::JSGCHeapPages); + if (!static_cast<bool>(allocation)) + CRASH(); + return new (allocation.base()) MarkedBlock(allocation, globalData, cellSize); +} + +void MarkedBlock::destroy(MarkedBlock* block) +{ + for (size_t i = block->firstAtom(); i < block->m_endAtom; i += block->m_atomsPerCell) + reinterpret_cast<JSCell*>(&block->atoms()[i])->~JSCell(); + block->m_allocation.deallocate(); +} + +MarkedBlock::MarkedBlock(const PageAllocationAligned& allocation, JSGlobalData* globalData, size_t cellSize) + : m_nextAtom(firstAtom()) + , m_allocation(allocation) + , m_heap(&globalData->heap) + , m_prev(0) + , m_next(0) +{ + m_atomsPerCell = (cellSize + atomSize - 1) / atomSize; + m_endAtom = atomsPerBlock - m_atomsPerCell + 1; + + Structure* dummyMarkableCellStructure = globalData->dummyMarkableCellStructure.get(); + for (size_t i = firstAtom(); i < m_endAtom; i += m_atomsPerCell) + new (&atoms()[i]) JSCell(*globalData, dummyMarkableCellStructure); +} + +void MarkedBlock::sweep() +{ + Structure* dummyMarkableCellStructure = m_heap->globalData()->dummyMarkableCellStructure.get(); + + for (size_t i = firstAtom(); i < m_endAtom; i += m_atomsPerCell) { + if (m_marks.get(i)) + continue; + + JSCell* cell = reinterpret_cast<JSCell*>(&atoms()[i]); +#if ENABLE(JSC_ZOMBIES) + if (cell->structure() && cell->structure() != dummyMarkableCellStructure && !cell->isZombie()) { + const ClassInfo* info = cell->classInfo(); + cell->~JSCell(); + new (cell) JSZombie(*m_heap->globalData(), info, m_heap->globalData()->zombieStructure.get()); + m_marks.set(i); + } +#else + cell->~JSCell(); + new (cell) JSCell(*m_heap->globalData(), 
dummyMarkableCellStructure); +#endif + } +} + +} // namespace JSC diff --git a/Source/JavaScriptCore/heap/MarkedBlock.h b/Source/JavaScriptCore/heap/MarkedBlock.h new file mode 100644 index 0000000..0e2b59c --- /dev/null +++ b/Source/JavaScriptCore/heap/MarkedBlock.h @@ -0,0 +1,224 @@ +/* + * Copyright (C) 1999-2000 Harri Porten (porten@kde.org) + * Copyright (C) 2001 Peter Kelly (pmk@post.com) + * Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved. + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + * + */ + +#ifndef MarkedBlock_h +#define MarkedBlock_h + +#include <wtf/Bitmap.h> +#include <wtf/PageAllocationAligned.h> +#include <wtf/StdLibExtras.h> + +namespace JSC { + + class Heap; + class JSCell; + class JSGlobalData; + + typedef uintptr_t Bits; + + static const size_t KB = 1024; + + class MarkedBlock { + public: + static const size_t atomSize = sizeof(double); // Ensures natural alignment for all built-in types. 
+ + static MarkedBlock* create(JSGlobalData*, size_t cellSize); + static void destroy(MarkedBlock*); + + static bool isAtomAligned(const void*); + static MarkedBlock* blockFor(const void*); + static size_t firstAtom(); + + Heap* heap() const; + + void setPrev(MarkedBlock*); + void setNext(MarkedBlock*); + MarkedBlock* prev() const; + MarkedBlock* next() const; + + void* allocate(); + void reset(); + void sweep(); + + bool isEmpty(); + + void clearMarks(); + size_t markCount(); + + size_t cellSize(); + + size_t size(); + size_t capacity(); + + bool contains(const void*); + size_t atomNumber(const void*); + bool isMarked(const void*); + bool testAndSetMarked(const void*); + void setMarked(const void*); + + template <typename Functor> void forEach(Functor&); + + private: + static const size_t blockSize = 16 * KB; + static const size_t blockMask = ~(blockSize - 1); // blockSize must be a power of two. + + static const size_t atomMask = ~(atomSize - 1); // atomSize must be a power of two. + + static const size_t atomsPerBlock = blockSize / atomSize; + + typedef char Atom[atomSize]; + + MarkedBlock(const PageAllocationAligned&, JSGlobalData*, size_t cellSize); + Atom* atoms(); + + size_t m_nextAtom; + size_t m_endAtom; // This is a fuzzy end. Always test for < m_endAtom. 
+ size_t m_atomsPerCell; + WTF::Bitmap<blockSize / atomSize> m_marks; + PageAllocationAligned m_allocation; + Heap* m_heap; + MarkedBlock* m_prev; + MarkedBlock* m_next; + }; + + inline size_t MarkedBlock::firstAtom() + { + return WTF::roundUpToMultipleOf<atomSize>(sizeof(MarkedBlock)) / atomSize; + } + + inline MarkedBlock::Atom* MarkedBlock::atoms() + { + return reinterpret_cast<Atom*>(this); + } + + inline bool MarkedBlock::isAtomAligned(const void* p) + { + return !((intptr_t)(p) & ~atomMask); + } + + inline MarkedBlock* MarkedBlock::blockFor(const void* p) + { + return reinterpret_cast<MarkedBlock*>(reinterpret_cast<uintptr_t>(p) & blockMask); + } + + inline Heap* MarkedBlock::heap() const + { + return m_heap; + } + + inline void MarkedBlock::setPrev(MarkedBlock* prev) + { + m_prev = prev; + } + + inline void MarkedBlock::setNext(MarkedBlock* next) + { + m_next = next; + } + + inline MarkedBlock* MarkedBlock::prev() const + { + return m_prev; + } + + inline MarkedBlock* MarkedBlock::next() const + { + return m_next; + } + + inline void MarkedBlock::reset() + { + m_nextAtom = firstAtom(); + } + + inline bool MarkedBlock::isEmpty() + { + return m_marks.isEmpty(); + } + + inline void MarkedBlock::clearMarks() + { + m_marks.clearAll(); + } + + inline size_t MarkedBlock::markCount() + { + return m_marks.count(); + } + + inline size_t MarkedBlock::cellSize() + { + return m_atomsPerCell * atomSize; + } + + inline size_t MarkedBlock::size() + { + return markCount() * cellSize(); + } + + inline size_t MarkedBlock::capacity() + { + return m_allocation.size(); + } + + inline bool MarkedBlock::contains(const void* p) + { + // Since we mark the first atom of every cell when allocating and/or + // marking, any pointer to a marked atom points to the head of a valid, + // live cell. Checking the mark bit guards against reviving an object + // in a zombie state. 
+ + ASSERT(p && isAtomAligned(p)); + return isMarked(p); + } + + inline size_t MarkedBlock::atomNumber(const void* p) + { + return (reinterpret_cast<uintptr_t>(p) - reinterpret_cast<uintptr_t>(this)) / atomSize; + } + + inline bool MarkedBlock::isMarked(const void* p) + { + return m_marks.get(atomNumber(p)); + } + + inline bool MarkedBlock::testAndSetMarked(const void* p) + { + return m_marks.testAndSet(atomNumber(p)); + } + + inline void MarkedBlock::setMarked(const void* p) + { + m_marks.set(atomNumber(p)); + } + + template <typename Functor> inline void MarkedBlock::forEach(Functor& functor) + { + for (size_t i = firstAtom(); i < m_endAtom; i += m_atomsPerCell) { + if (!m_marks.get(i)) + continue; + functor(reinterpret_cast<JSCell*>(&atoms()[i])); + } + } + +} // namespace JSC + +#endif // MarkedSpace_h diff --git a/Source/JavaScriptCore/heap/MarkedSpace.cpp b/Source/JavaScriptCore/heap/MarkedSpace.cpp new file mode 100644 index 0000000..d9a1e42 --- /dev/null +++ b/Source/JavaScriptCore/heap/MarkedSpace.cpp @@ -0,0 +1,166 @@ +/* + * Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved. + * Copyright (C) 2007 Eric Seidel <eric@webkit.org> + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 */

#include "config.h"
#include "MarkedSpace.h"

#include "JSCell.h"
#include "JSGlobalData.h"
#include "JSLock.h"
#include "JSObject.h"
#include "ScopeChain.h"

namespace JSC {

class Structure;

// MarkedSpace manages the fixed-size-cell portion of the JS heap: MarkedBlocks
// grouped into size classes (precise small sizes, then coarser imprecise ones).
MarkedSpace::MarkedSpace(JSGlobalData* globalData)
    : m_waterMark(0)
    , m_highWaterMark(0)
    , m_globalData(globalData)
{
    // Seed each size class with the cell size it serves; allocation rounds a
    // request up to the matching class.
    for (size_t cellSize = preciseStep; cellSize < preciseCutoff; cellSize += preciseStep)
        sizeClassFor(cellSize).cellSize = cellSize;

    for (size_t cellSize = impreciseStep; cellSize < impreciseCutoff; cellSize += impreciseStep)
        sizeClassFor(cellSize).cellSize = cellSize;
}

// Tears down the space: clearing all marks makes every block empty, so
// shrink() releases every block; afterwards no live bytes remain.
void MarkedSpace::destroy()
{
    clearMarks();
    shrink();
    ASSERT(!size());
}

// Creates a new block for sizeClass, registers it in both the per-class list
// and the global block set, and makes it the class's next allocation target.
MarkedBlock* MarkedSpace::allocateBlock(SizeClass& sizeClass)
{
    MarkedBlock* block = MarkedBlock::create(globalData(), sizeClass.cellSize);
    sizeClass.blockList.append(block);
    sizeClass.nextBlock = block;
    m_blocks.add(block);

    return block;
}

// Destroys every block on the given list, unlinking each from both the list
// and the global block set. 'next' is saved first because remove() unlinks.
void MarkedSpace::freeBlocks(DoublyLinkedList<MarkedBlock>& blocks)
{
    MarkedBlock* next;
    for (MarkedBlock* block = blocks.head(); block; block = next) {
        next = block->next();

        blocks.remove(block);
        m_blocks.remove(block);
        MarkedBlock::destroy(block);
    }
}

// Allocates a cell from the size class's current block, advancing past
// exhausted blocks. Each exhausted block's capacity is charged to the water
// mark exactly once (nextBlock is a reference, so progress is remembered).
// Returns 0 once the high water mark is reached, signalling the caller that a
// collection is needed before growing the heap.
void* MarkedSpace::allocateFromSizeClass(SizeClass& sizeClass)
{
    for (MarkedBlock*& block = sizeClass.nextBlock ; block; block = block->next()) {
        if (void* result = block->allocate())
            return result;

        m_waterMark += block->capacity();
    }

    if (m_waterMark < m_highWaterMark)
        return allocateBlock(sizeClass)->allocate();

    return 0;
}

// Releases every block that holds no live cells, returning its memory to the OS.
void MarkedSpace::shrink()
{
    // We record a temporary list of empties to avoid modifying m_blocks while iterating it.
    DoublyLinkedList<MarkedBlock> empties;

    BlockIterator end = m_blocks.end();
    for (BlockIterator it = m_blocks.begin(); it != end; ++it) {
        MarkedBlock* block = *it;
        if (block->isEmpty()) {
            SizeClass& sizeClass = sizeClassFor(block->cellSize());
            sizeClass.blockList.remove(block);
            // Reset the class's allocation cursor; it may have pointed at the
            // block being removed.
            sizeClass.nextBlock = sizeClass.blockList.head();
            empties.append(block);
        }
    }

    freeBlocks(empties);
    ASSERT(empties.isEmpty());
}

// Clears the mark bits of every block (start of a new mark phase).
void MarkedSpace::clearMarks()
{
    BlockIterator end = m_blocks.end();
    for (BlockIterator it = m_blocks.begin(); it != end; ++it)
        (*it)->clearMarks();
}

// Sweeps every block, finalizing unmarked cells.
void MarkedSpace::sweep()
{
    BlockIterator end = m_blocks.end();
    for (BlockIterator it = m_blocks.begin(); it != end; ++it)
        (*it)->sweep();
}

// Total number of live (marked) cells across all blocks.
size_t MarkedSpace::objectCount() const
{
    size_t result = 0;
    BlockIterator end = m_blocks.end();
    for (BlockIterator it = m_blocks.begin(); it != end; ++it)
        result += (*it)->markCount();
    return result;
}

// Total bytes occupied by live cells across all blocks.
size_t MarkedSpace::size() const
{
    size_t result = 0;
    BlockIterator end = m_blocks.end();
    for (BlockIterator it = m_blocks.begin(); it != end; ++it)
        result += (*it)->size();
    return result;
}

// Total bytes reserved by all blocks, live or not.
size_t MarkedSpace::capacity() const
{
    size_t result = 0;
    BlockIterator end = m_blocks.end();
    for (BlockIterator it = m_blocks.begin(); it != end; ++it)
        result += (*it)->capacity();
    return result;
}

// Prepares for a fresh allocation cycle after collection: zeroes the water
// mark, rewinds every size class's cursor, and resets every block's bump
// allocator.
void MarkedSpace::reset()
{
    m_waterMark = 0;

    for (size_t cellSize = preciseStep; cellSize < preciseCutoff; cellSize += preciseStep)
        sizeClassFor(cellSize).reset();

    for (size_t cellSize = impreciseStep; cellSize < impreciseCutoff; cellSize += impreciseStep)
        sizeClassFor(cellSize).reset();

    BlockIterator end = m_blocks.end();
    for (BlockIterator it = m_blocks.begin(); it != end; ++it)
        (*it)->reset();
}

} // namespace JSC
diff --git a/Source/JavaScriptCore/heap/MarkedSpace.h b/Source/JavaScriptCore/heap/MarkedSpace.h new file mode 100644 index 0000000..29a8cd0 --- /dev/null +++
b/Source/JavaScriptCore/heap/MarkedSpace.h @@ -0,0 +1,172 @@
/*
 * Copyright (C) 1999-2000 Harri Porten (porten@kde.org)
 * Copyright (C) 2001 Peter Kelly (pmk@post.com)
 * Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 */

#ifndef MarkedSpace_h
#define MarkedSpace_h

#include "MachineStackMarker.h"
#include "MarkedBlock.h"
#include "PageAllocationAligned.h"
#include <wtf/Bitmap.h>
#include <wtf/DoublyLinkedList.h>
#include <wtf/FixedArray.h>
#include <wtf/HashSet.h>
#include <wtf/Noncopyable.h>
#include <wtf/Vector.h>

// Compile-time check that a JSCell subclass fits in the largest cell size.
#define ASSERT_CLASS_FITS_IN_CELL(class) COMPILE_ASSERT(sizeof(class) < MarkedSpace::maxCellSize, class_fits_in_cell)

namespace JSC {

    class Heap;
    class JSCell;
    class JSGlobalData;
    class LiveObjectIterator;
    class MarkStack;
    class WeakGCHandle;

    // The fixed-size-cell heap: owns all MarkedBlocks and hands out cells via
    // per-size-class bump allocation.
    class MarkedSpace {
        WTF_MAKE_NONCOPYABLE(MarkedSpace);
    public:
        // Currently public for use in assertions.
        static const size_t maxCellSize = 1024;

        // Static helpers: route a cell to its enclosing block's mark bits/heap.
        static Heap* heap(JSCell*);

        static bool isMarked(const JSCell*);
        static bool testAndSetMarked(const JSCell*);
        static void setMarked(const JSCell*);

        MarkedSpace(JSGlobalData*);
        void destroy();

        JSGlobalData* globalData() { return m_globalData; }

        // Allocation budget before the caller should trigger a collection.
        size_t highWaterMark() { return m_highWaterMark; }
        void setHighWaterMark(size_t highWaterMark) { m_highWaterMark = highWaterMark; }

        void* allocate(size_t);

        void clearMarks();
        void markRoots();
        void reset();
        void sweep();
        void shrink();

        size_t size() const;       // live bytes
        size_t capacity() const;   // reserved bytes
        size_t objectCount() const;

        // True if p points at the head of a live cell in this space.
        bool contains(const void*);

        template<typename Functor> void forEach(Functor&);

    private:
        // [ 8, 16... 128 ) — one class per atomSize step.
        static const size_t preciseStep = MarkedBlock::atomSize;
        static const size_t preciseCutoff = 128;
        static const size_t preciseCount = preciseCutoff / preciseStep - 1;

        // [ 128, 256... 1024 ) — coarser classes above the precise cutoff.
        static const size_t impreciseStep = preciseCutoff;
        static const size_t impreciseCutoff = maxCellSize;
        static const size_t impreciseCount = impreciseCutoff / impreciseStep - 1;

        typedef HashSet<MarkedBlock*>::iterator BlockIterator;

        // One allocation bucket: its blocks plus the current bump-alloc cursor.
        struct SizeClass {
            SizeClass();
            void reset();

            MarkedBlock* nextBlock; // next block to try allocating from
            DoublyLinkedList<MarkedBlock> blockList;
            size_t cellSize;
        };

        MarkedBlock* allocateBlock(SizeClass&);
        void freeBlocks(DoublyLinkedList<MarkedBlock>&);

        SizeClass& sizeClassFor(size_t);
        void* allocateFromSizeClass(SizeClass&);

        void clearMarks(MarkedBlock*);

        SizeClass m_preciseSizeClasses[preciseCount];
        SizeClass m_impreciseSizeClasses[impreciseCount];
        HashSet<MarkedBlock*> m_blocks;   // every block owned by this space
        size_t m_waterMark;               // bytes of exhausted blocks this cycle
        size_t m_highWaterMark;           // allocation budget per cycle
        JSGlobalData* m_globalData;
    };

    inline Heap* MarkedSpace::heap(JSCell* cell)
    {
        return MarkedBlock::blockFor(cell)->heap();
    }

    inline bool MarkedSpace::isMarked(const JSCell* cell)
    {
        return MarkedBlock::blockFor(cell)->isMarked(cell);
    }

    inline bool MarkedSpace::testAndSetMarked(const JSCell* cell)
    {
        return MarkedBlock::blockFor(cell)->testAndSetMarked(cell);
    }

    inline void MarkedSpace::setMarked(const JSCell* cell)
    {
        MarkedBlock::blockFor(cell)->setMarked(cell);
    }

    // Conservative containment test used for interior-pointer scanning:
    // alignment check, then block membership, then the block's mark-bit check.
    inline bool MarkedSpace::contains(const void* x)
    {
        if (!MarkedBlock::isAtomAligned(x))
            return false;

        MarkedBlock* block = MarkedBlock::blockFor(x);
        if (!block || !m_blocks.contains(block))
            return false;

        return block->contains(x);
    }

    // Invokes functor on every live cell in every block.
    template <typename Functor> inline void MarkedSpace::forEach(Functor& functor)
    {
        BlockIterator end = m_blocks.end();
        for (BlockIterator it = m_blocks.begin(); it != end; ++it)
            (*it)->forEach(functor);
    }

    inline MarkedSpace::SizeClass::SizeClass()
        : nextBlock(0)
        , cellSize(0)
    {
    }

    // Rewinds the allocation cursor to the start of the block list.
    inline void MarkedSpace::SizeClass::reset()
    {
        nextBlock = blockList.head();
    }

} // namespace JSC

#endif // MarkedSpace_h
diff --git a/Source/JavaScriptCore/heap/Strong.h b/Source/JavaScriptCore/heap/Strong.h new file mode 100644 index 0000000..9f2aa05 --- /dev/null +++ b/Source/JavaScriptCore/heap/Strong.h @@ -0,0 +1,164 @@
/*
 * Copyright (C) 2011 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC.
 AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef Strong_h
#define Strong_h

#include "Assertions.h"
#include "Handle.h"
#include "HandleHeap.h"

namespace JSC {

class JSGlobalData;
HandleSlot allocateGlobalHandle(JSGlobalData&);

// A strongly referenced handle that prevents the object it points to from being garbage collected.
// Each live Strong owns one slot in the HandleHeap; copies allocate their own
// slot, and the destructor returns the slot to the heap.
template <typename T> class Strong : public Handle<T> {
    using Handle<T>::slot;
    using Handle<T>::setSlot;

public:
    typedef typename Handle<T>::ExternalType ExternalType;

    // Empty handle: owns no slot until set(globalData, value) is called.
    Strong()
        : Handle<T>()
    {
    }

    // Allocates a slot and stores value (defaults to the empty value).
    Strong(JSGlobalData& globalData, ExternalType value = ExternalType())
        : Handle<T>(allocateGlobalHandle(globalData))
    {
        set(value);
    }

    // Pins the value currently referenced by another (possibly weak) handle.
    Strong(JSGlobalData& globalData, Handle<T> handle)
        : Handle<T>(allocateGlobalHandle(globalData))
    {
        set(handle.get());
    }

    // Copying allocates a fresh slot in the same HandleHeap as the source;
    // an empty source yields an empty copy.
    Strong(const Strong& other)
        : Handle<T>()
    {
        if (!other.slot())
            return;
        setSlot(HandleHeap::heapFor(other.slot())->allocate());
        set(other.get());
    }

    // Converting copy (e.g. Strong<Derived> -> Strong<Base>).
    template <typename U> Strong(const Strong<U>& other)
        : Handle<T>()
    {
        if (!other.slot())
            return;
        setSlot(HandleHeap::heapFor(other.slot())->allocate());
        set(other.get());
    }

    // Support for use as a key in WTF hash tables.
    enum HashTableDeletedValueTag { HashTableDeletedValue };
    bool isHashTableDeletedValue() const { return slot() == hashTableDeletedValue(); }
    Strong(HashTableDeletedValueTag)
        : Handle<T>(hashTableDeletedValue())
    {
    }

    ~Strong()
    {
        clear();
    }

    void swap(Strong& other)
    {
        Handle<T>::swap(other);
    }

    // Stores value, lazily allocating a slot for a previously empty handle.
    void set(JSGlobalData& globalData, ExternalType value)
    {
        if (!slot())
            setSlot(allocateGlobalHandle(globalData));
        set(value);
    }

    // Assignment from an empty handle clears; otherwise the source's heap is
    // recovered from its slot so no explicit JSGlobalData is needed.
    template <typename U> Strong& operator=(const Strong<U>& other)
    {
        if (!other.slot()) {
            clear();
            return *this;
        }

        set(*HandleHeap::heapFor(other.slot())->globalData(), other.get());
        return *this;
    }

    Strong& operator=(const Strong& other)
    {
        if (!other.slot()) {
            clear();
            return *this;
        }

        set(*HandleHeap::heapFor(other.slot())->globalData(), other.get());
        return *this;
    }

    // Returns the slot to the HandleHeap and leaves this handle empty.
    void clear()
    {
        if (!slot())
            return;
        HandleHeap::heapFor(slot())->deallocate(slot());
        setSlot(0);
    }

private:
    static HandleSlot hashTableDeletedValue() { return reinterpret_cast<HandleSlot>(-1); }

    // Writes value into the owned slot, notifying the write barrier first.
    void set(ExternalType externalType)
    {
        ASSERT(slot());
        JSValue value = HandleTypes<T>::toJSValue(externalType);
        HandleHeap::heapFor(slot())->writeBarrier(slot(), value);
        *slot() = value;
    }
};

template<class T> inline void swap(Strong<T>& a, Strong<T>& b)
{
    a.swap(b);
}

} // namespace JSC

namespace WTF {

// Strong is a simple slot-holder, but equal handles can live in different
// slots, so memcmp comparison would be wrong.
template<typename T> struct VectorTraits<JSC::Strong<T> > : SimpleClassVectorTraits {
    static const bool canCompareWithMemcmp = false;
};

template<typename P> struct HashTraits<JSC::Strong<P> > : SimpleClassHashTraits<JSC::Strong<P> > { };

}

#endif // Strong_h
diff --git a/Source/JavaScriptCore/heap/Weak.h b/Source/JavaScriptCore/heap/Weak.h new file mode 100644 index 0000000..62e2596 --- /dev/null +++ b/Source/JavaScriptCore/heap/Weak.h @@ -0,0 +1,155 @@
/*
 * Copyright (C) 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC.
 OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef Weak_h
#define Weak_h

#include "Assertions.h"
#include "Handle.h"
#include "HandleHeap.h"
#include "JSGlobalData.h"

namespace JSC {

// A weakly referenced handle that becomes 0 when the value it points to is garbage collected.
// The slot is registered as weak with the HandleHeap; an optional
// WeakHandleOwner (+ context) is notified when the referent dies.
template <typename T> class Weak : public Handle<T> {
    using Handle<T>::slot;
    using Handle<T>::setSlot;

public:
    typedef typename Handle<T>::ExternalType ExternalType;

    // Empty handle: owns no slot until set(globalData, ...) is called.
    Weak()
        : Handle<T>()
    {
    }

    // Allocates a slot, marks it weak, and stores value.
    // NOTE(review): this uses the member globalData.allocateGlobalHandle()
    // while Strong.h uses a free allocateGlobalHandle(globalData) — presumably
    // equivalent entry points; confirm against JSGlobalData.
    Weak(JSGlobalData& globalData, ExternalType value = ExternalType(), WeakHandleOwner* weakOwner = 0, void* context = 0)
        : Handle<T>(globalData.allocateGlobalHandle())
    {
        HandleHeap::heapFor(slot())->makeWeak(slot(), weakOwner, context);
        set(value);
    }

    // Copying duplicates the weak registration via copyWeak(); an empty
    // source yields an empty copy.
    Weak(const Weak& other)
        : Handle<T>()
    {
        if (!other.slot())
            return;
        setSlot(HandleHeap::heapFor(other.slot())->copyWeak(other.slot()));
    }

    // Converting copy (e.g. Weak<Derived> -> Weak<Base>).
    template <typename U> Weak(const Weak<U>& other)
        : Handle<T>()
    {
        if (!other.slot())
            return;
        setSlot(HandleHeap::heapFor(other.slot())->copyWeak(other.slot()));
    }

    // Support for use as a key in WTF hash tables.
    enum HashTableDeletedValueTag { HashTableDeletedValue };
    bool isHashTableDeletedValue() const { return slot() == hashTableDeletedValue(); }
    Weak(HashTableDeletedValueTag)
        : Handle<T>(hashTableDeletedValue())
    {
    }

    ~Weak()
    {
        clear();
    }

    void swap(Weak& other)
    {
        Handle<T>::swap(other);
    }

    // Returns the referent, or the empty value once it has been collected.
    ExternalType get() const { return HandleTypes<T>::getFromSlot(slot()); }

    // Returns the slot to the HandleHeap and leaves this handle empty.
    void clear()
    {
        if (!slot())
            return;
        HandleHeap::heapFor(slot())->deallocate(slot());
        setSlot(0);
    }

    // Stores value, lazily allocating + weak-registering a slot for a
    // previously empty handle. The owner must match the slot's registration.
    void set(JSGlobalData& globalData, ExternalType value, WeakHandleOwner* weakOwner = 0, void* context = 0)
    {
        if (!slot()) {
            setSlot(globalData.allocateGlobalHandle());
            HandleHeap::heapFor(slot())->makeWeak(slot(), weakOwner, context);
        }
        ASSERT(HandleHeap::heapFor(slot())->hasWeakOwner(slot(), weakOwner));
        set(value);
    }

    // Assignment clears first, then duplicates the source's weak registration.
    template <typename U> Weak& operator=(const Weak<U>& other)
    {
        clear();
        if (other.slot())
            setSlot(HandleHeap::heapFor(other.slot())->copyWeak(other.slot()));
        return *this;
    }

    Weak& operator=(const Weak& other)
    {
        clear();
        if (other.slot())
            setSlot(HandleHeap::heapFor(other.slot())->copyWeak(other.slot()));
        return *this;
    }

private:
    static HandleSlot hashTableDeletedValue() { return reinterpret_cast<HandleSlot>(-1); }

    // Writes value into the owned slot. The assert enforces that only live
    // (marked) cells are stored through a weak handle.
    void set(ExternalType externalType)
    {
        ASSERT(slot());
        JSValue value = HandleTypes<T>::toJSValue(externalType);
        ASSERT(!value || !value.isCell() || Heap::isMarked(value.asCell()));
        HandleHeap::heapFor(slot())->writeBarrier(slot(), value);
        *slot() = value;
    }
};

template<class T> inline void swap(Weak<T>& a, Weak<T>& b)
{
    a.swap(b);
}

} // namespace JSC

namespace WTF {

// Equal weak handles can occupy different slots, so memcmp comparison would
// be wrong.
template<typename T> struct VectorTraits<JSC::Weak<T> > : SimpleClassVectorTraits {
    static const bool canCompareWithMemcmp = false;
};

template<typename P> struct HashTraits<JSC::Weak<P> > : SimpleClassHashTraits<JSC::Weak<P> > { };

}

#endif // Weak_h