Diffstat (limited to 'V8Binding/v8/src/heap.cc')
-rw-r--r--  V8Binding/v8/src/heap.cc  235
1 file changed, 191 insertions(+), 44 deletions(-)
diff --git a/V8Binding/v8/src/heap.cc b/V8Binding/v8/src/heap.cc
index 213eec5..7c91778 100644
--- a/V8Binding/v8/src/heap.cc
+++ b/V8Binding/v8/src/heap.cc
@@ -681,33 +681,11 @@ void Heap::Scavenge() {
// Copy objects reachable from weak pointers.
GlobalHandles::IterateWeakRoots(&scavenge_visitor);
-#ifdef V8_HOST_ARCH_64_BIT
- // TODO(X64): Make this go away again. We currently disable RSets for
- // 64-bit-mode.
- HeapObjectIterator old_pointer_iterator(old_pointer_space_);
- while (old_pointer_iterator.has_next()) {
- HeapObject* heap_object = old_pointer_iterator.next();
- heap_object->Iterate(&scavenge_visitor);
- }
- HeapObjectIterator map_iterator(map_space_);
- while (map_iterator.has_next()) {
- HeapObject* heap_object = map_iterator.next();
- heap_object->Iterate(&scavenge_visitor);
- }
- LargeObjectIterator lo_iterator(lo_space_);
- while (lo_iterator.has_next()) {
- HeapObject* heap_object = lo_iterator.next();
- if (heap_object->IsFixedArray()) {
- heap_object->Iterate(&scavenge_visitor);
- }
- }
-#else // !defined(V8_HOST_ARCH_64_BIT)
// Copy objects reachable from the old generation. By definition,
// there are no intergenerational pointers in code or data spaces.
IterateRSet(old_pointer_space_, &ScavengePointer);
IterateRSet(map_space_, &ScavengePointer);
lo_space_->IterateRSet(&ScavengePointer);
-#endif
// Copy objects reachable from cells by scavenging cell values directly.
HeapObjectIterator cell_iterator(cell_space_);
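Note on the hunk above: with the 64-bit workaround deleted, the scavenger goes back to consulting remembered sets (RSets) instead of walking every old-space, map-space, and large object. The following is a simplified, standalone illustration of that idea; all names (Obj, WriteField, ScavengeRememberedSlots) are hypothetical and stand in for V8's Page/RSet machinery rather than reproducing it.

// Simplified illustration: a write barrier records old-to-new slots so a
// scavenge only visits those slots instead of walking every old-generation
// object (which is what the deleted 64-bit fallback did).
#include <cstdio>
#include <unordered_set>

struct Obj { int generation; Obj* field; };   // 0 = new space, 1 = old space

static std::unordered_set<Obj**> remembered_set;   // recorded old-to-new slots

// Write barrier: called on every pointer store into an old object.
void WriteField(Obj* holder, Obj** slot, Obj* value) {
  *slot = value;
  if (holder->generation == 1 && value != nullptr && value->generation == 0) {
    remembered_set.insert(slot);   // old object now points into new space
  }
}

// Scavenge: only the recorded slots act as roots into new space.
void ScavengeRememberedSlots(void (*visit)(Obj**)) {
  for (Obj** slot : remembered_set) visit(slot);
}

int main() {
  Obj old_obj{1, nullptr};
  Obj young{0, nullptr};
  WriteField(&old_obj, &old_obj.field, &young);
  ScavengeRememberedSlots([](Obj** slot) {
    std::printf("visiting old->new slot holding %p\n",
                static_cast<void*>(*slot));
  });
}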
@@ -830,13 +808,11 @@ class UpdateRSetVisitor: public ObjectVisitor {
int Heap::UpdateRSet(HeapObject* obj) {
-#ifndef V8_HOST_ARCH_64_BIT
- // TODO(X64) Reenable RSet when we have a working 64-bit layout of Page.
ASSERT(!InNewSpace(obj));
// Special handling of fixed arrays to iterate the body based on the start
// address and offset. Just iterating the pointers as in UpdateRSetVisitor
// will not work because Page::SetRSet needs to have the start of the
- // object.
+ // object for large object pages.
if (obj->IsFixedArray()) {
FixedArray* array = FixedArray::cast(obj);
int length = array->length();
@@ -853,7 +829,6 @@ int Heap::UpdateRSet(HeapObject* obj) {
UpdateRSetVisitor v;
obj->Iterate(&v);
}
-#endif // V8_HOST_ARCH_64_BIT
return obj->Size();
}
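The reworded comment in this hunk is the key point: for fixed arrays the visitor records (object start, byte offset) pairs because on large object pages the remembered-set entry is derived from the object's start address, not from the raw slot address. A minimal sketch of that shape follows; FakeArray, SetRSetAt, and UpdateRSetForArray are hypothetical stand-ins, not V8's Page/RSet layout.

// Simplified sketch: slots are recorded relative to the object's start.
#include <cstddef>
#include <cstdio>

struct FakeArray {
  int length;
  void* elements[4];   // inline element slots following the header
};

// Records an old-to-new slot as (object start, byte offset).
void SetRSetAt(const void* object_start, size_t offset) {
  std::printf("mark slot: start=%p offset=%zu\n", object_start, offset);
}

int UpdateRSetForArray(FakeArray* array, bool (*points_to_new_space)(void*)) {
  for (int i = 0; i < array->length; i++) {
    if (points_to_new_space(array->elements[i])) {
      size_t offset = offsetof(FakeArray, elements) + i * sizeof(void*);
      SetRSetAt(array, offset);   // start + offset, not the slot address
    }
  }
  return static_cast<int>(sizeof(FakeArray));  // caller sums visited sizes
}

int main() {
  FakeArray a{4, {nullptr, &a, nullptr, &a}};
  UpdateRSetForArray(&a, [](void* p) { return p != nullptr; });
}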
@@ -1191,6 +1166,10 @@ bool Heap::CreateInitialMaps() {
if (obj->IsFailure()) return false;
set_byte_array_map(Map::cast(obj));
+ obj = AllocateMap(PIXEL_ARRAY_TYPE, PixelArray::kAlignedSize);
+ if (obj->IsFailure()) return false;
+ set_pixel_array_map(Map::cast(obj));
+
obj = AllocateMap(CODE_TYPE, Code::kHeaderSize);
if (obj->IsFailure()) return false;
set_code_map(Map::cast(obj));
@@ -1407,6 +1386,12 @@ bool Heap::CreateInitialObjects() {
if (obj->IsFailure()) return false;
set_the_hole_value(obj);
+ obj = CreateOddball(
+ oddball_map(), "no_interceptor_result_sentinel", Smi::FromInt(-2));
+ if (obj->IsFailure()) return false;
+ set_no_interceptor_result_sentinel(obj);
+
+
// Allocate the empty string.
obj = AllocateRawAsciiString(0, TENURED);
if (obj->IsFailure()) return false;
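The oddball added above, no_interceptor_result_sentinel, is a unique heap value used to signal "the interceptor produced no result", which cannot be confused with any real result (not even undefined). A generic sketch of that sentinel pattern, with hypothetical names, is:

// Simplified illustration of a unique sentinel value distinct from any
// legal result. Not V8's API; names are made up for the sketch.
#include <cstdio>

struct Value {};                       // stand-in for a heap value

Value* NoResultSentinel() {
  static Value sentinel;               // unique address, never exposed as data
  return &sentinel;
}

// An "interceptor" either produces a value or returns the sentinel.
Value* QueryInterceptor(bool has_result, Value* real_value) {
  return has_result ? real_value : NoResultSentinel();
}

int main() {
  Value v;
  Value* r = QueryInterceptor(false, &v);
  if (r == NoResultSentinel()) {
    std::printf("interceptor declined; fall back to normal lookup\n");
  }
}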
@@ -1433,13 +1418,15 @@ bool Heap::CreateInitialObjects() {
if (obj->IsFailure()) return false;
set_prototype_accessors(Proxy::cast(obj));
- // Allocate the code_stubs dictionary.
- obj = NumberDictionary::Allocate(4);
+ // Allocate the code_stubs dictionary. The initial size is set to avoid
+ // expanding the dictionary during bootstrapping.
+ obj = NumberDictionary::Allocate(128);
if (obj->IsFailure()) return false;
set_code_stubs(NumberDictionary::cast(obj));
- // Allocate the non_monomorphic_cache used in stub-cache.cc
- obj = NumberDictionary::Allocate(4);
+ // Allocate the non_monomorphic_cache used in stub-cache.cc. The initial size
+ // is set to avoid expanding the dictionary during bootstrapping.
+ obj = NumberDictionary::Allocate(64);
if (obj->IsFailure()) return false;
set_non_monomorphic_cache(NumberDictionary::cast(obj));
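The rationale for bumping the initial sizes (4 to 128 and 4 to 64) is to avoid grow-and-rehash cycles while these tables fill up during bootstrapping. The same effect can be shown with an ordinary hash map standing in for NumberDictionary; the sketch below is only an analogy, not V8 code.

// Reserving capacity up front avoids repeated rehashing during insertion.
#include <cstdio>
#include <unordered_map>

int main() {
  std::unordered_map<int, int> presized;
  presized.reserve(128);                       // analogous to Allocate(128)
  size_t buckets_before = presized.bucket_count();

  for (int i = 0; i < 100; i++) presized[i] = i;

  // With enough reserved capacity, no rehash happened during insertion.
  std::printf("rehashed: %s\n",
              presized.bucket_count() == buckets_before ? "no" : "yes");
}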
@@ -1576,8 +1563,7 @@ Object* Heap::NumberFromDouble(double value, PretenureFlag pretenure) {
Object* Heap::AllocateProxy(Address proxy, PretenureFlag pretenure) {
// Statically ensure that it is safe to allocate proxies in paged spaces.
STATIC_ASSERT(Proxy::kSize <= Page::kMaxHeapObjectSize);
- AllocationSpace space =
- (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
+ AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
Object* result = Allocate(proxy_map(), space);
if (result->IsFailure()) return result;
@@ -1683,7 +1669,7 @@ Object* Heap::AllocateSlicedString(String* buffer,
int length = end - start;
// If the resulting string is small make a sub string.
- if (end - start <= String::kMinNonFlatLength) {
+ if (length <= String::kMinNonFlatLength) {
return Heap::AllocateSubString(buffer, start, end);
}
@@ -1859,6 +1845,23 @@ void Heap::CreateFillerObjectAt(Address addr, int size) {
}
+Object* Heap::AllocatePixelArray(int length,
+ uint8_t* external_pointer,
+ PretenureFlag pretenure) {
+ AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
+
+ Object* result = AllocateRaw(PixelArray::kAlignedSize, space, OLD_DATA_SPACE);
+
+ if (result->IsFailure()) return result;
+
+ reinterpret_cast<PixelArray*>(result)->set_map(pixel_array_map());
+ reinterpret_cast<PixelArray*>(result)->set_length(length);
+ reinterpret_cast<PixelArray*>(result)->set_external_pointer(external_pointer);
+
+ return result;
+}
+
+
Object* Heap::CreateCode(const CodeDesc& desc,
ZoneScopeInfo* sinfo,
Code::Flags flags,
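The new AllocatePixelArray above allocates only a small heap object that records a length and a pointer to externally allocated pixel memory; the bytes themselves live outside the JS heap. The class below is a hypothetical stand-in (including the byte clamping on store), not V8's PixelArray.

// Simplified sketch of an array object backed by embedder-owned memory.
#include <cstdint>
#include <cstdio>
#include <vector>

class ExternalPixelBuffer {
 public:
  ExternalPixelBuffer(int length, uint8_t* external_pointer)
      : length_(length), external_pointer_(external_pointer) {}

  int length() const { return length_; }
  uint8_t get(int index) const { return external_pointer_[index]; }
  void set(int index, int value) {
    // In this sketch, stores are clamped to the 0..255 byte range.
    if (value < 0) value = 0;
    if (value > 255) value = 255;
    external_pointer_[index] = static_cast<uint8_t>(value);
  }

 private:
  int length_;
  uint8_t* external_pointer_;   // owned by the embedder, not the GC heap
};

int main() {
  std::vector<uint8_t> backing(4, 0);              // embedder-owned memory
  ExternalPixelBuffer pixels(4, backing.data());
  pixels.set(0, 300);                              // clamps to 255
  std::printf("pixel[0] = %d\n", pixels.get(0));
}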
@@ -2077,6 +2080,11 @@ Object* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) {
// properly initialized.
ASSERT(map->instance_type() != JS_FUNCTION_TYPE);
+ // Both types of global objects should be allocated using
+ // AllocateGlobalObject to be properly initialized.
+ ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE);
+ ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE);
+
// Allocate the backing storage for the properties.
int prop_size = map->unused_property_fields() - map->inobject_properties();
Object* properties = AllocateFixedArray(prop_size, pretenure);
@@ -2117,24 +2125,62 @@ Object* Heap::AllocateJSObject(JSFunction* constructor,
Object* Heap::AllocateGlobalObject(JSFunction* constructor) {
ASSERT(constructor->has_initial_map());
+ Map* map = constructor->initial_map();
+
// Make sure no field properties are described in the initial map.
// This guarantees us that normalizing the properties does not
// require us to change property values to JSGlobalPropertyCells.
- ASSERT(constructor->initial_map()->NextFreePropertyIndex() == 0);
+ ASSERT(map->NextFreePropertyIndex() == 0);
// Make sure we don't have a ton of pre-allocated slots in the
// global objects. They will be unused once we normalize the object.
- ASSERT(constructor->initial_map()->unused_property_fields() == 0);
- ASSERT(constructor->initial_map()->inobject_properties() == 0);
+ ASSERT(map->unused_property_fields() == 0);
+ ASSERT(map->inobject_properties() == 0);
+
+ // Initial size of the backing store to avoid resizing the storage during
+ // bootstrapping. The size differs between the JS global object and the
+ // builtins object.
+ int initial_size = map->instance_type() == JS_GLOBAL_OBJECT_TYPE ? 64 : 512;
+
+ // Allocate a dictionary object for backing storage.
+ Object* obj =
+ StringDictionary::Allocate(
+ map->NumberOfDescribedProperties() * 2 + initial_size);
+ if (obj->IsFailure()) return obj;
+ StringDictionary* dictionary = StringDictionary::cast(obj);
+
+ // The global object might be created from an object template with accessors.
+ // Fill these accessors into the dictionary.
+ DescriptorArray* descs = map->instance_descriptors();
+ for (int i = 0; i < descs->number_of_descriptors(); i++) {
+ PropertyDetails details = descs->GetDetails(i);
+ ASSERT(details.type() == CALLBACKS); // Only accessors are expected.
+ PropertyDetails d =
+ PropertyDetails(details.attributes(), CALLBACKS, details.index());
+ Object* value = descs->GetCallbacksObject(i);
+ value = Heap::AllocateJSGlobalPropertyCell(value);
+ if (value->IsFailure()) return value;
+
+ Object* result = dictionary->Add(descs->GetKey(i), value, d);
+ if (result->IsFailure()) return result;
+ dictionary = StringDictionary::cast(result);
+ }
- // Allocate the object based on the constructors initial map.
- Object* result = AllocateJSObjectFromMap(constructor->initial_map(), TENURED);
- if (result->IsFailure()) return result;
+ // Allocate the global object and initialize it with the backing store.
+ obj = Allocate(map, OLD_POINTER_SPACE);
+ if (obj->IsFailure()) return obj;
+ JSObject* global = JSObject::cast(obj);
+ InitializeJSObjectFromMap(global, dictionary, map);
- // Normalize the result.
- JSObject* global = JSObject::cast(result);
- result = global->NormalizeProperties(CLEAR_INOBJECT_PROPERTIES);
- if (result->IsFailure()) return result;
+ // Create a new map for the global object.
+ obj = map->CopyDropDescriptors();
+ if (obj->IsFailure()) return obj;
+ Map* new_map = Map::cast(obj);
+
+ // Setup the global object as a normalized object.
+ global->set_map(new_map);
+ global->map()->set_instance_descriptors(Heap::empty_descriptor_array());
+ global->set_properties(dictionary);
// Make sure result is a global object with properties in dictionary.
ASSERT(global->IsGlobalObject());
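The structural point of this hunk: global objects are now born in dictionary mode with a pre-sized StringDictionary, and each accessor value is wrapped in a JSGlobalPropertyCell before being added. The cell adds one level of indirection per property, so a caller that has looked the property up once can keep a reference to the cell and still observe later updates to the value. The sketch below illustrates only that indirection idea with hypothetical types (PropertyCell, GlobalObject), not V8's actual StringDictionary/JSGlobalPropertyCell implementation.

// Dictionary-mode backing store where names map to cells, not raw values.
#include <cstdio>
#include <memory>
#include <string>
#include <unordered_map>

struct PropertyCell { int value; };   // one level of indirection per property

class GlobalObject {
 public:
  PropertyCell* AddProperty(const std::string& name, int value) {
    auto cell = std::make_unique<PropertyCell>(PropertyCell{value});
    PropertyCell* raw = cell.get();
    properties_[name] = std::move(cell);
    return raw;
  }
  PropertyCell* Lookup(const std::string& name) {
    auto it = properties_.find(name);
    return it == properties_.end() ? nullptr : it->second.get();
  }

 private:
  std::unordered_map<std::string, std::unique_ptr<PropertyCell>> properties_;
};

int main() {
  GlobalObject global;
  PropertyCell* cached = global.AddProperty("x", 1);   // cache the cell once
  global.Lookup("x")->value = 42;                      // later store
  std::printf("cached cell sees %d\n", cached->value); // prints 42
}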
@@ -3391,6 +3437,100 @@ void HeapIterator::reset() {
}
+#ifdef ENABLE_LOGGING_AND_PROFILING
+namespace {
+
+// JSConstructorProfile is responsible for gathering and logging
+// "constructor profile" of JS object allocated on heap.
+// It is run during garbage collection cycle, thus it doesn't need
+// to use handles.
+class JSConstructorProfile BASE_EMBEDDED {
+ public:
+ JSConstructorProfile() : zscope_(DELETE_ON_EXIT) {}
+ void CollectStats(JSObject* obj);
+ void PrintStats();
+ // Used by ZoneSplayTree::ForEach.
+ void Call(String* name, const NumberAndSizeInfo& number_and_size);
+ private:
+ struct TreeConfig {
+ typedef String* Key;
+ typedef NumberAndSizeInfo Value;
+ static const Key kNoKey;
+ static const Value kNoValue;
+ // Strings are unique, so it is sufficient to compare their pointers.
+ static int Compare(const Key& a, const Key& b) {
+ return a == b ? 0 : (a < b ? -1 : 1);
+ }
+ };
+
+ typedef ZoneSplayTree<TreeConfig> JSObjectsInfoTree;
+ static int CalculateJSObjectNetworkSize(JSObject* obj);
+
+ ZoneScope zscope_;
+ JSObjectsInfoTree js_objects_info_tree_;
+};
+
+const JSConstructorProfile::TreeConfig::Key
+ JSConstructorProfile::TreeConfig::kNoKey = NULL;
+const JSConstructorProfile::TreeConfig::Value
+ JSConstructorProfile::TreeConfig::kNoValue;
+
+
+int JSConstructorProfile::CalculateJSObjectNetworkSize(JSObject* obj) {
+ int size = obj->Size();
+ // If 'properties' and 'elements' are non-empty (thus, non-shared),
+ // take their size into account.
+ if (FixedArray::cast(obj->properties())->length() != 0) {
+ size += obj->properties()->Size();
+ }
+ if (FixedArray::cast(obj->elements())->length() != 0) {
+ size += obj->elements()->Size();
+ }
+ return size;
+}
+
+
+void JSConstructorProfile::Call(String* name,
+ const NumberAndSizeInfo& number_and_size) {
+ SmartPointer<char> s_name;
+ if (name != NULL) {
+ s_name = name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
+ }
+ LOG(HeapSampleJSConstructorEvent(*s_name,
+ number_and_size.number(),
+ number_and_size.bytes()));
+}
+
+
+void JSConstructorProfile::CollectStats(JSObject* obj) {
+ String* constructor_func = NULL;
+ if (obj->map()->constructor()->IsJSFunction()) {
+ JSFunction* constructor = JSFunction::cast(obj->map()->constructor());
+ SharedFunctionInfo* sfi = constructor->shared();
+ String* name = String::cast(sfi->name());
+ constructor_func = name->length() > 0 ? name : sfi->inferred_name();
+ } else if (obj->IsJSFunction()) {
+ constructor_func = Heap::function_class_symbol();
+ }
+ JSObjectsInfoTree::Locator loc;
+ if (!js_objects_info_tree_.Find(constructor_func, &loc)) {
+ js_objects_info_tree_.Insert(constructor_func, &loc);
+ }
+ NumberAndSizeInfo number_and_size = loc.value();
+ number_and_size.increment_number(1);
+ number_and_size.increment_bytes(CalculateJSObjectNetworkSize(obj));
+ loc.set_value(number_and_size);
+}
+
+
+void JSConstructorProfile::PrintStats() {
+ js_objects_info_tree_.ForEach(this);
+}
+
+} // namespace
+#endif
+
+
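In essence, JSConstructorProfile walks heap objects, groups JS objects by constructor name, and accumulates a count and a byte total per group before logging each entry. A standalone version of that aggregation, with ordinary containers in place of the zone-allocated splay tree and with hypothetical names (FakeObject, PrintConstructorProfile), looks like this:

// Group objects by constructor name and accumulate count plus byte totals.
#include <cstdio>
#include <map>
#include <string>
#include <vector>

struct FakeObject { std::string constructor_name; size_t size; };

struct NumberAndSize { int number = 0; size_t bytes = 0; };

void PrintConstructorProfile(const std::vector<FakeObject>& heap) {
  std::map<std::string, NumberAndSize> stats;
  for (const FakeObject& obj : heap) {
    NumberAndSize& entry = stats[obj.constructor_name];
    entry.number += 1;
    entry.bytes += obj.size;            // would include properties/elements
  }
  for (const auto& it : stats) {
    std::printf("%s,%d,%zu\n", it.first.c_str(), it.second.number,
                it.second.bytes);
  }
}

int main() {
  PrintConstructorProfile({{"Point", 24}, {"Point", 24}, {"Array", 56}});
}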
//
// HeapProfiler class implementation.
//
@@ -3415,9 +3555,14 @@ void HeapProfiler::WriteSample() {
INSTANCE_TYPE_LIST(DEF_TYPE_NAME)
#undef DEF_TYPE_NAME
+ JSConstructorProfile js_cons_profile;
HeapIterator iterator;
while (iterator.has_next()) {
- CollectStats(iterator.next(), info);
+ HeapObject* obj = iterator.next();
+ CollectStats(obj, info);
+ if (obj->IsJSObject()) {
+ js_cons_profile.CollectStats(JSObject::cast(obj));
+ }
}
// Lump all the string types together.
@@ -3439,6 +3584,8 @@ void HeapProfiler::WriteSample() {
}
}
+ js_cons_profile.PrintStats();
+
LOG(HeapSampleEndEvent("Heap", "allocated"));
}