summaryrefslogtreecommitdiffstats
path: root/V8Binding/v8/src/x64/stub-cache-x64.cc
diff options
context:
space:
mode:
Diffstat (limited to 'V8Binding/v8/src/x64/stub-cache-x64.cc')
-rw-r--r-- V8Binding/v8/src/x64/stub-cache-x64.cc | 714
1 files changed, 670 insertions, 44 deletions
diff --git a/V8Binding/v8/src/x64/stub-cache-x64.cc b/V8Binding/v8/src/x64/stub-cache-x64.cc
index c577615..ce7886b 100644
--- a/V8Binding/v8/src/x64/stub-cache-x64.cc
+++ b/V8Binding/v8/src/x64/stub-cache-x64.cc
@@ -36,32 +36,202 @@
namespace v8 {
namespace internal {
-#define __ ACCESS_MASM((&masm_))
-
-
-Object* CallStubCompiler::CompileCallConstant(Object* a,
- JSObject* b,
- JSFunction* c,
- String* d,
- StubCompiler::CheckType e) {
- UNIMPLEMENTED();
- return NULL;
+#define __ ACCESS_MASM((masm()))
+
+
+Object* CallStubCompiler::CompileCallConstant(Object* object,
+ JSObject* holder,
+ JSFunction* function,
+ String* name,
+ StubCompiler::CheckType check) {
+ // ----------- S t a t e -------------
+ // -----------------------------------
+ // rsp[0] return address
+ // rsp[8] argument argc
+ // rsp[16] argument argc - 1
+ // ...
+ // rsp[argc * 8] argument 1
+ // rsp[(argc + 1) * 8] argument 0 = receiver
+ // rsp[(argc + 2) * 8] function name
+
+ Label miss;
+
+ // Get the receiver from the stack.
+ const int argc = arguments().immediate();
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
+
+ // Check that the receiver isn't a smi.
+ if (check != NUMBER_CHECK) {
+ __ testl(rdx, Immediate(kSmiTagMask));
+ __ j(zero, &miss);
+ }
+
+ // Make sure that it's okay not to patch the on stack receiver
+ // unless we're doing a receiver map check.
+ ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
+
+ switch (check) {
+ case RECEIVER_MAP_CHECK:
+ // Check that the maps haven't changed.
+ CheckPrototypes(JSObject::cast(object), rdx, holder,
+ rbx, rcx, name, &miss);
+
+ // Patch the receiver on the stack with the global proxy if
+ // necessary.
+ if (object->IsGlobalObject()) {
+ __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
+ __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
+ }
+ break;
+
+ case STRING_CHECK:
+ // Check that the object is a two-byte string or a symbol.
+ __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, rcx);
+ __ j(above_equal, &miss);
+ // Check that the maps starting from the prototype haven't changed.
+ GenerateLoadGlobalFunctionPrototype(masm(),
+ Context::STRING_FUNCTION_INDEX,
+ rcx);
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), rcx, holder,
+ rbx, rdx, name, &miss);
+ break;
+
+ case NUMBER_CHECK: {
+ Label fast;
+ // Check that the object is a smi or a heap number.
+ __ testl(rdx, Immediate(kSmiTagMask));
+ __ j(zero, &fast);
+ __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
+ __ j(not_equal, &miss);
+ __ bind(&fast);
+ // Check that the maps starting from the prototype haven't changed.
+ GenerateLoadGlobalFunctionPrototype(masm(),
+ Context::NUMBER_FUNCTION_INDEX,
+ rcx);
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), rcx, holder,
+ rbx, rdx, name, &miss);
+ break;
+ }
+
+ case BOOLEAN_CHECK: {
+ Label fast;
+ // Check that the object is a boolean.
+ __ Cmp(rdx, Factory::true_value());
+ __ j(equal, &fast);
+ __ Cmp(rdx, Factory::false_value());
+ __ j(not_equal, &miss);
+ __ bind(&fast);
+ // Check that the maps starting from the prototype haven't changed.
+ GenerateLoadGlobalFunctionPrototype(masm(),
+ Context::BOOLEAN_FUNCTION_INDEX,
+ rcx);
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), rcx, holder,
+ rbx, rdx, name, &miss);
+ break;
+ }
+
+ case JSARRAY_HAS_FAST_ELEMENTS_CHECK:
+ CheckPrototypes(JSObject::cast(object), rdx, holder,
+ rbx, rcx, name, &miss);
+ // Make sure object->elements()->map() != Heap::dictionary_array_map()
+ // Get the elements array of the object.
+ __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
+ // Check that the object is in fast mode (not dictionary).
+ __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
+ Factory::hash_table_map());
+ __ j(equal, &miss);
+ break;
+
+ default:
+ UNREACHABLE();
+ }
+
+ // Get the function and setup the context.
+ __ Move(rdi, Handle<JSFunction>(function));
+ __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
+
+ // Jump to the cached code (tail call).
+ ASSERT(function->is_compiled());
+ Handle<Code> code(function->code());
+ ParameterCount expected(function->shared()->formal_parameter_count());
+ __ InvokeCode(code, expected, arguments(),
+ RelocInfo::CODE_TARGET, JUMP_FUNCTION);
+
+ // Handle call cache miss.
+ __ bind(&miss);
+ Handle<Code> ic = ComputeCallMiss(arguments().immediate());
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ String* function_name = NULL;
+ if (function->shared()->name()->IsString()) {
+ function_name = String::cast(function->shared()->name());
+ }
+ return GetCode(CONSTANT_FUNCTION, function_name);
}
-Object* CallStubCompiler::CompileCallField(Object* a,
- JSObject* b,
- int c,
- String* d) {
- UNIMPLEMENTED();
- return NULL;
+
+Object* CallStubCompiler::CompileCallField(Object* object,
+ JSObject* holder,
+ int index,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -----------------------------------
+ // rsp[0] return address
+ // rsp[8] argument argc
+ // rsp[16] argument argc - 1
+ // ...
+ // rsp[argc * 8] argument 1
+ // rsp[(argc + 1) * 8] argument 0 = receiver
+ // rsp[(argc + 2) * 8] function name
+ Label miss;
+
+ // Get the receiver from the stack.
+ const int argc = arguments().immediate();
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
+
+ // Check that the receiver isn't a smi.
+ __ testl(rdx, Immediate(kSmiTagMask));
+ __ j(zero, &miss);
+
+ // Do the right check and compute the holder register.
+ Register reg =
+ CheckPrototypes(JSObject::cast(object), rdx, holder,
+ rbx, rcx, name, &miss);
+
+ GenerateFastPropertyLoad(masm(), rdi, reg, holder, index);
+
+ // Check that the function really is a function.
+ __ testl(rdi, Immediate(kSmiTagMask));
+ __ j(zero, &miss);
+ __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rbx);
+ __ j(not_equal, &miss);
+
+ // Patch the receiver on the stack with the global proxy if
+ // necessary.
+ if (object->IsGlobalObject()) {
+ __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
+ __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
+ }
+
+ // Invoke the function.
+ __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION);
+
+ // Handle call cache miss.
+ __ bind(&miss);
+ Handle<Code> ic = ComputeCallMiss(arguments().immediate());
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(FIELD, name);
}
Object* CallStubCompiler::CompileCallInterceptor(Object* a,
JSObject* b,
String* c) {
- UNIMPLEMENTED();
- return NULL;
+ // TODO(X64): Implement a real stub.
+ return Failure::InternalError();
}
@@ -71,8 +241,69 @@ Object* CallStubCompiler::CompileCallGlobal(JSObject* object,
JSGlobalPropertyCell* cell,
JSFunction* function,
String* name) {
- UNIMPLEMENTED();
- return NULL;
+ // ----------- S t a t e -------------
+ // -----------------------------------
+ // rsp[0] return address
+ // rsp[8] argument argc
+ // rsp[16] argument argc - 1
+ // ...
+ // rsp[argc * 8] argument 1
+ // rsp[(argc + 1) * 8] argument 0 = receiver
+ // rsp[(argc + 2) * 8] function name
+ Label miss;
+
+ __ IncrementCounter(&Counters::call_global_inline, 1);
+
+ // Get the number of arguments.
+ const int argc = arguments().immediate();
+
+ // Get the receiver from the stack.
+ __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
+
+ // If the object is the holder then we know that it's a global
+ // object which can only happen for contextual calls. In this case,
+ // the receiver cannot be a smi.
+ if (object != holder) {
+ __ testl(rdx, Immediate(kSmiTagMask));
+ __ j(zero, &miss);
+ }
+
+ // Check that the maps haven't changed.
+ CheckPrototypes(object, rdx, holder, rbx, rcx, name, &miss);
+
+ // Get the value from the cell.
+ __ Move(rdi, Handle<JSGlobalPropertyCell>(cell));
+ __ movq(rdi, FieldOperand(rdi, JSGlobalPropertyCell::kValueOffset));
+
+ // Check that the cell contains the same function.
+ __ Cmp(rdi, Handle<JSFunction>(function));
+ __ j(not_equal, &miss);
+
+ // Patch the receiver on the stack with the global proxy.
+ if (object->IsGlobalObject()) {
+ __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
+ __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
+ }
+
+ // Setup the context (function already in rdi).
+ __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
+
+ // Jump to the cached code (tail call).
+ ASSERT(function->is_compiled());
+ Handle<Code> code(function->code());
+ ParameterCount expected(function->shared()->formal_parameter_count());
+ __ InvokeCode(code, expected, arguments(),
+ RelocInfo::CODE_TARGET, JUMP_FUNCTION);
+
+ // Handle call cache miss.
+ __ bind(&miss);
+ __ DecrementCounter(&Counters::call_global_inline, 1);
+ __ IncrementCounter(&Counters::call_global_inline_miss, 1);
+ Handle<Code> ic = ComputeCallMiss(arguments().immediate());
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(NORMAL, name);
}
@@ -80,34 +311,58 @@ Object* LoadStubCompiler::CompileLoadCallback(JSObject* a,
JSObject* b,
AccessorInfo* c,
String* d) {
- UNIMPLEMENTED();
- return NULL;
+ // TODO(X64): Implement a real stub.
+ return Failure::InternalError();
}
-Object* LoadStubCompiler::CompileLoadConstant(JSObject* a,
- JSObject* b,
- Object* c,
- String* d) {
- UNIMPLEMENTED();
- return NULL;
+Object* LoadStubCompiler::CompileLoadConstant(JSObject* object,
+ JSObject* holder,
+ Object* value,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- rcx : name
+ // -- rsp[0] : return address
+ // -- rsp[8] : receiver
+ // -----------------------------------
+ Label miss;
+
+ __ movq(rax, (Operand(rsp, kPointerSize)));
+ GenerateLoadConstant(object, holder, rax, rbx, rdx, value, name, &miss);
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(CONSTANT_FUNCTION, name);
}
-Object* LoadStubCompiler::CompileLoadField(JSObject* a,
- JSObject* b,
- int c,
- String* d) {
- UNIMPLEMENTED();
- return NULL;
+Object* LoadStubCompiler::CompileLoadField(JSObject* object,
+ JSObject* holder,
+ int index,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- rcx : name
+ // -- rsp[0] : return address
+ // -- rsp[8] : receiver
+ // -----------------------------------
+ Label miss;
+
+ __ movq(rax, (Operand(rsp, kPointerSize)));
+ GenerateLoadField(object, holder, rax, rbx, rdx, index, name, &miss);
+ __ bind(&miss);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(FIELD, name);
}
Object* LoadStubCompiler::CompileLoadInterceptor(JSObject* a,
JSObject* b,
String* c) {
- UNIMPLEMENTED();
- return NULL;
+ // TODO(X64): Implement a real stub.
+ return Failure::InternalError();
}
@@ -116,8 +371,51 @@ Object* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
JSGlobalPropertyCell* cell,
String* name,
bool is_dont_delete) {
- UNIMPLEMENTED();
- return NULL;
+ // ----------- S t a t e -------------
+ // -- rcx : name
+ // -- rsp[0] : return address
+ // -- rsp[8] : receiver
+ // -----------------------------------
+ Label miss;
+
+ __ IncrementCounter(&Counters::named_load_global_inline, 1);
+
+ // Get the receiver from the stack.
+ __ movq(rax, (Operand(rsp, kPointerSize)));
+
+ // If the object is the holder then we know that it's a global
+ // object which can only happen for contextual loads. In this case,
+ // the receiver cannot be a smi.
+ if (object != holder) {
+ __ testl(rax, Immediate(kSmiTagMask));
+ __ j(zero, &miss);
+ }
+
+ // Check that the maps haven't changed.
+ CheckPrototypes(object, rax, holder, rbx, rdx, name, &miss);
+
+ // Get the value from the cell.
+ __ Move(rax, Handle<JSGlobalPropertyCell>(cell));
+ __ movq(rax, FieldOperand(rax, JSGlobalPropertyCell::kValueOffset));
+
+ // Check for deleted property if property can actually be deleted.
+ if (!is_dont_delete) {
+ __ Cmp(rax, Factory::the_hole_value());
+ __ j(equal, &miss);
+ } else if (FLAG_debug_code) {
+ __ Cmp(rax, Factory::the_hole_value());
+ __ Check(not_equal, "DontDelete cells can't contain the hole");
+ }
+
+ __ ret(0);
+
+ __ bind(&miss);
+ __ DecrementCounter(&Counters::named_load_global_inline, 1);
+ __ IncrementCounter(&Counters::named_load_global_inline_miss, 1);
+ GenerateLoadMiss(masm(), Code::LOAD_IC);
+
+ // Return the generated code.
+ return GetCode(NORMAL, name);
}
@@ -129,12 +427,38 @@ Object* StoreStubCompiler::CompileStoreCallback(JSObject* a,
}
-Object* StoreStubCompiler::CompileStoreField(JSObject* a,
- int b,
- Map* c,
- String* d) {
- UNIMPLEMENTED();
- return NULL;
+Object* StoreStubCompiler::CompileStoreField(JSObject* object,
+ int index,
+ Map* transition,
+ String* name) {
+ // ----------- S t a t e -------------
+ // -- rax : value
+ // -- rcx : name
+ // -- rsp[0] : return address
+ // -- rsp[8] : receiver
+ // -----------------------------------
+ Label miss;
+
+ // Get the object from the stack.
+ __ movq(rbx, Operand(rsp, 1 * kPointerSize));
+
+ // Generate store field code. Trashes the name register.
+ GenerateStoreField(masm(),
+ Builtins::StoreIC_ExtendStorage,
+ object,
+ index,
+ transition,
+ rbx, rcx, rdx,
+ &miss);
+
+ // Handle store cache miss.
+ __ bind(&miss);
+ __ Move(rcx, Handle<String>(name)); // restore name
+ Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+
+ // Return the generated code.
+ return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}
@@ -175,6 +499,308 @@ Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
return GetCodeWithFlags(flags, "LazyCompileStub");
}
+
+Register StubCompiler::CheckPrototypes(JSObject* object,
+ Register object_reg,
+ JSObject* holder,
+ Register holder_reg,
+ Register scratch,
+ String* name,
+ Label* miss) {
+ // Check that the maps haven't changed.
+ Register result =
+ __ CheckMaps(object, object_reg, holder, holder_reg, scratch, miss);
+
+ // If we've skipped any global objects, it's not enough to verify
+ // that their maps haven't changed.
+ while (object != holder) {
+ if (object->IsGlobalObject()) {
+ GlobalObject* global = GlobalObject::cast(object);
+ Object* probe = global->EnsurePropertyCell(name);
+ if (probe->IsFailure()) {
+ set_failure(Failure::cast(probe));
+ return result;
+ }
+ JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
+ ASSERT(cell->value()->IsTheHole());
+ __ Move(scratch, Handle<Object>(cell));
+ __ Cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset),
+ Factory::the_hole_value());
+ __ j(not_equal, miss);
+ }
+ object = JSObject::cast(object->GetPrototype());
+ }
+
+ // Return the register containing the holder.
+ return result;
+}
+
+
+void StubCompiler::GenerateLoadField(JSObject* object,
+ JSObject* holder,
+ Register receiver,
+ Register scratch1,
+ Register scratch2,
+ int index,
+ String* name,
+ Label* miss) {
+ // Check that the receiver isn't a smi.
+ __ testl(receiver, Immediate(kSmiTagMask));
+ __ j(zero, miss);
+
+ // Check the prototype chain.
+ Register reg =
+ CheckPrototypes(object, receiver, holder,
+ scratch1, scratch2, name, miss);
+
+ // Get the value from the properties.
+ GenerateFastPropertyLoad(masm(), rax, reg, holder, index);
+ __ ret(0);
+}
+
+
+void StubCompiler::GenerateLoadConstant(JSObject* object,
+ JSObject* holder,
+ Register receiver,
+ Register scratch1,
+ Register scratch2,
+ Object* value,
+ String* name,
+ Label* miss) {
+ // Check that the receiver isn't a smi.
+ __ testl(receiver, Immediate(kSmiTagMask));
+ __ j(zero, miss);
+
+ // Check that the maps haven't changed.
+ Register reg =
+ CheckPrototypes(object, receiver, holder,
+ scratch1, scratch2, name, miss);
+
+ // Return the constant value.
+ __ Move(rax, Handle<Object>(value));
+ __ ret(0);
+}
+
+
+#undef __
+
+//-----------------------------------------------------------------------------
+// StubCompiler static helper functions
+
+#define __ ACCESS_MASM(masm)
+
+
+static void ProbeTable(MacroAssembler* masm,
+ Code::Flags flags,
+ StubCache::Table table,
+ Register name,
+ Register offset) {
+ ExternalReference key_offset(SCTableReference::keyReference(table));
+ Label miss;
+
+ __ movq(kScratchRegister, key_offset);
+ // Check that the key in the entry matches the name.
+ __ cmpl(name, Operand(kScratchRegister, offset, times_4, 0));
+ __ j(not_equal, &miss);
+ // Get the code entry from the cache.
+ // Use key_offset + kPointerSize, rather than loading value_offset.
+ __ movq(kScratchRegister,
+ Operand(kScratchRegister, offset, times_4, kPointerSize));
+ // Check that the flags match what we're looking for.
+ __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
+ __ and_(offset, Immediate(~Code::kFlagsNotUsedInLookup));
+ __ cmpl(offset, Immediate(flags));
+ __ j(not_equal, &miss);
+
+ // Jump to the first instruction in the code stub.
+ __ addq(kScratchRegister, Immediate(Code::kHeaderSize - kHeapObjectTag));
+ __ jmp(kScratchRegister);
+
+ __ bind(&miss);
+}
+
+
+void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
+ ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
+ Code* code = NULL;
+ if (kind == Code::LOAD_IC) {
+ code = Builtins::builtin(Builtins::LoadIC_Miss);
+ } else {
+ code = Builtins::builtin(Builtins::KeyedLoadIC_Miss);
+ }
+
+ Handle<Code> ic(code);
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+}
+
+
+void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
+ int index,
+ Register prototype) {
+ // Load the global or builtins object from the current context.
+ __ movq(prototype,
+ Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ // Load the global context from the global or builtins object.
+ __ movq(prototype,
+ FieldOperand(prototype, GlobalObject::kGlobalContextOffset));
+ // Load the function from the global context.
+ __ movq(prototype, Operand(prototype, Context::SlotOffset(index)));
+ // Load the initial map. The global functions all have initial maps.
+ __ movq(prototype,
+ FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
+ // Load the prototype from the initial map.
+ __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
+}
+
+
+// Load a fast property out of a holder object (src). In-object properties
+// are loaded directly otherwise the property is loaded from the properties
+// fixed array.
+void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
+ Register dst, Register src,
+ JSObject* holder, int index) {
+ // Adjust for the number of properties stored in the holder.
+ index -= holder->map()->inobject_properties();
+ if (index < 0) {
+ // Get the property straight out of the holder.
+ int offset = holder->map()->instance_size() + (index * kPointerSize);
+ __ movq(dst, FieldOperand(src, offset));
+ } else {
+ // Calculate the offset into the properties array.
+ int offset = index * kPointerSize + FixedArray::kHeaderSize;
+ __ movq(dst, FieldOperand(src, JSObject::kPropertiesOffset));
+ __ movq(dst, FieldOperand(dst, offset));
+ }
+}
+
+
+void StubCache::GenerateProbe(MacroAssembler* masm,
+ Code::Flags flags,
+ Register receiver,
+ Register name,
+ Register scratch,
+ Register extra) {
+ Label miss;
+ USE(extra); // The register extra is not used on the X64 platform.
+ // Make sure that code is valid. The shifting code relies on the
+ // entry size being 16.
+ ASSERT(sizeof(Entry) == 16);
+
+ // Make sure the flags do not name a specific type.
+ ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
+
+ // Make sure that there are no register conflicts.
+ ASSERT(!scratch.is(receiver));
+ ASSERT(!scratch.is(name));
+
+ // Check that the receiver isn't a smi.
+ __ testl(receiver, Immediate(kSmiTagMask));
+ __ j(zero, &miss);
+
+ // Get the map of the receiver and compute the hash.
+ __ movl(scratch, FieldOperand(name, String::kLengthOffset));
+ // Use only the low 32 bits of the map pointer.
+ __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
+ __ xor_(scratch, Immediate(flags));
+ __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
+
+ // Probe the primary table.
+ ProbeTable(masm, flags, kPrimary, name, scratch);
+
+ // Primary miss: Compute hash for secondary probe.
+ __ movl(scratch, FieldOperand(name, String::kLengthOffset));
+ __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
+ __ xor_(scratch, Immediate(flags));
+ __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
+ __ subl(scratch, name);
+ __ addl(scratch, Immediate(flags));
+ __ and_(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize));
+
+ // Probe the secondary table.
+ ProbeTable(masm, flags, kSecondary, name, scratch);
+
+ // Cache miss: Fall-through and let caller handle the miss by
+ // entering the runtime system.
+ __ bind(&miss);
+}
+
+
+void StubCompiler::GenerateStoreField(MacroAssembler* masm,
+ Builtins::Name storage_extend,
+ JSObject* object,
+ int index,
+ Map* transition,
+ Register receiver_reg,
+ Register name_reg,
+ Register scratch,
+ Label* miss_label) {
+ // Check that the object isn't a smi.
+ __ testl(receiver_reg, Immediate(kSmiTagMask));
+ __ j(zero, miss_label);
+
+ // Check that the map of the object hasn't changed.
+ __ Cmp(FieldOperand(receiver_reg, HeapObject::kMapOffset),
+ Handle<Map>(object->map()));
+ __ j(not_equal, miss_label);
+
+ // Perform global security token check if needed.
+ if (object->IsJSGlobalProxy()) {
+ __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
+ }
+
+ // Stub never generated for non-global objects that require access
+ // checks.
+ ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
+
+ // Perform map transition for the receiver if necessary.
+ if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
+ // The properties must be extended before we can store the value.
+ // We jump to a runtime call that extends the properties array.
+ __ Move(rcx, Handle<Map>(transition));
+ Handle<Code> ic(Builtins::builtin(storage_extend));
+ __ Jump(ic, RelocInfo::CODE_TARGET);
+ return;
+ }
+
+ if (transition != NULL) {
+ // Update the map of the object; no write barrier updating is
+ // needed because the map is never in new space.
+ __ Move(FieldOperand(receiver_reg, HeapObject::kMapOffset),
+ Handle<Map>(transition));
+ }
+
+ // Adjust for the number of properties stored in the object. Even in the
+ // face of a transition we can use the old map here because the size of the
+ // object and the number of in-object properties is not going to change.
+ index -= object->map()->inobject_properties();
+
+ if (index < 0) {
+ // Set the property straight into the object.
+ int offset = object->map()->instance_size() + (index * kPointerSize);
+ __ movq(FieldOperand(receiver_reg, offset), rax);
+
+ // Update the write barrier for the array address.
+ // Pass the value being stored in the now unused name_reg.
+ __ movq(name_reg, rax);
+ __ RecordWrite(receiver_reg, offset, name_reg, scratch);
+ } else {
+ // Write to the properties array.
+ int offset = index * kPointerSize + FixedArray::kHeaderSize;
+ // Get the properties array (optimistically).
+ __ movq(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
+ __ movq(FieldOperand(scratch, offset), rax);
+
+ // Update the write barrier for the array address.
+ // Pass the value being stored in the now unused name_reg.
+ __ movq(name_reg, rax);
+ __ RecordWrite(scratch, offset, name_reg, receiver_reg);
+ }
+
+ // Return the value (register rax).
+ __ ret(0);
+}
+
+
#undef __