Diffstat (limited to 'JavaScriptCore/jit')
-rw-r--r--  JavaScriptCore/jit/ExecutableAllocator.cpp              2
-rw-r--r--  JavaScriptCore/jit/ExecutableAllocator.h                35
-rw-r--r--  JavaScriptCore/jit/ExecutableAllocatorFixedVMPool.cpp   5
-rw-r--r--  JavaScriptCore/jit/JIT.cpp                              7
-rw-r--r--  JavaScriptCore/jit/JIT.h                                42
-rw-r--r--  JavaScriptCore/jit/JITArithmetic.cpp                    45
-rw-r--r--  JavaScriptCore/jit/JITOpcodes.cpp                       10
-rw-r--r--  JavaScriptCore/jit/JITOpcodes32_64.cpp                  13
-rw-r--r--  JavaScriptCore/jit/JITPropertyAccess.cpp                60
-rw-r--r--  JavaScriptCore/jit/JITPropertyAccess32_64.cpp           74
-rw-r--r--  JavaScriptCore/jit/JITStubs.cpp                         239
-rw-r--r--  JavaScriptCore/jit/JITStubs.h                           4
-rw-r--r--  JavaScriptCore/jit/SpecializedThunkJIT.h                3
13 files changed, 252 insertions, 287 deletions
diff --git a/JavaScriptCore/jit/ExecutableAllocator.cpp b/JavaScriptCore/jit/ExecutableAllocator.cpp
index e3525f5..4800613 100644
--- a/JavaScriptCore/jit/ExecutableAllocator.cpp
+++ b/JavaScriptCore/jit/ExecutableAllocator.cpp
@@ -52,6 +52,8 @@ void ExecutableAllocator::intializePageSize()
ExecutablePool::Allocation ExecutablePool::systemAlloc(size_t size)
{
PageAllocation allocation = PageAllocation::allocate(size, PageAllocation::JSJITCodePages, EXECUTABLE_POOL_WRITABLE, true);
+ if (!allocation)
+ CRASH();
return allocation;
}
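
The hunk above stops handing a possibly-null allocation back to callers and crashes on failure instead. A minimal stand-alone sketch of that crash-on-failure pattern, with simplified stand-ins for the real types (malloc stands in for PageAllocation::allocate, abort for CRASH); this is illustrative only, not WebKit code:

    #include <cstddef>
    #include <cstdio>
    #include <cstdlib>

    // Simplified stand-in for WTF's PageAllocation: a base pointer that may be null.
    struct Allocation {
        void* base;
        std::size_t size;
        explicit operator bool() const { return base != nullptr; }
    };

    // Allocate, and crash the process on failure instead of propagating a null
    // allocation to every caller (the pattern the hunk above adopts).
    Allocation systemAlloc(std::size_t size)
    {
        Allocation allocation = { std::malloc(size), size }; // placeholder for the real page allocator
        if (!allocation) {
            std::fprintf(stderr, "executable allocation of %zu bytes failed\n", size);
            std::abort(); // stands in for CRASH()
        }
        return allocation;
    }
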
diff --git a/JavaScriptCore/jit/ExecutableAllocator.h b/JavaScriptCore/jit/ExecutableAllocator.h
index b60d591..576f889 100644
--- a/JavaScriptCore/jit/ExecutableAllocator.h
+++ b/JavaScriptCore/jit/ExecutableAllocator.h
@@ -107,7 +107,10 @@ public:
#endif
typedef Vector<Allocation, 2> AllocationList;
- static PassRefPtr<ExecutablePool> create(size_t n);
+ static PassRefPtr<ExecutablePool> create(size_t n)
+ {
+ return adoptRef(new ExecutablePool(n));
+ }
void* alloc(size_t n)
{
@@ -146,7 +149,7 @@ private:
static Allocation systemAlloc(size_t n);
static void systemRelease(Allocation& alloc);
- ExecutablePool(Allocation&);
+ ExecutablePool(size_t n);
void* poolAllocate(size_t n);
@@ -164,11 +167,8 @@ public:
{
if (!pageSize)
intializePageSize();
- if (isValid()) {
+ if (isValid())
m_smallAllocationPool = ExecutablePool::create(JIT_ALLOCATOR_LARGE_ALLOC_SIZE);
- if (!m_smallAllocationPool)
- CRASH();
- }
#if !ENABLE(INTERPRETER)
else
CRASH();
@@ -185,7 +185,7 @@ public:
return m_smallAllocationPool;
// If the request is large, we just provide a unshared allocator
- if (n > JIT_ALLOCATOR_LARGE_ALLOC_SIZE)
+ if (n > JIT_ALLOCATOR_LARGE_ALLOC_SIZE)
return ExecutablePool::create(n);
// Create a new allocator
@@ -308,20 +308,15 @@ private:
static void intializePageSize();
};
-inline PassRefPtr<ExecutablePool> ExecutablePool::create(size_t n)
- {
- Allocation mem = systemAlloc(roundUpAllocationSize(n, JIT_ALLOCATOR_PAGE_SIZE));
- if (!mem)
- return 0;
- return adoptRef(new ExecutablePool(mem));
- }
-
-inline ExecutablePool::ExecutablePool(Allocation& mem)
+inline ExecutablePool::ExecutablePool(size_t n)
{
- ASSERT(!!mem);
+ size_t allocSize = roundUpAllocationSize(n, JIT_ALLOCATOR_PAGE_SIZE);
+ Allocation mem = systemAlloc(allocSize);
m_pools.append(mem);
m_freePtr = static_cast<char*>(mem.base());
- m_end = m_freePtr + mem.size();
+ if (!m_freePtr)
+ CRASH(); // Failed to allocate
+ m_end = m_freePtr + allocSize;
}
inline void* ExecutablePool::poolAllocate(size_t n)
@@ -330,8 +325,8 @@ inline void* ExecutablePool::poolAllocate(size_t n)
Allocation result = systemAlloc(allocSize);
if (!result.base())
- return 0; // Failed to allocate
-
+ CRASH(); // Failed to allocate
+
ASSERT(m_end >= m_freePtr);
if ((allocSize - n) > static_cast<size_t>(m_end - m_freePtr)) {
// Replace allocation pool
diff --git a/JavaScriptCore/jit/ExecutableAllocatorFixedVMPool.cpp b/JavaScriptCore/jit/ExecutableAllocatorFixedVMPool.cpp
index f05e919..b34204f 100644
--- a/JavaScriptCore/jit/ExecutableAllocatorFixedVMPool.cpp
+++ b/JavaScriptCore/jit/ExecutableAllocatorFixedVMPool.cpp
@@ -360,14 +360,15 @@ private:
// Search m_freeList for a suitable sized chunk to allocate memory from.
FreeListEntry* entry = m_freeList.search(size, m_freeList.GREATER_EQUAL);
- // This is bad news.
+ // This would be bad news.
if (!entry) {
// Errk! Lets take a last-ditch desperation attempt at defragmentation...
coalesceFreeSpace();
// Did that free up a large enough chunk?
entry = m_freeList.search(size, m_freeList.GREATER_EQUAL);
+ // No?... *BOOM!*
if (!entry)
- return 0;
+ CRASH();
}
ASSERT(entry->size != m_commonSize);
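
The logic above searches the free list, falls back to a one-shot coalesce, retries the search, and gives up hard if the retry still fails. A simplified sketch of that control flow, assuming a plain multimap free list in place of the pool's real AVL-tree of FreeListEntry nodes (illustrative only):

    #include <cstddef>
    #include <cstdlib>
    #include <map>

    // Hypothetical free list keyed by chunk size.
    using FreeList = std::multimap<std::size_t, void*>;

    // Stub for illustration only; a real version would merge adjacent free chunks.
    void coalesceFreeSpace(FreeList&) {}

    void* allocateChunk(FreeList& freeList, std::size_t size)
    {
        auto it = freeList.lower_bound(size);   // smallest chunk >= size
        if (it == freeList.end()) {
            coalesceFreeSpace(freeList);        // last-ditch defragmentation
            it = freeList.lower_bound(size);
            if (it == freeList.end())
                std::abort();                   // mirrors CRASH(): no recovery path remains
        }
        void* chunk = it->second;
        freeList.erase(it);
        return chunk;
    }
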
diff --git a/JavaScriptCore/jit/JIT.cpp b/JavaScriptCore/jit/JIT.cpp
index cd5944a..4466fbd 100644
--- a/JavaScriptCore/jit/JIT.cpp
+++ b/JavaScriptCore/jit/JIT.cpp
@@ -509,12 +509,7 @@ JITCode JIT::privateCompile(CodePtr* functionEntryArityCheck)
ASSERT(m_jmpTable.isEmpty());
- RefPtr<ExecutablePool> executablePool = m_globalData->executableAllocator.poolForSize(m_assembler.size());
- if (!executablePool)
- return JITCode();
- LinkBuffer patchBuffer(this, executablePool.release(), m_linkerOffset);
- if (!patchBuffer.allocationSuccessful())
- return JITCode();
+ LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()), m_linkerOffset);
// Translate vPC offsets into addresses in JIT generated code, for switch tables.
for (unsigned i = 0; i < m_switches.size(); ++i) {
diff --git a/JavaScriptCore/jit/JIT.h b/JavaScriptCore/jit/JIT.h
index 393c771..7c03a47 100644
--- a/JavaScriptCore/jit/JIT.h
+++ b/JavaScriptCore/jit/JIT.h
@@ -183,38 +183,38 @@ namespace JSC {
return JIT(globalData, codeBlock, offsetBase).privateCompile(functionEntryArityCheck);
}
- static bool compileGetByIdProto(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
+ static void compileGetByIdProto(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
{
JIT jit(globalData, codeBlock);
- return jit.privateCompileGetByIdProto(stubInfo, structure, prototypeStructure, ident, slot, cachedOffset, returnAddress, callFrame);
+ jit.privateCompileGetByIdProto(stubInfo, structure, prototypeStructure, ident, slot, cachedOffset, returnAddress, callFrame);
}
- static bool compileGetByIdSelfList(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
+ static void compileGetByIdSelfList(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
{
JIT jit(globalData, codeBlock);
- return jit.privateCompileGetByIdSelfList(stubInfo, structure, ident, slot, cachedOffset);
+ jit.privateCompileGetByIdSelfList(stubInfo, polymorphicStructures, currentIndex, structure, ident, slot, cachedOffset);
}
- static bool compileGetByIdProtoList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
+ static void compileGetByIdProtoList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
{
JIT jit(globalData, codeBlock);
- return jit.privateCompileGetByIdProtoList(stubInfo, structure, prototypeStructure, ident, slot, cachedOffset, callFrame);
+ jit.privateCompileGetByIdProtoList(stubInfo, prototypeStructureList, currentIndex, structure, prototypeStructure, ident, slot, cachedOffset, callFrame);
}
- static bool compileGetByIdChainList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
+ static void compileGetByIdChainList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
{
JIT jit(globalData, codeBlock);
- return jit.privateCompileGetByIdChainList(stubInfo, structure, chain, count, ident, slot, cachedOffset, callFrame);
+ jit.privateCompileGetByIdChainList(stubInfo, prototypeStructureList, currentIndex, structure, chain, count, ident, slot, cachedOffset, callFrame);
}
- static bool compileGetByIdChain(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
+ static void compileGetByIdChain(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
{
JIT jit(globalData, codeBlock);
- return jit.privateCompileGetByIdChain(stubInfo, structure, chain, count, ident, slot, cachedOffset, returnAddress, callFrame);
+ jit.privateCompileGetByIdChain(stubInfo, structure, chain, count, ident, slot, cachedOffset, returnAddress, callFrame);
}
- static bool compilePutByIdTransition(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
+ static void compilePutByIdTransition(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
{
JIT jit(globalData, codeBlock);
- return jit.privateCompilePutByIdTransition(stubInfo, oldStructure, newStructure, cachedOffset, chain, returnAddress, direct);
+ jit.privateCompilePutByIdTransition(stubInfo, oldStructure, newStructure, cachedOffset, chain, returnAddress, direct);
}
static void compileCTIMachineTrampolines(JSGlobalData* globalData, RefPtr<ExecutablePool>* executablePool, TrampolineStructure *trampolines)
@@ -237,10 +237,10 @@ namespace JSC {
static void patchPutByIdReplace(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress, bool direct);
static void patchMethodCallProto(CodeBlock* codeblock, MethodCallLinkInfo&, JSFunction*, Structure*, JSObject*, ReturnAddressPtr);
- static bool compilePatchGetArrayLength(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, ReturnAddressPtr returnAddress)
+ static void compilePatchGetArrayLength(JSGlobalData* globalData, CodeBlock* codeBlock, ReturnAddressPtr returnAddress)
{
JIT jit(globalData, codeBlock);
- return jit.privateCompilePatchGetArrayLength(stubInfo, returnAddress);
+ return jit.privateCompilePatchGetArrayLength(returnAddress);
}
static void linkCall(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, CodePtr, CallLinkInfo*, int callerArgCount, JSGlobalData*);
@@ -265,17 +265,17 @@ namespace JSC {
void privateCompileLinkPass();
void privateCompileSlowCases();
JITCode privateCompile(CodePtr* functionEntryArityCheck);
- bool privateCompileGetByIdProto(StructureStubInfo*, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
- bool privateCompileGetByIdSelfList(StructureStubInfo*, Structure*, const Identifier&, const PropertySlot&, size_t cachedOffset);
- bool privateCompileGetByIdProtoList(StructureStubInfo*, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
- bool privateCompileGetByIdChainList(StructureStubInfo*, Structure*, StructureChain* chain, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
- bool privateCompileGetByIdChain(StructureStubInfo*, Structure*, StructureChain*, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
- bool privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure*, Structure*, size_t cachedOffset, StructureChain*, ReturnAddressPtr returnAddress, bool direct);
+ void privateCompileGetByIdProto(StructureStubInfo*, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
+ void privateCompileGetByIdSelfList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, const Identifier&, const PropertySlot&, size_t cachedOffset);
+ void privateCompileGetByIdProtoList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
+ void privateCompileGetByIdChainList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, StructureChain* chain, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
+ void privateCompileGetByIdChain(StructureStubInfo*, Structure*, StructureChain*, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
+ void privateCompilePutByIdTransition(StructureStubInfo*, Structure*, Structure*, size_t cachedOffset, StructureChain*, ReturnAddressPtr returnAddress, bool direct);
void privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* data, TrampolineStructure *trampolines);
Label privateCompileCTINativeCall(JSGlobalData*, bool isConstruct = false);
CodePtr privateCompileCTINativeCall(PassRefPtr<ExecutablePool> executablePool, JSGlobalData* data, NativeFunction func);
- bool privateCompilePatchGetArrayLength(StructureStubInfo* stubInfo, ReturnAddressPtr returnAddress);
+ void privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress);
void addSlowCase(Jump);
void addSlowCase(JumpList);
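
These wrappers used to return a bool success flag and let the private helpers mutate the stub info themselves; they now return void and take the PolymorphicAccessStructureList plus the slot index from the caller, which does the bookkeeping up front. A minimal sketch of that calling-convention shift, using hypothetical stand-in types rather than the real StructureStubInfo (illustrative only):

    // Hypothetical stand-ins for StructureStubInfo and its polymorphic access list.
    struct AccessList { int size = 0; };
    struct StubInfo { AccessList list; };

    // New shape: the helper only generates code for the slot it is given;
    // it cannot fail any more, so there is nothing to return.
    void compileEntry(AccessList& /*list*/, int /*index*/)
    {
        // ... emit the stub for the given slot ...
    }

    void onCacheMiss(StubInfo& info)
    {
        int index = info.list.size;     // the caller chooses the slot...
        info.list.size++;               // ...updates the count up front...
        compileEntry(info.list, index); // ...and the helper just emits code
    }
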
diff --git a/JavaScriptCore/jit/JITArithmetic.cpp b/JavaScriptCore/jit/JITArithmetic.cpp
index a9d0bcd..d75f8b5 100644
--- a/JavaScriptCore/jit/JITArithmetic.cpp
+++ b/JavaScriptCore/jit/JITArithmetic.cpp
@@ -1139,7 +1139,7 @@ void JIT::emitSlow_op_pre_dec(Instruction* currentInstruction, Vector<SlowCaseEn
/* ------------------------------ BEGIN: OP_MOD ------------------------------ */
-#if CPU(X86) || CPU(X86_64)
+#if CPU(X86) || CPU(X86_64) || CPU(MIPS)
void JIT::emit_op_mod(Instruction* currentInstruction)
{
@@ -1147,21 +1147,34 @@ void JIT::emit_op_mod(Instruction* currentInstruction)
unsigned op1 = currentInstruction[2].u.operand;
unsigned op2 = currentInstruction[3].u.operand;
- emitGetVirtualRegisters(op1, X86Registers::eax, op2, X86Registers::ecx);
- emitJumpSlowCaseIfNotImmediateInteger(X86Registers::eax);
- emitJumpSlowCaseIfNotImmediateInteger(X86Registers::ecx);
+#if CPU(X86) || CPU(X86_64)
+ // Make sure registers are correct for x86 IDIV instructions.
+ ASSERT(regT0 == X86Registers::eax);
+ ASSERT(regT1 == X86Registers::edx);
+ ASSERT(regT2 == X86Registers::ecx);
+#endif
+
+ emitGetVirtualRegisters(op1, regT0, op2, regT2);
+ emitJumpSlowCaseIfNotImmediateInteger(regT0);
+ emitJumpSlowCaseIfNotImmediateInteger(regT2);
+
#if USE(JSVALUE64)
- addSlowCase(branchPtr(Equal, X86Registers::ecx, ImmPtr(JSValue::encode(jsNumber(m_globalData, 0)))));
+ addSlowCase(branchPtr(Equal, regT2, ImmPtr(JSValue::encode(jsNumber(m_globalData, 0)))));
m_assembler.cdq();
- m_assembler.idivl_r(X86Registers::ecx);
+ m_assembler.idivl_r(regT2);
#else
- emitFastArithDeTagImmediate(X86Registers::eax);
- addSlowCase(emitFastArithDeTagImmediateJumpIfZero(X86Registers::ecx));
+ emitFastArithDeTagImmediate(regT0);
+ addSlowCase(emitFastArithDeTagImmediateJumpIfZero(regT2));
+#if CPU(X86) || CPU(X86_64)
m_assembler.cdq();
- m_assembler.idivl_r(X86Registers::ecx);
- signExtend32ToPtr(X86Registers::edx, X86Registers::edx);
+ m_assembler.idivl_r(regT2);
+ signExtend32ToPtr(regT1, regT1);
+#elif CPU(MIPS)
+ m_assembler.div(regT0, regT2);
+ m_assembler.mfhi(regT1);
+#endif
#endif
- emitFastArithReTagImmediate(X86Registers::edx, X86Registers::eax);
+ emitFastArithReTagImmediate(regT1, regT0);
emitPutVirtualRegister(result);
}
@@ -1177,18 +1190,18 @@ void JIT::emitSlow_op_mod(Instruction* currentInstruction, Vector<SlowCaseEntry>
Jump notImm1 = getSlowCase(iter);
Jump notImm2 = getSlowCase(iter);
linkSlowCase(iter);
- emitFastArithReTagImmediate(X86Registers::eax, X86Registers::eax);
- emitFastArithReTagImmediate(X86Registers::ecx, X86Registers::ecx);
+ emitFastArithReTagImmediate(regT0, regT0);
+ emitFastArithReTagImmediate(regT2, regT2);
notImm1.link(this);
notImm2.link(this);
#endif
JITStubCall stubCall(this, cti_op_mod);
- stubCall.addArgument(X86Registers::eax);
- stubCall.addArgument(X86Registers::ecx);
+ stubCall.addArgument(regT0);
+ stubCall.addArgument(regT2);
stubCall.call(result);
}
-#else // CPU(X86) || CPU(X86_64)
+#else // CPU(X86) || CPU(X86_64) || CPU(MIPS)
void JIT::emit_op_mod(Instruction* currentInstruction)
{
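
Both fast paths above compute a truncating remainder: x86 via cdq/idiv (remainder left in edx), MIPS via div followed by mfhi (remainder read from HI). The remainder therefore takes the sign of the dividend on both architectures, which is also plain C++ % semantics; a small check of the convention the fast path relies on (illustrative only):

    #include <cassert>

    int main()
    {
        // Truncating integer division: the remainder takes the sign of the dividend,
        // matching cdq/idiv on x86 and div/mfhi on MIPS.
        assert( 7 %  3 ==  1);
        assert(-7 %  3 == -1);
        assert( 7 % -3 ==  1);
        return 0;
    }
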
diff --git a/JavaScriptCore/jit/JITOpcodes.cpp b/JavaScriptCore/jit/JITOpcodes.cpp
index 28ef4ca..2bfba83 100644
--- a/JavaScriptCore/jit/JITOpcodes.cpp
+++ b/JavaScriptCore/jit/JITOpcodes.cpp
@@ -161,14 +161,7 @@ void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executable
#endif
// All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
- *executablePool = m_globalData->executableAllocator.poolForSize(m_assembler.size());
- // We can't run without the JIT trampolines!
- if (!*executablePool)
- CRASH();
- LinkBuffer patchBuffer(this, *executablePool, 0);
- // We can't run without the JIT trampolines!
- if (!patchBuffer.allocationSuccessful())
- CRASH();
+ LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()), 0);
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
@@ -183,6 +176,7 @@ void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executable
patchBuffer.link(callCompileConstruct, FunctionPtr(cti_op_construct_jitCompile));
CodeRef finalCode = patchBuffer.finalizeCode();
+ *executablePool = finalCode.m_executablePool;
trampolines->ctiVirtualCallLink = patchBuffer.trampolineAt(virtualCallLinkBegin);
trampolines->ctiVirtualConstructLink = patchBuffer.trampolineAt(virtualConstructLinkBegin);
diff --git a/JavaScriptCore/jit/JITOpcodes32_64.cpp b/JavaScriptCore/jit/JITOpcodes32_64.cpp
index 939aa8c..035325a 100644
--- a/JavaScriptCore/jit/JITOpcodes32_64.cpp
+++ b/JavaScriptCore/jit/JITOpcodes32_64.cpp
@@ -159,14 +159,7 @@ void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executable
#endif
// All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
- *executablePool = m_globalData->executableAllocator.poolForSize(m_assembler.size());
- // We can't run without the JIT trampolines!
- if (!*executablePool)
- CRASH();
- LinkBuffer patchBuffer(this, *executablePool, 0);
- // We can't run without the JIT trampolines!
- if (!patchBuffer.allocationSuccessful())
- CRASH();
+ LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()), 0);
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
@@ -181,6 +174,7 @@ void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executable
patchBuffer.link(callCompileCconstruct, FunctionPtr(cti_op_construct_jitCompile));
CodeRef finalCode = patchBuffer.finalizeCode();
+ *executablePool = finalCode.m_executablePool;
trampolines->ctiVirtualCall = patchBuffer.trampolineAt(virtualCallBegin);
trampolines->ctiVirtualConstruct = patchBuffer.trampolineAt(virtualConstructBegin);
@@ -363,9 +357,6 @@ JIT::CodePtr JIT::privateCompileCTINativeCall(PassRefPtr<ExecutablePool> executa
// All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
LinkBuffer patchBuffer(this, executablePool, 0);
- // We can't continue if we can't call a function!
- if (!patchBuffer.allocationSuccessful())
- CRASH();
patchBuffer.link(nativeCall, FunctionPtr(func));
patchBuffer.finalizeCode();
diff --git a/JavaScriptCore/jit/JITPropertyAccess.cpp b/JavaScriptCore/jit/JITPropertyAccess.cpp
index 6b2a2fe..7c129a5 100644
--- a/JavaScriptCore/jit/JITPropertyAccess.cpp
+++ b/JavaScriptCore/jit/JITPropertyAccess.cpp
@@ -78,9 +78,6 @@ JIT::CodePtr JIT::stringGetByValStubGenerator(JSGlobalData* globalData, Executab
jit.ret();
LinkBuffer patchBuffer(&jit, pool, 0);
- // We can't run without the JIT trampolines!
- if (!patchBuffer.allocationSuccessful())
- CRASH();
return patchBuffer.finalizeCode().m_code;
}
@@ -601,7 +598,7 @@ void JIT::testPrototype(JSValue prototype, JumpList& failureCases)
#endif
}
-bool JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
+void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
{
JumpList failureCases;
// Check eax is an object of the right Structure.
@@ -653,8 +650,6 @@ bool JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure
Call failureCall = tailRecursiveCall();
LinkBuffer patchBuffer(this, m_codeBlock->executablePool(), 0);
- if (!patchBuffer.allocationSuccessful())
- return false;
patchBuffer.link(failureCall, FunctionPtr(direct ? cti_op_put_by_id_direct_fail : cti_op_put_by_id_fail));
@@ -664,10 +659,9 @@ bool JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure
}
CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
+ stubInfo->stubRoutine = entryLabel;
RepatchBuffer repatchBuffer(m_codeBlock);
repatchBuffer.relinkCallerToTrampoline(returnAddress, entryLabel);
- stubInfo->initPutByIdTransition(oldStructure, newStructure, chain, entryLabel);
- return true;
}
void JIT::patchGetByIdSelf(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress)
@@ -730,8 +724,10 @@ void JIT::patchPutByIdReplace(CodeBlock* codeBlock, StructureStubInfo* stubInfo,
repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetPutByIdPropertyMapOffset), offset);
}
-bool JIT::privateCompilePatchGetArrayLength(StructureStubInfo* stubInfo, ReturnAddressPtr returnAddress)
+void JIT::privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress)
{
+ StructureStubInfo* stubInfo = &m_codeBlock->getStubInfo(returnAddress);
+
// Check eax is an array
Jump failureCases1 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsArrayVPtr));
@@ -744,8 +740,6 @@ bool JIT::privateCompilePatchGetArrayLength(StructureStubInfo* stubInfo, ReturnA
Jump success = jump();
LinkBuffer patchBuffer(this, m_codeBlock->executablePool(), 0);
- if (!patchBuffer.allocationSuccessful())
- return false;
// Use the patch information to link the failure cases back to the original slow case routine.
CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);
@@ -757,6 +751,7 @@ bool JIT::privateCompilePatchGetArrayLength(StructureStubInfo* stubInfo, ReturnA
// Track the stub we have created so that it will be deleted later.
CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
+ stubInfo->stubRoutine = entryLabel;
// Finally patch the jump to slow case back in the hot path to jump here instead.
CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
@@ -765,11 +760,9 @@ bool JIT::privateCompilePatchGetArrayLength(StructureStubInfo* stubInfo, ReturnA
// We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_array_fail));
- stubInfo->stubRoutine = entryLabel;
- return true;
}
-bool JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
+void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
// The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
// referencing the prototype object - let's speculatively load it's table nice and early!)
@@ -810,8 +803,6 @@ bool JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* str
compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
Jump success = jump();
LinkBuffer patchBuffer(this, m_codeBlock->executablePool(), 0);
- if (!patchBuffer.allocationSuccessful())
- return false;
// Use the patch information to link the failure cases back to the original slow case routine.
CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);
@@ -829,6 +820,7 @@ bool JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* str
}
// Track the stub we have created so that it will be deleted later.
CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
+ stubInfo->stubRoutine = entryLabel;
// Finally patch the jump to slow case back in the hot path to jump here instead.
CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
@@ -837,15 +829,10 @@ bool JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* str
// We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
- stubInfo->initGetByIdProto(structure, prototypeStructure, entryLabel);
- return true;
}
-bool JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
+void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
{
- PolymorphicAccessStructureList* polymorphicStructures = stubInfo->u.getByIdSelfList.structureList;
- int currentIndex = stubInfo->u.getByIdSelfList.listSize;
-
Jump failureCase = checkStructure(regT0, structure);
bool needsStubLink = false;
if (slot.cachedPropertyType() == PropertySlot::Getter) {
@@ -873,8 +860,6 @@ bool JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, Structure*
Jump success = jump();
LinkBuffer patchBuffer(this, m_codeBlock->executablePool(), 0);
- if (!patchBuffer.allocationSuccessful())
- return false;
if (needsStubLink) {
for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
@@ -902,15 +887,10 @@ bool JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, Structure*
CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
RepatchBuffer repatchBuffer(m_codeBlock);
repatchBuffer.relink(jumpLocation, entryLabel);
- stubInfo->u.getByIdSelfList.listSize++;
- return true;
}
-bool JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
+void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
{
- PolymorphicAccessStructureList* prototypeStructures = stubInfo->u.getByIdProtoList.structureList;
- int currentIndex = stubInfo->u.getByIdProtoList.listSize;
-
// The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
// referencing the prototype object - let's speculatively load it's table nice and early!)
JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
@@ -951,8 +931,6 @@ bool JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, Structure*
Jump success = jump();
LinkBuffer patchBuffer(this, m_codeBlock->executablePool(), 0);
- if (!patchBuffer.allocationSuccessful())
- return false;
if (needsStubLink) {
for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
@@ -979,15 +957,10 @@ bool JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, Structure*
CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
RepatchBuffer repatchBuffer(m_codeBlock);
repatchBuffer.relink(jumpLocation, entryLabel);
- stubInfo->u.getByIdProtoList.listSize++;
- return true;
}
-bool JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
+void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
{
- PolymorphicAccessStructureList* prototypeStructures = stubInfo->u.getByIdProtoList.structureList;
- int currentIndex = stubInfo->u.getByIdProtoList.listSize;
-
ASSERT(count);
JumpList bucketsOfFail;
@@ -1027,8 +1000,6 @@ bool JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, Structure*
Jump success = jump();
LinkBuffer patchBuffer(this, m_codeBlock->executablePool(), 0);
- if (!patchBuffer.allocationSuccessful())
- return false;
if (needsStubLink) {
for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
@@ -1056,11 +1027,9 @@ bool JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, Structure*
CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
RepatchBuffer repatchBuffer(m_codeBlock);
repatchBuffer.relink(jumpLocation, entryLabel);
- stubInfo->u.getByIdProtoList.listSize++;
- return true;
}
-bool JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
+void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
ASSERT(count);
@@ -1101,8 +1070,6 @@ bool JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* str
Jump success = jump();
LinkBuffer patchBuffer(this, m_codeBlock->executablePool(), 0);
- if (!patchBuffer.allocationSuccessful())
- return false;
if (needsStubLink) {
for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
@@ -1119,6 +1086,7 @@ bool JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* str
// Track the stub we have created so that it will be deleted later.
CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
+ stubInfo->stubRoutine = entryLabel;
// Finally patch the jump to slow case back in the hot path to jump here instead.
CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
@@ -1127,8 +1095,6 @@ bool JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* str
// We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
- stubInfo->initGetByIdChain(structure, chain, entryLabel);
- return true;
}
/* ------------------------------ END: !ENABLE / ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS) ------------------------------ */
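
A recurring shape in the hunks above: finalize the stub, record its entry label in stubInfo->stubRoutine immediately, then repatch the caller; the success-flag plumbing disappears because none of these steps can fail any more. A minimal sketch of that ordering, with hypothetical stand-ins for the LinkBuffer/RepatchBuffer machinery (illustrative only):

    // Hypothetical stand-ins for the linker/patching types used above.
    struct CodeLabel { void* addr = nullptr; };
    struct StubInfo { CodeLabel stubRoutine; };

    // Stubs for illustration; the real code uses LinkBuffer and RepatchBuffer.
    CodeLabel finalizeStub() { return CodeLabel(); }
    void repatchJumpTo(CodeLabel) {}

    void installStub(StubInfo& info)
    {
        CodeLabel entry = finalizeStub();
        info.stubRoutine = entry;   // record the stub first, so it can be found and freed later
        repatchJumpTo(entry);       // then point the hot path back at the new stub
    }
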
diff --git a/JavaScriptCore/jit/JITPropertyAccess32_64.cpp b/JavaScriptCore/jit/JITPropertyAccess32_64.cpp
index 9239641..31ecfed 100644
--- a/JavaScriptCore/jit/JITPropertyAccess32_64.cpp
+++ b/JavaScriptCore/jit/JITPropertyAccess32_64.cpp
@@ -296,9 +296,6 @@ JIT::CodePtr JIT::stringGetByValStubGenerator(JSGlobalData* globalData, Executab
jit.ret();
LinkBuffer patchBuffer(&jit, pool, 0);
- // We can't run without the JIT trampolines!
- if (!patchBuffer.allocationSuccessful())
- CRASH();
return patchBuffer.finalizeCode().m_code;
}
@@ -605,7 +602,7 @@ void JIT::testPrototype(JSValue prototype, JumpList& failureCases)
#endif
}
-bool JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
+void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
{
// It is assumed that regT0 contains the basePayload and regT1 contains the baseTag. The value can be found on the stack.
@@ -657,9 +654,7 @@ bool JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure
Call failureCall = tailRecursiveCall();
LinkBuffer patchBuffer(this, m_codeBlock->executablePool(), 0);
- if (!patchBuffer.allocationSuccessful())
- return false;
-
+
patchBuffer.link(failureCall, FunctionPtr(direct ? cti_op_put_by_id_direct_fail : cti_op_put_by_id_fail));
if (willNeedStorageRealloc) {
@@ -668,10 +663,9 @@ bool JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure
}
CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
+ stubInfo->stubRoutine = entryLabel;
RepatchBuffer repatchBuffer(m_codeBlock);
repatchBuffer.relinkCallerToTrampoline(returnAddress, entryLabel);
- stubInfo->initPutByIdTransition(oldStructure, newStructure, chain, entryLabel);
- return true;
}
void JIT::patchGetByIdSelf(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress)
@@ -736,8 +730,10 @@ void JIT::patchPutByIdReplace(CodeBlock* codeBlock, StructureStubInfo* stubInfo,
repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetPutByIdPropertyMapOffset2), offset + OBJECT_OFFSETOF(JSValue, u.asBits.tag)); // tag
}
-bool JIT::privateCompilePatchGetArrayLength(StructureStubInfo* stubInfo, ReturnAddressPtr returnAddress)
+void JIT::privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress)
{
+ StructureStubInfo* stubInfo = &m_codeBlock->getStubInfo(returnAddress);
+
// regT0 holds a JSCell*
// Check for array
@@ -753,9 +749,7 @@ bool JIT::privateCompilePatchGetArrayLength(StructureStubInfo* stubInfo, ReturnA
Jump success = jump();
LinkBuffer patchBuffer(this, m_codeBlock->executablePool(), 0);
- if (!patchBuffer.allocationSuccessful())
- return false;
-
+
// Use the patch information to link the failure cases back to the original slow case routine.
CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);
patchBuffer.link(failureCases1, slowCaseBegin);
@@ -766,7 +760,8 @@ bool JIT::privateCompilePatchGetArrayLength(StructureStubInfo* stubInfo, ReturnA
// Track the stub we have created so that it will be deleted later.
CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
-
+ stubInfo->stubRoutine = entryLabel;
+
// Finally patch the jump to slow case back in the hot path to jump here instead.
CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
RepatchBuffer repatchBuffer(m_codeBlock);
@@ -774,11 +769,9 @@ bool JIT::privateCompilePatchGetArrayLength(StructureStubInfo* stubInfo, ReturnA
// We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_array_fail));
- stubInfo->stubRoutine = entryLabel;
- return true;
}
-bool JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
+void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
// regT0 holds a JSCell*
@@ -820,9 +813,7 @@ bool JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* str
Jump success = jump();
LinkBuffer patchBuffer(this, m_codeBlock->executablePool(), 0);
- if (!patchBuffer.allocationSuccessful())
- return false;
-
+
// Use the patch information to link the failure cases back to the original slow case routine.
CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);
patchBuffer.link(failureCases1, slowCaseBegin);
@@ -840,6 +831,7 @@ bool JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* str
// Track the stub we have created so that it will be deleted later.
CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
+ stubInfo->stubRoutine = entryLabel;
// Finally patch the jump to slow case back in the hot path to jump here instead.
CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
@@ -848,16 +840,11 @@ bool JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* str
// We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
- stubInfo->initGetByIdProto(structure, prototypeStructure, entryLabel);
- return true;
}
-bool JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
+void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
{
- PolymorphicAccessStructureList* polymorphicStructures = stubInfo->u.getByIdSelfList.structureList;
- int currentIndex = stubInfo->u.getByIdSelfList.listSize;
-
// regT0 holds a JSCell*
Jump failureCase = checkStructure(regT0, structure);
bool needsStubLink = false;
@@ -887,9 +874,6 @@ bool JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, Structure*
Jump success = jump();
LinkBuffer patchBuffer(this, m_codeBlock->executablePool(), 0);
- if (!patchBuffer.allocationSuccessful())
- return false;
-
if (needsStubLink) {
for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
if (iter->to)
@@ -915,15 +899,10 @@ bool JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, Structure*
CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
RepatchBuffer repatchBuffer(m_codeBlock);
repatchBuffer.relink(jumpLocation, entryLabel);
- stubInfo->u.getByIdSelfList.listSize++;
- return true;
}
-bool JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
+void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
{
- PolymorphicAccessStructureList* prototypeStructures = stubInfo->u.getByIdProtoList.structureList;
- int currentIndex = stubInfo->u.getByIdProtoList.listSize;
-
// regT0 holds a JSCell*
// The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
@@ -965,9 +944,6 @@ bool JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, Structure*
Jump success = jump();
LinkBuffer patchBuffer(this, m_codeBlock->executablePool(), 0);
- if (!patchBuffer.allocationSuccessful())
- return false;
-
if (needsStubLink) {
for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
if (iter->to)
@@ -992,15 +968,10 @@ bool JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, Structure*
CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
RepatchBuffer repatchBuffer(m_codeBlock);
repatchBuffer.relink(jumpLocation, entryLabel);
- stubInfo->u.getByIdProtoList.listSize++;
- return true;
}
-bool JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
+void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
{
- PolymorphicAccessStructureList* prototypeStructures = stubInfo->u.getByIdProtoList.structureList;
- int currentIndex = stubInfo->u.getByIdProtoList.listSize;
-
// regT0 holds a JSCell*
ASSERT(count);
@@ -1042,9 +1013,6 @@ bool JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, Structure*
Jump success = jump();
LinkBuffer patchBuffer(this, m_codeBlock->executablePool(), 0);
- if (!patchBuffer.allocationSuccessful())
- return false;
-
if (needsStubLink) {
for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
if (iter->to)
@@ -1070,11 +1038,9 @@ bool JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, Structure*
CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
RepatchBuffer repatchBuffer(m_codeBlock);
repatchBuffer.relink(jumpLocation, entryLabel);
- stubInfo->u.getByIdProtoList.listSize++;
- return true;
}
-bool JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
+void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
// regT0 holds a JSCell*
ASSERT(count);
@@ -1116,9 +1082,6 @@ bool JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* str
Jump success = jump();
LinkBuffer patchBuffer(this, m_codeBlock->executablePool(), 0);
- if (!patchBuffer.allocationSuccessful())
- return false;
-
if (needsStubLink) {
for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
if (iter->to)
@@ -1133,7 +1096,8 @@ bool JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* str
// Track the stub we have created so that it will be deleted later.
CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
-
+ stubInfo->stubRoutine = entryLabel;
+
// Finally patch the jump to slow case back in the hot path to jump here instead.
CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
RepatchBuffer repatchBuffer(m_codeBlock);
@@ -1141,8 +1105,6 @@ bool JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* str
// We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
- stubInfo->initGetByIdChain(structure, chain, entryLabel);
- return true;
}
/* ------------------------------ END: !ENABLE / ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS) ------------------------------ */
diff --git a/JavaScriptCore/jit/JITStubs.cpp b/JavaScriptCore/jit/JITStubs.cpp
index b53e655..e17c7cb 100644
--- a/JavaScriptCore/jit/JITStubs.cpp
+++ b/JavaScriptCore/jit/JITStubs.cpp
@@ -824,89 +824,111 @@ JITThunks::~JITThunks()
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
-NEVER_INLINE bool JITThunks::tryCachePutByID(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr returnAddress, JSValue baseValue, const PutPropertySlot& slot, StructureStubInfo* stubInfo, bool direct)
+NEVER_INLINE void JITThunks::tryCachePutByID(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr returnAddress, JSValue baseValue, const PutPropertySlot& slot, StructureStubInfo* stubInfo, bool direct)
{
// The interpreter checks for recursion here; I do not believe this can occur in CTI.
if (!baseValue.isCell())
- return false;
+ return;
// Uncacheable: give up.
- if (!slot.isCacheable())
- return false;
+ if (!slot.isCacheable()) {
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
+ return;
+ }
JSCell* baseCell = asCell(baseValue);
Structure* structure = baseCell->structure();
- if (structure->isUncacheableDictionary())
- return false;
+ if (structure->isUncacheableDictionary()) {
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
+ return;
+ }
// If baseCell != base, then baseCell must be a proxy for another object.
- if (baseCell != slot.base())
- return false;
+ if (baseCell != slot.base()) {
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
+ return;
+ }
// Cache hit: Specialize instruction and ref Structures.
// Structure transition, cache transition info
if (slot.type() == PutPropertySlot::NewProperty) {
- if (structure->isDictionary())
- return false;
+ if (structure->isDictionary()) {
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
+ return;
+ }
// put_by_id_transition checks the prototype chain for setters.
normalizePrototypeChain(callFrame, baseCell);
StructureChain* prototypeChain = structure->prototypeChain(callFrame);
- return JIT::compilePutByIdTransition(callFrame->scopeChain()->globalData, codeBlock, stubInfo, structure->previousID(), structure, slot.cachedOffset(), prototypeChain, returnAddress, direct);
+ stubInfo->initPutByIdTransition(structure->previousID(), structure, prototypeChain);
+ JIT::compilePutByIdTransition(callFrame->scopeChain()->globalData, codeBlock, stubInfo, structure->previousID(), structure, slot.cachedOffset(), prototypeChain, returnAddress, direct);
+ return;
}
+
+ stubInfo->initPutByIdReplace(structure);
JIT::patchPutByIdReplace(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress, direct);
- stubInfo->initPutByIdReplace(structure);
- return true;
}
-NEVER_INLINE bool JITThunks::tryCacheGetByID(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr returnAddress, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo* stubInfo)
+NEVER_INLINE void JITThunks::tryCacheGetByID(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr returnAddress, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo* stubInfo)
{
// FIXME: Write a test that proves we need to check for recursion here just
// like the interpreter does, then add a check for recursion.
// FIXME: Cache property access for immediates.
- if (!baseValue.isCell())
- return false;
+ if (!baseValue.isCell()) {
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
+ return;
+ }
JSGlobalData* globalData = &callFrame->globalData();
- if (isJSArray(globalData, baseValue) && propertyName == callFrame->propertyNames().length)
- return JIT::compilePatchGetArrayLength(callFrame->scopeChain()->globalData, codeBlock, stubInfo, returnAddress);
+ if (isJSArray(globalData, baseValue) && propertyName == callFrame->propertyNames().length) {
+ JIT::compilePatchGetArrayLength(callFrame->scopeChain()->globalData, codeBlock, returnAddress);
+ return;
+ }
if (isJSString(globalData, baseValue) && propertyName == callFrame->propertyNames().length) {
// The tradeoff of compiling an patched inline string length access routine does not seem
// to pay off, so we currently only do this for arrays.
ctiPatchCallByReturnAddress(codeBlock, returnAddress, globalData->jitStubs->ctiStringLengthTrampoline());
- return true;
+ return;
}
// Uncacheable: give up.
- if (!slot.isCacheable())
- return false;
+ if (!slot.isCacheable()) {
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
+ return;
+ }
JSCell* baseCell = asCell(baseValue);
Structure* structure = baseCell->structure();
- if (structure->isUncacheableDictionary())
- return false;
+ if (structure->isUncacheableDictionary()) {
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
+ return;
+ }
// Cache hit: Specialize instruction and ref Structures.
if (slot.slotBase() == baseValue) {
- if (slot.cachedPropertyType() != PropertySlot::Value)
- return false;
- JIT::patchGetByIdSelf(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress);
+ // set this up, so derefStructures can do it's job.
stubInfo->initGetByIdSelf(structure);
- return true;
+ if (slot.cachedPropertyType() != PropertySlot::Value)
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));
+ else
+ JIT::patchGetByIdSelf(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress);
+ return;
}
- if (structure->isDictionary())
- return false;
+ if (structure->isDictionary()) {
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
+ return;
+ }
if (slot.slotBase() == structure->prototypeForLookup(callFrame)) {
ASSERT(slot.slotBase().isObject());
@@ -920,20 +942,25 @@ NEVER_INLINE bool JITThunks::tryCacheGetByID(CallFrame* callFrame, CodeBlock* co
slotBaseObject->flattenDictionaryObject();
offset = slotBaseObject->structure()->get(propertyName);
}
+
+ stubInfo->initGetByIdProto(structure, slotBaseObject->structure());
+
ASSERT(!structure->isDictionary());
ASSERT(!slotBaseObject->structure()->isDictionary());
- return JIT::compileGetByIdProto(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, structure, slotBaseObject->structure(), propertyName, slot, offset, returnAddress);
+ JIT::compileGetByIdProto(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, structure, slotBaseObject->structure(), propertyName, slot, offset, returnAddress);
+ return;
}
size_t offset = slot.cachedOffset();
size_t count = normalizePrototypeChain(callFrame, baseValue, slot.slotBase(), propertyName, offset);
if (!count) {
stubInfo->accessType = access_get_by_id_generic;
- return true;
+ return;
}
StructureChain* prototypeChain = structure->prototypeChain(callFrame);
- return JIT::compileGetByIdChain(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, structure, prototypeChain, count, propertyName, slot, offset, returnAddress);
+ stubInfo->initGetByIdChain(structure, prototypeChain);
+ JIT::compileGetByIdChain(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, structure, prototypeChain, count, propertyName, slot, offset, returnAddress);
}
#endif // ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
@@ -1386,13 +1413,9 @@ DEFINE_STUB_FUNCTION(void, op_put_by_id)
StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
if (!stubInfo->seenOnce())
stubInfo->setSeen();
- else {
- JSValue baseValue = stackFrame.args[0].jsValue();
- bool cached = JITThunks::tryCachePutByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, baseValue, slot, stubInfo, false);
- if (!cached && baseValue.isCell())
- ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_put_by_id_generic));
- }
-
+ else
+ JITThunks::tryCachePutByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, stackFrame.args[0].jsValue(), slot, stubInfo, false);
+
CHECK_FOR_EXCEPTION_AT_END();
}
@@ -1409,13 +1432,9 @@ DEFINE_STUB_FUNCTION(void, op_put_by_id_direct)
StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
if (!stubInfo->seenOnce())
stubInfo->setSeen();
- else {
- JSValue baseValue = stackFrame.args[0].jsValue();
- bool cached = JITThunks::tryCachePutByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, baseValue, slot, stubInfo, true);
- if (!cached && baseValue.isCell())
- ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_put_by_id_direct_generic));
- }
-
+ else
+ JITThunks::tryCachePutByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, stackFrame.args[0].jsValue(), slot, stubInfo, true);
+
CHECK_FOR_EXCEPTION_AT_END();
}
@@ -1547,11 +1566,8 @@ DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id)
StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
if (!stubInfo->seenOnce())
stubInfo->setSeen();
- else {
- bool cached = JITThunks::tryCacheGetByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, baseValue, ident, slot, stubInfo);
- if (!cached)
- ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
- }
+ else
+ JITThunks::tryCacheGetByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, baseValue, ident, slot, stubInfo);
CHECK_FOR_EXCEPTION_AT_END();
return JSValue::encode(result);
@@ -1580,28 +1596,57 @@ DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_self_fail)
ASSERT(slot.slotBase().isObject());
- // If this is a regular self access (not yet upgraded to list), then switch the stubInfo over.
- if (stubInfo->accessType == access_get_by_id_self)
- stubInfo->initGetByIdSelfList(new PolymorphicAccessStructureList(stubInfo->stubRoutine, stubInfo->u.getByIdSelf.baseObjectStructure));
+ PolymorphicAccessStructureList* polymorphicStructureList;
+ int listIndex = 1;
- // If there is room in the list, try to add a cached entry.
- if (stubInfo->u.getByIdSelfList.listSize < POLYMORPHIC_LIST_CACHE_SIZE) {
- bool cached = JIT::compileGetByIdSelfList(callFrame->scopeChain()->globalData, codeBlock, stubInfo, asCell(baseValue)->structure(), ident, slot, slot.cachedOffset());
- if (cached)
- return JSValue::encode(result);
+ if (stubInfo->accessType == access_get_by_id_self) {
+ ASSERT(!stubInfo->stubRoutine);
+ polymorphicStructureList = new PolymorphicAccessStructureList(CodeLocationLabel(), stubInfo->u.getByIdSelf.baseObjectStructure);
+ stubInfo->initGetByIdSelfList(polymorphicStructureList, 1);
+ } else {
+ polymorphicStructureList = stubInfo->u.getByIdSelfList.structureList;
+ listIndex = stubInfo->u.getByIdSelfList.listSize;
}
- }
- ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
+ if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
+ stubInfo->u.getByIdSelfList.listSize++;
+ JIT::compileGetByIdSelfList(callFrame->scopeChain()->globalData, codeBlock, stubInfo, polymorphicStructureList, listIndex, asCell(baseValue)->structure(), ident, slot, slot.cachedOffset());
+
+ if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
+ ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
+ }
+ } else
+ ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
return JSValue::encode(result);
}
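The self-list logic added above reduces to a slot-claiming step: take the current list size as the index for this case, bump the size, compile a stub into that slot, and repatch to the generic stub once the final slot is used. A compilable sketch with hypothetical names, not the WebKit StructureStubInfo API:

// Sketch only, hypothetical names.
const int kListCapacity = 8;        // stands in for POLYMORPHIC_LIST_CACHE_SIZE

struct SelfListSketch {
    int listSize = 1;               // slot 0 holds the original self-access case
};

void addSelfListCase(SelfListSketch& list)
{
    int listIndex = list.listSize;  // the slot this miss will fill
    if (listIndex >= kListCapacity)
        return;                     // list already full; the call site was repatched earlier
    list.listSize++;
    // ...compile a stub for listIndex here...
    if (listIndex == kListCapacity - 1) {
        // Last slot used: repatch the call site to the generic stub so new
        // structures stop funnelling through this slow path.
    }
}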
-static void setupPolymorphicProtoList(StructureStubInfo* stubInfo)
-{
- if (stubInfo->accessType == access_get_by_id_proto)
- stubInfo->initGetByIdProtoList(new PolymorphicAccessStructureList(stubInfo->stubRoutine, stubInfo->u.getByIdProto.baseObjectStructure, stubInfo->u.getByIdProto.prototypeStructure));
- else if (stubInfo->accessType == access_get_by_id_chain)
- stubInfo->initGetByIdProtoList(new PolymorphicAccessStructureList(stubInfo->stubRoutine, stubInfo->u.getByIdChain.baseObjectStructure, stubInfo->u.getByIdChain.chain));
- ASSERT(stubInfo->accessType == access_get_by_id_proto_list);
+static PolymorphicAccessStructureList* getPolymorphicAccessStructureListSlot(StructureStubInfo* stubInfo, int& listIndex)
+{
+ PolymorphicAccessStructureList* prototypeStructureList = 0;
+ listIndex = 1;
+
+ switch (stubInfo->accessType) {
+ case access_get_by_id_proto:
+ prototypeStructureList = new PolymorphicAccessStructureList(stubInfo->stubRoutine, stubInfo->u.getByIdProto.baseObjectStructure, stubInfo->u.getByIdProto.prototypeStructure);
+ stubInfo->stubRoutine = CodeLocationLabel();
+ stubInfo->initGetByIdProtoList(prototypeStructureList, 2);
+ break;
+ case access_get_by_id_chain:
+ prototypeStructureList = new PolymorphicAccessStructureList(stubInfo->stubRoutine, stubInfo->u.getByIdChain.baseObjectStructure, stubInfo->u.getByIdChain.chain);
+ stubInfo->stubRoutine = CodeLocationLabel();
+ stubInfo->initGetByIdProtoList(prototypeStructureList, 2);
+ break;
+ case access_get_by_id_proto_list:
+ prototypeStructureList = stubInfo->u.getByIdProtoList.structureList;
+ listIndex = stubInfo->u.getByIdProtoList.listSize;
+ if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE)
+ stubInfo->u.getByIdProtoList.listSize++;
+ break;
+ default:
+ ASSERT_NOT_REACHED();
+ }
+
+ ASSERT(listIndex <= POLYMORPHIC_LIST_CACHE_SIZE);
+ return prototypeStructureList;
}
DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_getter_stub)
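The index bookkeeping in getPolymorphicAccessStructureListSlot above works as follows: when converting a single proto or chain stub, the existing stub becomes entry 0 of the fresh list, the case being added lands at index 1, and the recorded size becomes 2; once the info is already a proto list, the next case takes the current listSize as its index and the size grows only while there is room. A compilable sketch of that logic, with hypothetical types in place of the StructureStubInfo union:

// Sketch only, hypothetical names; mirrors the helper's index bookkeeping above.
#include <cassert>

enum class AccessKind { Proto, Chain, ProtoList };

const int kProtoListCapacity = 8;   // stands in for POLYMORPHIC_LIST_CACHE_SIZE

struct ProtoStubSketch {
    AccessKind kind = AccessKind::Proto;
    int listSize = 0;               // meaningful once kind == ProtoList
};

int claimProtoListSlot(ProtoStubSketch& stub)
{
    int listIndex = 1;
    switch (stub.kind) {
    case AccessKind::Proto:
    case AccessKind::Chain:
        // The existing single-case stub becomes entry 0 of a new list, so the
        // case being added now is entry 1 and the recorded size is 2.
        stub.kind = AccessKind::ProtoList;
        stub.listSize = 2;
        break;
    case AccessKind::ProtoList:
        listIndex = stub.listSize;  // next free slot
        if (listIndex < kProtoListCapacity)
            stub.listSize++;
        break;
    }
    assert(listIndex <= kProtoListCapacity);
    return listIndex;
}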
@@ -1662,36 +1707,40 @@ DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_proto_list)
size_t offset = slot.cachedOffset();
- // Don't mix self & proto/chain accesses in the same list
- if (slot.slotBase() != baseValue) {
- if (slot.slotBase() == asCell(baseValue)->structure()->prototypeForLookup(callFrame)) {
- ASSERT(!asCell(baseValue)->structure()->isDictionary());
- // Since we're accessing a prototype in a loop, it's a good bet that it
- // should not be treated as a dictionary.
- if (slotBaseObject->structure()->isDictionary()) {
- slotBaseObject->flattenDictionaryObject();
- offset = slotBaseObject->structure()->get(propertyName);
- }
+ if (slot.slotBase() == baseValue)
+ ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
+ else if (slot.slotBase() == asCell(baseValue)->structure()->prototypeForLookup(callFrame)) {
+ ASSERT(!asCell(baseValue)->structure()->isDictionary());
+ // Since we're accessing a prototype in a loop, it's a good bet that it
+ // should not be treated as a dictionary.
+ if (slotBaseObject->structure()->isDictionary()) {
+ slotBaseObject->flattenDictionaryObject();
+ offset = slotBaseObject->structure()->get(propertyName);
+ }
- setupPolymorphicProtoList(stubInfo);
- if (stubInfo->u.getByIdProtoList.listSize < POLYMORPHIC_LIST_CACHE_SIZE) {
- bool cached = JIT::compileGetByIdProtoList(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, structure, slotBaseObject->structure(), propertyName, slot, offset);
- if (cached)
- return JSValue::encode(result);
- }
- } else if (size_t count = normalizePrototypeChain(callFrame, baseValue, slot.slotBase(), propertyName, offset)) {
- ASSERT(!asCell(baseValue)->structure()->isDictionary());
-
- setupPolymorphicProtoList(stubInfo);
- if (stubInfo->u.getByIdProtoList.listSize < POLYMORPHIC_LIST_CACHE_SIZE) {
- bool cached = JIT::compileGetByIdChainList(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, structure, structure->prototypeChain(callFrame), count, propertyName, slot, offset);
- if (cached)
- return JSValue::encode(result);
- }
+ int listIndex;
+ PolymorphicAccessStructureList* prototypeStructureList = getPolymorphicAccessStructureListSlot(stubInfo, listIndex);
+ if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
+ JIT::compileGetByIdProtoList(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, slotBaseObject->structure(), propertyName, slot, offset);
+
+ if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
+ ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
}
- }
+ } else if (size_t count = normalizePrototypeChain(callFrame, baseValue, slot.slotBase(), propertyName, offset)) {
+ ASSERT(!asCell(baseValue)->structure()->isDictionary());
+ int listIndex;
+ PolymorphicAccessStructureList* prototypeStructureList = getPolymorphicAccessStructureListSlot(stubInfo, listIndex);
+
+ if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
+ StructureChain* protoChain = structure->prototypeChain(callFrame);
+ JIT::compileGetByIdChainList(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, protoChain, count, propertyName, slot, offset);
+
+ if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
+ ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
+ }
+ } else
+ ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
- ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
return JSValue::encode(result);
}
diff --git a/JavaScriptCore/jit/JITStubs.h b/JavaScriptCore/jit/JITStubs.h
index 43f3d19..4e73070 100644
--- a/JavaScriptCore/jit/JITStubs.h
+++ b/JavaScriptCore/jit/JITStubs.h
@@ -252,8 +252,8 @@ namespace JSC {
JITThunks(JSGlobalData*);
~JITThunks();
- static bool tryCacheGetByID(CallFrame*, CodeBlock*, ReturnAddressPtr returnAddress, JSValue baseValue, const Identifier& propertyName, const PropertySlot&, StructureStubInfo* stubInfo);
- static bool tryCachePutByID(CallFrame*, CodeBlock*, ReturnAddressPtr returnAddress, JSValue baseValue, const PutPropertySlot&, StructureStubInfo* stubInfo, bool direct);
+ static void tryCacheGetByID(CallFrame*, CodeBlock*, ReturnAddressPtr returnAddress, JSValue baseValue, const Identifier& propertyName, const PropertySlot&, StructureStubInfo* stubInfo);
+ static void tryCachePutByID(CallFrame*, CodeBlock*, ReturnAddressPtr returnAddress, JSValue baseValue, const PutPropertySlot&, StructureStubInfo* stubInfo, bool direct);
MacroAssemblerCodePtr ctiStringLengthTrampoline() { return m_trampolineStructure.ctiStringLengthTrampoline; }
MacroAssemblerCodePtr ctiVirtualCallLink() { return m_trampolineStructure.ctiVirtualCallLink; }
diff --git a/JavaScriptCore/jit/SpecializedThunkJIT.h b/JavaScriptCore/jit/SpecializedThunkJIT.h
index ba95498..57515fb 100644
--- a/JavaScriptCore/jit/SpecializedThunkJIT.h
+++ b/JavaScriptCore/jit/SpecializedThunkJIT.h
@@ -130,9 +130,6 @@ namespace JSC {
MacroAssemblerCodePtr finalize(MacroAssemblerCodePtr fallback)
{
LinkBuffer patchBuffer(this, m_pool.get(), 0);
- // We can't continue if we can't call a function!
- if (!patchBuffer.allocationSuccessful())
- CRASH();
patchBuffer.link(m_failures, CodeLocationLabel(fallback));
return patchBuffer.finalizeCode().m_code;
}