Diffstat (limited to 'JavaScriptCore/jit')
-rw-r--r--   JavaScriptCore/jit/ExecutableAllocator.h      |  5
-rw-r--r--   JavaScriptCore/jit/JITInlineMethods.h         |  2
-rw-r--r--   JavaScriptCore/jit/JITOpcodes.cpp             | 51
-rw-r--r--   JavaScriptCore/jit/JITOpcodes32_64.cpp        | 94
-rw-r--r--   JavaScriptCore/jit/JITPropertyAccess32_64.cpp | 34
-rw-r--r--   JavaScriptCore/jit/JITStubs.cpp               | 21
6 files changed, 169 insertions(+), 38 deletions(-)
diff --git a/JavaScriptCore/jit/ExecutableAllocator.h b/JavaScriptCore/jit/ExecutableAllocator.h
index 610b788..703f63f 100644
--- a/JavaScriptCore/jit/ExecutableAllocator.h
+++ b/JavaScriptCore/jit/ExecutableAllocator.h
@@ -25,7 +25,6 @@
#ifndef ExecutableAllocator_h
#define ExecutableAllocator_h
-
#include <stddef.h> // for ptrdiff_t
#include <limits>
#include <wtf/Assertions.h>
@@ -80,7 +79,7 @@ inline size_t roundUpAllocationSize(size_t request, size_t granularity)
}
-#if ENABLE(ASSEMBLER)
+#if ENABLE(JIT) && ENABLE(ASSEMBLER)
namespace JSC {
@@ -316,6 +315,6 @@ inline void* ExecutablePool::poolAllocate(size_t n)
}
-#endif // ENABLE(ASSEMBLER)
+#endif // ENABLE(JIT) && ENABLE(ASSEMBLER)
#endif // !defined(ExecutableAllocator)
diff --git a/JavaScriptCore/jit/JITInlineMethods.h b/JavaScriptCore/jit/JITInlineMethods.h
index cba290b..04f7158 100644
--- a/JavaScriptCore/jit/JITInlineMethods.h
+++ b/JavaScriptCore/jit/JITInlineMethods.h
@@ -645,7 +645,7 @@ ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
ALWAYS_INLINE void JIT::emitPutVirtualRegister(unsigned dst, RegisterID from)
{
storePtr(from, Address(callFrameRegister, dst * sizeof(Register)));
- m_lastResultBytecodeRegister = (from == cachedResultRegister) ? dst : std::numeric_limits<int>::max();
+ m_lastResultBytecodeRegister = (from == cachedResultRegister) ? static_cast<int>(dst) : std::numeric_limits<int>::max();
}
ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
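The added static_cast resolves a signed/unsigned mismatch: dst is unsigned while the sentinel is an int, so without the cast the conditional operator converts both arms to unsigned before the result is narrowed back into the int member. A self-contained illustration of that conversion rule (the variable names here are invented; only the types mirror the JIT code):

    #include <iostream>
    #include <limits>

    int main()
    {
        unsigned dst = 7;
        bool matched = false;

        // Without a cast, both arms of the conditional are converted to the
        // common type 'unsigned', and the result is then converted back to int
        // on assignment, typically with a sign-conversion warning.
        int withoutCast = matched ? dst : std::numeric_limits<int>::max();

        // Casting dst keeps the whole conditional in 'int', the type that
        // m_lastResultBytecodeRegister actually stores.
        int withCast = matched ? static_cast<int>(dst) : std::numeric_limits<int>::max();

        std::cout << withoutCast << ' ' << withCast << '\n'; // both print 2147483647 here
        return 0;
    }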
diff --git a/JavaScriptCore/jit/JITOpcodes.cpp b/JavaScriptCore/jit/JITOpcodes.cpp
index 0848348..9a34931 100644
--- a/JavaScriptCore/jit/JITOpcodes.cpp
+++ b/JavaScriptCore/jit/JITOpcodes.cpp
@@ -73,12 +73,14 @@ void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executable
// VirtualCallLink Trampoline
// regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
+ JumpList callLazyLinkFailures;
Label virtualCallLinkBegin = align();
compileOpCallInitializeCallFrame();
preserveReturnAddressAfterCall(regT3);
emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
restoreArgumentReference();
Call callLazyLinkCall = call();
+ callLazyLinkFailures.append(branchTestPtr(Zero, regT0));
restoreReturnAddressBeforeReturn(regT3);
emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
jump(regT0);
@@ -91,10 +93,24 @@ void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executable
emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
restoreArgumentReference();
Call callLazyLinkConstruct = call();
+ callLazyLinkFailures.append(branchTestPtr(Zero, regT0));
restoreReturnAddressBeforeReturn(regT3);
emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
jump(regT0);
+ // If the parser fails we want to be able to keep going,
+ // so we handle this as a parse failure.
+ callLazyLinkFailures.link(this);
+ emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
+ emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
+ restoreReturnAddressBeforeReturn(regT1);
+ move(ImmPtr(&globalData->exceptionLocation), regT2);
+ storePtr(regT1, regT2);
+ poke(callFrameRegister, 1 + OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
+ poke(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()));
+ ret();
+
+
// VirtualCall Trampoline
// regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
Label virtualCallBegin = align();
@@ -181,6 +197,7 @@ JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isCon
Label nativeCallThunk = align();
+#if CPU(X86_64)
// Load caller frame's scope chain into this callframe so that whatever we call can
// get to its global data.
emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
@@ -190,7 +207,6 @@ JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isCon
peek(regT1);
emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);
-#if CPU(X86_64)
// Calling convention: f(edi, esi, edx, ecx, ...);
// Host function signature: f(ExecState*);
move(callFrameRegister, X86Registers::edi);
@@ -204,6 +220,27 @@ JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isCon
addPtr(Imm32(16 - sizeof(void*)), stackPointerRegister);
+#elif CPU(ARM)
+ // Load caller frame's scope chain into this callframe so that whatever we call can
+ // get to its global data.
+ emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
+ emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
+ emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);
+
+ preserveReturnAddressAfterCall(regT3); // Callee preserved
+ emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
+
+ // Calling convention: f(r0 == regT0, r1 == regT1, ...);
+ // Host function signature: f(ExecState*);
+ move(callFrameRegister, ARMRegisters::r0);
+
+ emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
+ move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
+ loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
+ call(Address(regT2, executableOffsetToFunction));
+
+ restoreReturnAddressBeforeReturn(regT3);
+
#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
@@ -220,12 +257,18 @@ JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isCon
// Handle an exception
exceptionHandler.link(this);
+
// Grab the return address.
- peek(regT1);
+ preserveReturnAddressAfterCall(regT1);
+
move(ImmPtr(&globalData->exceptionLocation), regT2);
storePtr(regT1, regT2);
- poke(callFrameRegister, 1 + OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
- poke(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()));
+ poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
+
+ // Set the return address.
+ move(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
+ restoreReturnAddressBeforeReturn(regT1);
+
ret();
return nativeCallThunk;
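The throw paths above rewrite the JITStackFrame::callFrame slot via poke(), whose second argument is a machine-word index derived from a byte offset. A standalone sketch of that arithmetic using an invented frame layout (the real JITStackFrame is declared per-platform in JITStubs.h):

    #include <cstddef>
    #include <cstdio>

    // Invented layout, for illustration only: the point is the offsetof-to-word
    // arithmetic the trampolines use when they poke() the callFrame slot.
    struct ToyStackFrame {
        void* savedRegister; // word 0
        void* callFrame;     // word 1 -- the slot the throw path overwrites
        void* args[3];       // words 2..4
    };

    int main()
    {
        // poke(value, n) stores 'value' n machine words above the stack pointer,
        // so a byte offset from offsetof() is divided by sizeof(void*) first.
        size_t slot = offsetof(ToyStackFrame, callFrame) / sizeof(void*);
        std::printf("callFrame lives at poke slot %zu\n", slot); // prints 1 for this layout
        return 0;
    }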
diff --git a/JavaScriptCore/jit/JITOpcodes32_64.cpp b/JavaScriptCore/jit/JITOpcodes32_64.cpp
index 239751f..a44a8a1 100644
--- a/JavaScriptCore/jit/JITOpcodes32_64.cpp
+++ b/JavaScriptCore/jit/JITOpcodes32_64.cpp
@@ -66,8 +66,8 @@ void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executable
#endif
// (2) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
-
#if ENABLE(JIT_OPTIMIZE_CALL)
+ JumpList callLazyLinkFailures;
// VirtualCallLink Trampoline
// regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
Label virtualCallLinkBegin = align();
@@ -76,6 +76,7 @@ void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executable
emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
restoreArgumentReference();
Call callLazyLinkCall = call();
+ callLazyLinkFailures.append(branchTestPtr(Zero, regT0));
restoreReturnAddressBeforeReturn(regT3);
emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
jump(regT0);
@@ -89,8 +90,22 @@ void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executable
restoreArgumentReference();
Call callLazyLinkConstruct = call();
restoreReturnAddressBeforeReturn(regT3);
+ callLazyLinkFailures.append(branchTestPtr(Zero, regT0));
emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT1);
jump(regT0);
+
+ // If the parser fails we want to be able to keep going,
+ // so we handle this as a parse failure.
+ callLazyLinkFailures.link(this);
+ emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
+ emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
+ restoreReturnAddressBeforeReturn(regT1);
+ move(ImmPtr(&globalData->exceptionLocation), regT2);
+ storePtr(regT1, regT2);
+ poke(callFrameRegister, 1 + OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
+ poke(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()));
+ ret();
+
#endif // ENABLE(JIT_OPTIMIZE_CALL)
// VirtualCall Trampoline
@@ -181,6 +196,7 @@ JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isCon
Label nativeCallThunk = align();
+#if CPU(X86)
// Load caller frame's scope chain into this callframe so that whatever we call can
// get to its global data.
emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT0);
@@ -190,7 +206,6 @@ JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isCon
peek(regT1);
emitPutToCallFrameHeader(regT1, RegisterFile::ReturnPC);
-#if CPU(X86)
// Calling convention: f(ecx, edx, ...);
// Host function signature: f(ExecState*);
move(callFrameRegister, X86Registers::ecx);
@@ -205,6 +220,28 @@ JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isCon
addPtr(Imm32(16 - sizeof(void*)), stackPointerRegister);
+#elif CPU(ARM)
+ // Load caller frame's scope chain into this callframe so that whatever we call can
+ // get to its global data.
+ emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
+ emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
+ emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);
+
+ preserveReturnAddressAfterCall(regT3); // Callee preserved
+ emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
+
+ // Calling convention: f(r0 == regT0, r1 == regT1, ...);
+ // Host function signature: f(ExecState*);
+ move(callFrameRegister, ARMRegisters::r0);
+
+ // call the function
+ emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
+ move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
+ loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
+ call(Address(regT2, executableOffsetToFunction));
+
+ restoreReturnAddressBeforeReturn(regT3);
+
#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
@@ -220,11 +257,18 @@ JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isCon
// Handle an exception
sawException.link(this);
- peek(regT1);
+
+ // Grab the return address.
+ preserveReturnAddressAfterCall(regT1);
+
move(ImmPtr(&globalData->exceptionLocation), regT2);
storePtr(regT1, regT2);
- poke(callFrameRegister, 1 + OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
- poke(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()));
+ poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
+
+ // Set the return address.
+ move(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
+ restoreReturnAddressBeforeReturn(regT1);
+
ret();
return nativeCallThunk;
@@ -258,6 +302,29 @@ JIT::CodePtr JIT::privateCompileCTINativeCall(PassRefPtr<ExecutablePool> executa
addPtr(Imm32(16 - sizeof(void*)), stackPointerRegister);
+#elif CPU(ARM)
+ // Load caller frame's scope chain into this callframe so that whatever we call can
+ // get to its global data.
+ emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT2);
+ emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT2);
+ emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);
+
+ preserveReturnAddressAfterCall(regT3); // Callee preserved
+ emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
+
+ // Calling convention: f(r0 == regT0, r1 == regT1, ...);
+ // Host function signature: f(ExecState*);
+ move(callFrameRegister, ARMRegisters::r0);
+
+ emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, ARMRegisters::r1);
+ move(regT2, callFrameRegister); // Eagerly restore caller frame register to avoid loading from stack.
+ loadPtr(Address(ARMRegisters::r1, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
+
+ // call the function
+ nativeCall = call();
+
+ restoreReturnAddressBeforeReturn(regT3);
+
#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
#else
@@ -272,11 +339,18 @@ JIT::CodePtr JIT::privateCompileCTINativeCall(PassRefPtr<ExecutablePool> executa
// Handle an exception
sawException.link(this);
- peek(regT1);
+
+ // Grab the return address.
+ preserveReturnAddressAfterCall(regT1);
+
move(ImmPtr(&globalData->exceptionLocation), regT2);
storePtr(regT1, regT2);
- poke(callFrameRegister, 1 + OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
- poke(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()));
+ poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
+
+ // Set the return address.
+ move(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
+ restoreReturnAddressBeforeReturn(regT1);
+
ret();
// All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
@@ -607,8 +681,8 @@ void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool dynamic)
// Load property.
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_externalStorage)), regT2);
load32(offsetAddr, regT3);
- load32(BaseIndex(regT2, regT3, TimesEight), regT0); // payload
- load32(BaseIndex(regT2, regT3, TimesEight, 4), regT1); // tag
+ load32(BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
+ load32(BaseIndex(regT2, regT3, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
emitStore(dst, regT1, regT0);
map(m_bytecodeOffset + dynamic ? OPCODE_LENGTH(op_resolve_global_dynamic) : OPCODE_LENGTH(op_resolve_global), dst, regT1, regT0);
}
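This hunk, like several in the next file, replaces hard-coded 4-byte tag offsets with OBJECT_OFFSETOF(JSValue, u.asBits.payload/tag). A toy mirror of the 32-bit value encoding showing why TimesEight scaling plus those two named offsets addresses each half of a slot (the layout below is illustrative and assumes little-endian; the real encoding lives in JSValue.h):

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // Toy stand-in for the 32_64 encoding: 4-byte payload + 4-byte tag,
    // 8 bytes per value (hence the TimesEight scaling in the BaseIndex operands).
    struct ToyValue {
        union {
            struct {
                int32_t payload; // offset 0 on little-endian
                int32_t tag;     // offset 4 on little-endian
            } asBits;
        } u;
    };

    static_assert(sizeof(ToyValue) == 8, "one array slot per value");

    int main()
    {
        ToyValue storage[3] = {};
        size_t index = 2;

        // BaseIndex(base, index, TimesEight, offset) computes base + index * 8 + offset;
        // the two named offsets select the payload and tag halves of slot 'index'.
        char* base = reinterpret_cast<char*>(storage);
        int32_t* payload = reinterpret_cast<int32_t*>(base + index * sizeof(ToyValue) + offsetof(ToyValue, u.asBits.payload));
        int32_t* tag = reinterpret_cast<int32_t*>(base + index * sizeof(ToyValue) + offsetof(ToyValue, u.asBits.tag));

        *payload = 42;
        *tag = 0; // stand-in; the real code uses named constants such as JSValue::EmptyValueTag
        std::printf("slot %zu: payload=%d tag=%d\n", index, *payload, *tag);
        return 0;
    }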
diff --git a/JavaScriptCore/jit/JITPropertyAccess32_64.cpp b/JavaScriptCore/jit/JITPropertyAccess32_64.cpp
index 6b8af7c..16cf84a 100644
--- a/JavaScriptCore/jit/JITPropertyAccess32_64.cpp
+++ b/JavaScriptCore/jit/JITPropertyAccess32_64.cpp
@@ -314,8 +314,8 @@ void JIT::emit_op_get_by_val(Instruction* currentInstruction)
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSArray, m_storage)), regT3);
addSlowCase(branch32(AboveOrEqual, regT2, Address(regT0, OBJECT_OFFSETOF(JSArray, m_vectorLength))));
- load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + 4), regT1); // tag
- load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])), regT0); // payload
+ load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
+ load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
addSlowCase(branch32(Equal, regT1, Imm32(JSValue::EmptyValueTag)));
emitStore(dst, regT1, regT0);
@@ -366,12 +366,12 @@ void JIT::emit_op_put_by_val(Instruction* currentInstruction)
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSArray, m_storage)), regT3);
- Jump empty = branch32(Equal, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + 4), Imm32(JSValue::EmptyValueTag));
+ Jump empty = branch32(Equal, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), Imm32(JSValue::EmptyValueTag));
Label storeResult(this);
emitLoad(value, regT1, regT0);
- store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]))); // payload
- store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + 4)); // tag
+ store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload))); // payload
+ store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag))); // tag
Jump end = jump();
empty.link(this);
@@ -571,8 +571,8 @@ void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, Register
void JIT::compileGetDirectOffset(JSObject* base, RegisterID temp, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset)
{
if (base->isUsingInlineStorage()) {
- load32(reinterpret_cast<char*>(&base->m_inlineStorage[cachedOffset]), resultPayload);
- load32(reinterpret_cast<char*>(&base->m_inlineStorage[cachedOffset]) + 4, resultTag);
+ load32(reinterpret_cast<char*>(&base->m_inlineStorage[cachedOffset]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload), resultPayload);
+ load32(reinterpret_cast<char*>(&base->m_inlineStorage[cachedOffset]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag), resultTag);
return;
}
@@ -580,8 +580,8 @@ void JIT::compileGetDirectOffset(JSObject* base, RegisterID temp, RegisterID res
PropertyStorage* protoPropertyStorage = &base->m_externalStorage;
loadPtr(static_cast<void*>(protoPropertyStorage), temp);
- load32(Address(temp, offset), resultPayload);
- load32(Address(temp, offset + 4), resultTag);
+ load32(Address(temp, offset + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
+ load32(Address(temp, offset + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
}
void JIT::testPrototype(Structure* structure, JumpList& failureCases)
@@ -628,8 +628,8 @@ void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure
add32(Imm32(1), AbsoluteAddress(newStructure->addressOfCount()));
storePtr(ImmPtr(newStructure), Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)));
- load32(Address(stackPointerRegister, offsetof(struct JITStackFrame, args[2]) + sizeof(void*)), regT3);
- load32(Address(stackPointerRegister, offsetof(struct JITStackFrame, args[2]) + sizeof(void*) + 4), regT2);
+ load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT3);
+ load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT2);
// Write the value
compilePutDirectOffset(regT0, regT2, regT3, newStructure, cachedOffset);
@@ -673,8 +673,8 @@ void JIT::patchGetByIdSelf(CodeBlock* codeBlock, StructureStubInfo* stubInfo, St
// Patch the offset into the propoerty map to load from, then patch the Structure to look for.
repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetGetByIdStructure), structure);
- repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetGetByIdPropertyMapOffset1), offset); // payload
- repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetGetByIdPropertyMapOffset2), offset + 4); // tag
+ repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetGetByIdPropertyMapOffset1), offset + OBJECT_OFFSETOF(JSValue, u.asBits.payload)); // payload
+ repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetGetByIdPropertyMapOffset2), offset + OBJECT_OFFSETOF(JSValue, u.asBits.tag)); // tag
}
void JIT::patchMethodCallProto(CodeBlock* codeBlock, MethodCallLinkInfo& methodCallLinkInfo, JSFunction* callee, Structure* structure, JSObject* proto, ReturnAddressPtr returnAddress)
@@ -714,8 +714,8 @@ void JIT::patchPutByIdReplace(CodeBlock* codeBlock, StructureStubInfo* stubInfo,
// Patch the offset into the propoerty map to load from, then patch the Structure to look for.
repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetPutByIdStructure), structure);
- repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetPutByIdPropertyMapOffset1), offset); // payload
- repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetPutByIdPropertyMapOffset2), offset + 4); // tag
+ repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetPutByIdPropertyMapOffset1), offset + OBJECT_OFFSETOF(JSValue, u.asBits.payload)); // payload
+ repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetPutByIdPropertyMapOffset2), offset + OBJECT_OFFSETOF(JSValue, u.asBits.tag)); // tag
}
void JIT::privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress)
@@ -1122,8 +1122,8 @@ void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, Register
ASSERT(sizeof(JSValue) == 8);
Jump notUsingInlineStorage = branch32(NotEqual, Address(structure, OBJECT_OFFSETOF(Structure, m_propertyStorageCapacity)), Imm32(JSObject::inlineStorageCapacity));
- loadPtr(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSObject, m_inlineStorage)+OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
- loadPtr(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSObject, m_inlineStorage)+OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
+ loadPtr(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSObject, m_inlineStorage) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
+ loadPtr(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSObject, m_inlineStorage) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
Jump finishedLoad = jump();
notUsingInlineStorage.link(this);
loadPtr(Address(base, OBJECT_OFFSETOF(JSObject, m_externalStorage)), base);
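compileGetDirectOffset() reads both halves of a property slot either from the object's inline storage or, after one extra pointer load, from its external storage buffer. A compact sketch of that two-path lookup (the class and members below are stand-ins, not the real JSObject):

    #include <cstddef>
    #include <cstdio>

    // Illustrative only: models the "small objects keep slots inline, larger
    // objects spill to a heap buffer" split the JIT specializes on.
    struct ToySlot { int payload; int tag; };

    class ToyObject {
    public:
        static const size_t inlineCapacity = 4;

        bool usingInlineStorage() const { return m_externalStorage == nullptr; }

        ToySlot* slotAddress(size_t offset)
        {
            // Mirrors the JIT's decision: inline storage is addressed relative
            // to the object itself, external storage through one pointer load.
            if (usingInlineStorage())
                return &m_inlineStorage[offset];
            return &m_externalStorage[offset];
        }

        ToySlot m_inlineStorage[inlineCapacity] = {};
        ToySlot* m_externalStorage = nullptr;
    };

    int main()
    {
        ToyObject o;
        o.m_inlineStorage[1] = { 7, 0 };
        ToySlot* s = o.slotAddress(1);
        std::printf("payload=%d tag=%d\n", s->payload, s->tag);
        return 0;
    }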
diff --git a/JavaScriptCore/jit/JITStubs.cpp b/JavaScriptCore/jit/JITStubs.cpp
index ff0dc3f..c4a6507 100644
--- a/JavaScriptCore/jit/JITStubs.cpp
+++ b/JavaScriptCore/jit/JITStubs.cpp
@@ -1967,7 +1967,11 @@ DEFINE_STUB_FUNCTION(void*, vm_lazyLinkCall)
codePtr = executable->generatedJITCodeForCall().addressForCall();
else {
FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);
- codeBlock = &functionExecutable->bytecodeForCall(stackFrame.callFrame, callee->scope().node());
+ codeBlock = functionExecutable->bytecodeForCall(stackFrame.callFrame, callee->scope().node());
+ if (!codeBlock) {
+ stackFrame.callFrame->globalData().exception = createStackOverflowError(callFrame);
+ return 0;
+ }
functionExecutable->jitCodeForCall(callFrame, callee->scope().node());
if (callFrame->argumentCountIncludingThis() == static_cast<size_t>(codeBlock->m_numParameters))
codePtr = functionExecutable->generatedJITCodeForCall().addressForCall();
@@ -1997,7 +2001,11 @@ DEFINE_STUB_FUNCTION(void*, vm_lazyLinkConstruct)
codePtr = executable->generatedJITCodeForConstruct().addressForCall();
else {
FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);
- codeBlock = &functionExecutable->bytecodeForConstruct(stackFrame.callFrame, callee->scope().node());
+ codeBlock = functionExecutable->bytecodeForConstruct(stackFrame.callFrame, callee->scope().node());
+ if (!codeBlock) {
+ throwStackOverflowError(callFrame, stackFrame.globalData, ReturnAddressPtr(callFrame->returnPC()), STUB_RETURN_ADDRESS);
+ VM_THROW_EXCEPTION();
+ }
functionExecutable->jitCodeForConstruct(callFrame, callee->scope().node());
if (callFrame->argumentCountIncludingThis() == static_cast<size_t>(codeBlock->m_numParameters))
codePtr = functionExecutable->generatedJITCodeForConstruct().addressForCall();
@@ -2040,7 +2048,10 @@ DEFINE_STUB_FUNCTION(EncodedJSValue, op_call_NotJSFunction)
int argCount = stackFrame.args[2].int32();
CallFrame* previousCallFrame = stackFrame.callFrame;
CallFrame* callFrame = CallFrame::create(previousCallFrame->registers() + registerOffset);
-
+ if (!stackFrame.registerFile->grow(callFrame->registers())) {
+ throwStackOverflowError(previousCallFrame, stackFrame.globalData, callFrame->returnPC(), STUB_RETURN_ADDRESS);
+ VM_THROW_EXCEPTION();
+ }
callFrame->init(0, static_cast<Instruction*>((STUB_RETURN_ADDRESS).value()), previousCallFrame->scopeChain(), previousCallFrame, argCount, asObject(funcVal));
stackFrame.callFrame = callFrame;
@@ -2175,6 +2186,10 @@ DEFINE_STUB_FUNCTION(EncodedJSValue, op_construct_NotJSConstruct)
int argCount = stackFrame.args[2].int32();
CallFrame* previousCallFrame = stackFrame.callFrame;
CallFrame* callFrame = CallFrame::create(previousCallFrame->registers() + registerOffset);
+ if (!stackFrame.registerFile->grow(callFrame->registers())) {
+ throwStackOverflowError(previousCallFrame, stackFrame.globalData, callFrame->returnPC(), STUB_RETURN_ADDRESS);
+ VM_THROW_EXCEPTION();
+ }
callFrame->init(0, static_cast<Instruction*>((STUB_RETURN_ADDRESS).value()), previousCallFrame->scopeChain(), previousCallFrame, argCount, asObject(constrVal));
stackFrame.callFrame = callFrame;
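Both NotJSFunction stubs now ask the register file for room before initializing the new CallFrame, throwing a stack overflow if growth is refused. A minimal sketch of that check-before-commit pattern (the class below is a stand-in, not the JSC RegisterFile API):

    #include <cstddef>
    #include <cstdio>
    #include <vector>

    // Stand-in for a bounded register file: grow() refuses to pass the limit
    // instead of growing unconditionally.
    class ToyRegisterFile {
    public:
        explicit ToyRegisterFile(size_t capacity) : m_capacity(capacity) {}

        bool grow(size_t newEnd)
        {
            if (newEnd > m_capacity)
                return false; // caller must raise a stack overflow error
            if (newEnd > m_registers.size())
                m_registers.resize(newEnd);
            return true;
        }

    private:
        size_t m_capacity;
        std::vector<void*> m_registers;
    };

    // Analogous to the stub's throwStackOverflowError + VM_THROW_EXCEPTION path:
    // report failure before any frame state is built.
    bool enterCallFrame(ToyRegisterFile& file, size_t frameEnd)
    {
        if (!file.grow(frameEnd)) {
            std::puts("stack overflow: refusing to build the new call frame");
            return false;
        }
        // ...only now would callFrame->init(...) run in the real stub...
        return true;
    }

    int main()
    {
        ToyRegisterFile file(64);
        enterCallFrame(file, 32);  // fits
        enterCallFrame(file, 128); // rejected, like the new guard in the stubs
        return 0;
    }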