Diffstat (limited to 'src/3rdparty/webkit/JavaScriptCore/jit')
-rw-r--r--  src/3rdparty/webkit/JavaScriptCore/jit/ExecutableAllocator.h   71
-rw-r--r--  src/3rdparty/webkit/JavaScriptCore/jit/JIT.cpp                  56
-rw-r--r--  src/3rdparty/webkit/JavaScriptCore/jit/JIT.h                    16
-rw-r--r--  src/3rdparty/webkit/JavaScriptCore/jit/JITCode.h                 7
-rw-r--r--  src/3rdparty/webkit/JavaScriptCore/jit/JITInlineMethods.h       4
-rw-r--r--  src/3rdparty/webkit/JavaScriptCore/jit/JITPropertyAccess.cpp   37
-rw-r--r--  src/3rdparty/webkit/JavaScriptCore/jit/JITStubs.cpp             69
7 files changed, 129 insertions, 131 deletions
diff --git a/src/3rdparty/webkit/JavaScriptCore/jit/ExecutableAllocator.h b/src/3rdparty/webkit/JavaScriptCore/jit/ExecutableAllocator.h
index 0de4f79..4ed47e3 100644
--- a/src/3rdparty/webkit/JavaScriptCore/jit/ExecutableAllocator.h
+++ b/src/3rdparty/webkit/JavaScriptCore/jit/ExecutableAllocator.h
@@ -156,7 +156,7 @@ public:
return pool.release();
}
-#if ENABLE(ASSEMBLER_WX_EXCLUSIVE) || !(PLATFORM(X86) || PLATFORM(X86_64))
+#if ENABLE(ASSEMBLER_WX_EXCLUSIVE)
static void makeWritable(void* start, size_t size)
{
reprotectRegion(start, size, Writable);
@@ -165,58 +165,47 @@ public:
static void makeExecutable(void* start, size_t size)
{
reprotectRegion(start, size, Executable);
- cacheFlush(start, size);
}
-
- // If ASSEMBLER_WX_EXCLUSIVE protection is turned on, or on non-x86 platforms,
- // we need to track start & size so we can makeExecutable/cacheFlush at the end.
- class MakeWritable {
- public:
- MakeWritable(void* start, size_t size)
- : m_start(start)
- , m_size(size)
- {
- makeWritable(start, size);
- }
-
- ~MakeWritable()
- {
- makeExecutable(m_start, m_size);
- }
-
- private:
- void* m_start;
- size_t m_size;
- };
#else
static void makeWritable(void*, size_t) {}
static void makeExecutable(void*, size_t) {}
-
- // On x86, without ASSEMBLER_WX_EXCLUSIVE, there is nothing to do here.
- class MakeWritable { public: MakeWritable(void*, size_t) {} };
#endif
-private:
-
-#if ENABLE(ASSEMBLER_WX_EXCLUSIVE) || !(PLATFORM(X86) || PLATFORM(X86_64))
-#if ENABLE(ASSEMBLER_WX_EXCLUSIVE)
- static void reprotectRegion(void*, size_t, ProtectionSeting);
-#else
- static void reprotectRegion(void*, size_t, ProtectionSeting) {}
-#endif
- static void cacheFlush(void* code, size_t size)
- {
#if PLATFORM(X86) || PLATFORM(X86_64)
- UNUSED_PARAM(code);
- UNUSED_PARAM(size);
+ static void cacheFlush(void*, size_t)
+ {
+ }
#elif PLATFORM_ARM_ARCH(7) && PLATFORM(IPHONE)
+ static void cacheFlush(void* code, size_t size)
+ {
sys_dcache_flush(code, size);
sys_icache_invalidate(code, size);
-#else
-#error "ExecutableAllocator::cacheFlush not implemented on this platform."
-#endif
}
+#elif PLATFORM(ARM)
+ static void cacheFlush(void* code, size_t size)
+ {
+ #if COMPILER(GCC) && (GCC_VERSION >= 30406)
+ __clear_cache(reinterpret_cast<char*>(code), reinterpret_cast<char*>(code) + size);
+ #else
+ const int syscall = 0xf0002;
+ __asm __volatile (
+ "mov r0, %0\n"
+ "mov r1, %1\n"
+ "mov r7, %2\n"
+ "mov r2, #0x0\n"
+ "swi 0x00000000\n"
+ :
+ : "r" (code), "r" (reinterpret_cast<char*>(code) + size), "r" (syscall)
+ : "r0", "r1", "r7");
+ #endif // COMPILER(GCC) && (GCC_VERSION >= 30406)
+ }
+#endif
+
+private:
+
+#if ENABLE(ASSEMBLER_WX_EXCLUSIVE)
+ static void reprotectRegion(void*, size_t, ProtectionSeting);
#endif
RefPtr<ExecutablePool> m_smallAllocationPool;
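
Note on the hunk above: it removes the MakeWritable RAII helper, stops calling cacheFlush from makeExecutable, and instead exposes a per-platform cacheFlush (a no-op on x86/x86_64, sys_dcache_flush/sys_icache_invalidate on iPhone ARMv7, and __clear_cache or the cacheflush syscall on other ARM targets), while reprotectRegion stays private and is compiled only under ASSEMBLER_WX_EXCLUSIVE. The standalone sketch below illustrates the same write-then-flush discipline in isolation; the names (CodeWriteScope, flushInstructionCache) are hypothetical and not part of JavaScriptCore's API, and the page-protection calls are deliberately omitted.

    // Illustrative only: hypothetical names, not JavaScriptCore's API.
    #include <cstddef>
    #include <cstdint>

    static void flushInstructionCache(void* start, size_t size)
    {
    #if defined(__i386__) || defined(__x86_64__)
        // x86 keeps instruction and data caches coherent; nothing to do.
        (void)start;
        (void)size;
    #elif defined(__GNUC__)
        // GCC/Clang builtin; on ARM this reaches the same libgcc __clear_cache /
        // cacheflush-syscall path the patch uses.
        __builtin___clear_cache(static_cast<char*>(start),
                                static_cast<char*>(start) + size);
    #endif
    }

    // RAII scope analogous to the MakeWritable helper removed by this patch:
    // writable on entry, flushed (and, in the real allocator, re-protected
    // executable when ASSEMBLER_WX_EXCLUSIVE is enabled) on exit.
    class CodeWriteScope {
    public:
        CodeWriteScope(void* start, size_t size) : m_start(start), m_size(size) {}
        ~CodeWriteScope() { flushInstructionCache(m_start, m_size); }
    private:
        void* m_start;
        size_t m_size;
    };

    int main()
    {
        uint8_t buffer[64] = {};
        {
            CodeWriteScope scope(buffer, sizeof(buffer));
            buffer[0] = 0xC3; // pretend some machine code was emitted here
        }
        return 0;
    }

The split reflects that instruction-cache maintenance is an architectural requirement on ARM regardless of W^X policy, whereas page reprotection is only needed when ASSEMBLER_WX_EXCLUSIVE is enabled.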
diff --git a/src/3rdparty/webkit/JavaScriptCore/jit/JIT.cpp b/src/3rdparty/webkit/JavaScriptCore/jit/JIT.cpp
index f1b22c0..a0e462b 100644
--- a/src/3rdparty/webkit/JavaScriptCore/jit/JIT.cpp
+++ b/src/3rdparty/webkit/JavaScriptCore/jit/JIT.cpp
@@ -26,6 +26,12 @@
#include "config.h"
#include "JIT.h"
+// This probably does not belong here; adding here for now as a quick Windows build fix.
+#if ENABLE(ASSEMBLER) && PLATFORM(X86) && !PLATFORM(MAC)
+#include "MacroAssembler.h"
+JSC::MacroAssemblerX86Common::SSE2CheckState JSC::MacroAssemblerX86Common::s_sse2CheckState = NotCheckedSSE2;
+#endif
+
#if ENABLE(JIT)
#include "CodeBlock.h"
@@ -34,6 +40,8 @@
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
+#include "LinkBuffer.h"
+#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"
@@ -45,21 +53,21 @@ using namespace std;
namespace JSC {
-void ctiPatchNearCallByReturnAddress(ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction)
+void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction)
{
- MacroAssembler::RepatchBuffer repatchBuffer;
+ RepatchBuffer repatchBuffer(codeblock);
repatchBuffer.relinkNearCallerToTrampoline(returnAddress, newCalleeFunction);
}
-void ctiPatchCallByReturnAddress(ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction)
+void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction)
{
- MacroAssembler::RepatchBuffer repatchBuffer;
+ RepatchBuffer repatchBuffer(codeblock);
repatchBuffer.relinkCallerToTrampoline(returnAddress, newCalleeFunction);
}
-void ctiPatchCallByReturnAddress(ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction)
+void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction)
{
- MacroAssembler::RepatchBuffer repatchBuffer;
+ RepatchBuffer repatchBuffer(codeblock);
repatchBuffer.relinkCallerToFunction(returnAddress, newCalleeFunction);
}
@@ -396,7 +404,7 @@ void JIT::privateCompile()
#endif
// Could use a pop_m, but would need to offset the following instruction if so.
- preverveReturnAddressAfterCall(regT2);
+ preserveReturnAddressAfterCall(regT2);
emitPutToCallFrameHeader(regT2, RegisterFile::ReturnPC);
Jump slowRegisterFileCheck;
@@ -488,6 +496,7 @@ void JIT::privateCompile()
#if ENABLE(JIT_OPTIMIZE_CALL)
for (unsigned i = 0; i < m_codeBlock->numberOfCallLinkInfos(); ++i) {
CallLinkInfo& info = m_codeBlock->callLinkInfo(i);
+ info.ownerCodeBlock = m_codeBlock;
info.callReturnLocation = patchBuffer.locationOfNearCall(m_callStructureStubCompilationInfo[i].callReturnLocation);
info.hotPathBegin = patchBuffer.locationOf(m_callStructureStubCompilationInfo[i].hotPathBegin);
info.hotPathOther = patchBuffer.locationOfNearCall(m_callStructureStubCompilationInfo[i].hotPathOther);
@@ -553,7 +562,7 @@ void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executable
loadPtr(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_body)), regT3);
loadPtr(Address(regT3, OBJECT_OFFSETOF(FunctionBodyNode, m_code)), regT0);
Jump hasCodeBlock1 = branchTestPtr(NonZero, regT0);
- preverveReturnAddressAfterCall(regT3);
+ preserveReturnAddressAfterCall(regT3);
restoreArgumentReference();
Call callJSFunction1 = call();
emitGetJITStubArg(1, regT2);
@@ -565,7 +574,7 @@ void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executable
// Check argCount matches callee arity.
Jump arityCheckOkay1 = branch32(Equal, Address(regT0, OBJECT_OFFSETOF(CodeBlock, m_numParameters)), regT1);
- preverveReturnAddressAfterCall(regT3);
+ preserveReturnAddressAfterCall(regT3);
emitPutJITStubArg(regT3, 2);
emitPutJITStubArg(regT0, 4);
restoreArgumentReference();
@@ -579,7 +588,7 @@ void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executable
compileOpCallInitializeCallFrame();
- preverveReturnAddressAfterCall(regT3);
+ preserveReturnAddressAfterCall(regT3);
emitPutJITStubArg(regT3, 2);
restoreArgumentReference();
Call callDontLazyLinkCall = call();
@@ -594,7 +603,7 @@ void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executable
loadPtr(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_body)), regT3);
loadPtr(Address(regT3, OBJECT_OFFSETOF(FunctionBodyNode, m_code)), regT0);
Jump hasCodeBlock2 = branchTestPtr(NonZero, regT0);
- preverveReturnAddressAfterCall(regT3);
+ preserveReturnAddressAfterCall(regT3);
restoreArgumentReference();
Call callJSFunction2 = call();
emitGetJITStubArg(1, regT2);
@@ -606,7 +615,7 @@ void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executable
// Check argCount matches callee arity.
Jump arityCheckOkay2 = branch32(Equal, Address(regT0, OBJECT_OFFSETOF(CodeBlock, m_numParameters)), regT1);
- preverveReturnAddressAfterCall(regT3);
+ preserveReturnAddressAfterCall(regT3);
emitPutJITStubArg(regT3, 2);
emitPutJITStubArg(regT0, 4);
restoreArgumentReference();
@@ -620,7 +629,7 @@ void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executable
compileOpCallInitializeCallFrame();
- preverveReturnAddressAfterCall(regT3);
+ preserveReturnAddressAfterCall(regT3);
emitPutJITStubArg(regT3, 2);
restoreArgumentReference();
Call callLazyLinkCall = call();
@@ -634,7 +643,7 @@ void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executable
loadPtr(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_body)), regT3);
loadPtr(Address(regT3, OBJECT_OFFSETOF(FunctionBodyNode, m_code)), regT0);
Jump hasCodeBlock3 = branchTestPtr(NonZero, regT0);
- preverveReturnAddressAfterCall(regT3);
+ preserveReturnAddressAfterCall(regT3);
restoreArgumentReference();
Call callJSFunction3 = call();
emitGetJITStubArg(1, regT2);
@@ -647,7 +656,7 @@ void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executable
// Check argCount matches callee arity.
Jump arityCheckOkay3 = branch32(Equal, Address(regT0, OBJECT_OFFSETOF(CodeBlock, m_numParameters)), regT1);
- preverveReturnAddressAfterCall(regT3);
+ preserveReturnAddressAfterCall(regT3);
emitPutJITStubArg(regT3, 2);
emitPutJITStubArg(regT0, 4);
restoreArgumentReference();
@@ -668,7 +677,7 @@ void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executable
Label nativeCallThunk = align();
- preverveReturnAddressAfterCall(regT0);
+ preserveReturnAddressAfterCall(regT0);
emitPutToCallFrameHeader(regT0, RegisterFile::ReturnPC); // Push return address
// Load caller frame's scope chain into this callframe so that whatever we call can
@@ -903,14 +912,14 @@ void JIT::unlinkCall(CallLinkInfo* callLinkInfo)
// When the JSFunction is deleted the pointer embedded in the instruction stream will no longer be valid
// (and, if a new JSFunction happened to be constructed at the same location, we could get a false positive
// match). Reset the check so it no longer matches.
- RepatchBuffer repatchBuffer;
+ RepatchBuffer repatchBuffer(callLinkInfo->ownerCodeBlock);
repatchBuffer.repatch(callLinkInfo->hotPathBegin, JSValue::encode(JSValue()));
}
-void JIT::linkCall(JSFunction* callee, CodeBlock* calleeCodeBlock, JITCode& code, CallLinkInfo* callLinkInfo, int callerArgCount, JSGlobalData* globalData)
+void JIT::linkCall(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, JITCode& code, CallLinkInfo* callLinkInfo, int callerArgCount, JSGlobalData* globalData)
{
ASSERT(calleeCodeBlock);
- RepatchBuffer repatchBuffer;
+ RepatchBuffer repatchBuffer(callerCodeBlock);
// Currently we only link calls with the exact number of arguments.
// If this is a native call calleeCodeBlock is null so the number of parameters is unimportant
@@ -931,12 +940,3 @@ void JIT::linkCall(JSFunction* callee, CodeBlock* calleeCodeBlock, JITCode& code
} // namespace JSC
#endif // ENABLE(JIT)
-
-// This probably does not belong here; adding here for now as a quick Windows build fix.
-#if ENABLE(ASSEMBLER)
-
-#if PLATFORM(X86) && !PLATFORM(MAC)
-JSC::MacroAssemblerX86Common::SSE2CheckState JSC::MacroAssemblerX86Common::s_sse2CheckState = NotCheckedSSE2;
-#endif
-
-#endif
diff --git a/src/3rdparty/webkit/JavaScriptCore/jit/JIT.h b/src/3rdparty/webkit/JavaScriptCore/jit/JIT.h
index db3f38a..ceffe59 100644
--- a/src/3rdparty/webkit/JavaScriptCore/jit/JIT.h
+++ b/src/3rdparty/webkit/JavaScriptCore/jit/JIT.h
@@ -171,9 +171,9 @@ namespace JSC {
};
// Near calls can only be patched to other JIT code, regular calls can be patched to JIT code or relinked to stub functions.
- void ctiPatchNearCallByReturnAddress(ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
- void ctiPatchCallByReturnAddress(ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
- void ctiPatchCallByReturnAddress(ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction);
+ void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
+ void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
+ void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction);
class JIT : private MacroAssembler {
friend class JITStubCall;
@@ -379,9 +379,9 @@ namespace JSC {
jit.privateCompileCTIMachineTrampolines(executablePool, globalData, ctiArrayLengthTrampoline, ctiStringLengthTrampoline, ctiVirtualCallPreLink, ctiVirtualCallLink, ctiVirtualCall, ctiNativeCallThunk);
}
- static void patchGetByIdSelf(StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
- static void patchPutByIdReplace(StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
- static void patchMethodCallProto(MethodCallLinkInfo&, JSFunction*, Structure*, JSObject*);
+ static void patchGetByIdSelf(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
+ static void patchPutByIdReplace(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
+ static void patchMethodCallProto(CodeBlock* codeblock, MethodCallLinkInfo&, JSFunction*, Structure*, JSObject*);
static void compilePatchGetArrayLength(JSGlobalData* globalData, CodeBlock* codeBlock, ReturnAddressPtr returnAddress)
{
@@ -389,7 +389,7 @@ namespace JSC {
return jit.privateCompilePatchGetArrayLength(returnAddress);
}
- static void linkCall(JSFunction* callee, CodeBlock* calleeCodeBlock, JITCode&, CallLinkInfo*, int callerArgCount, JSGlobalData*);
+ static void linkCall(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, JITCode&, CallLinkInfo*, int callerArgCount, JSGlobalData*);
static void unlinkCall(CallLinkInfo*);
private:
@@ -663,7 +663,7 @@ namespace JSC {
void restoreArgumentReferenceForTrampoline();
Call emitNakedCall(CodePtr function = CodePtr());
- void preverveReturnAddressAfterCall(RegisterID);
+ void preserveReturnAddressAfterCall(RegisterID);
void restoreReturnAddressBeforeReturn(RegisterID);
void restoreReturnAddressBeforeReturn(Address);
diff --git a/src/3rdparty/webkit/JavaScriptCore/jit/JITCode.h b/src/3rdparty/webkit/JavaScriptCore/jit/JITCode.h
index 7ee644b..b502c8a 100644
--- a/src/3rdparty/webkit/JavaScriptCore/jit/JITCode.h
+++ b/src/3rdparty/webkit/JavaScriptCore/jit/JITCode.h
@@ -83,13 +83,16 @@ namespace JSC {
m_ref.m_code.executableAddress(), registerFile, callFrame, exception, Profiler::enabledProfilerReference(), globalData));
}
-#ifndef NDEBUG
+ void* start()
+ {
+ return m_ref.m_code.dataLocation();
+ }
+
size_t size()
{
ASSERT(m_ref.m_code.executableAddress());
return m_ref.m_size;
}
-#endif
ExecutablePool* getExecutablePool()
{
diff --git a/src/3rdparty/webkit/JavaScriptCore/jit/JITInlineMethods.h b/src/3rdparty/webkit/JavaScriptCore/jit/JITInlineMethods.h
index deca0d1..f03d635 100644
--- a/src/3rdparty/webkit/JavaScriptCore/jit/JITInlineMethods.h
+++ b/src/3rdparty/webkit/JavaScriptCore/jit/JITInlineMethods.h
@@ -179,7 +179,7 @@ ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr function)
#if PLATFORM(X86) || PLATFORM(X86_64)
-ALWAYS_INLINE void JIT::preverveReturnAddressAfterCall(RegisterID reg)
+ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
pop(reg);
}
@@ -196,7 +196,7 @@ ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
#elif PLATFORM_ARM_ARCH(7)
-ALWAYS_INLINE void JIT::preverveReturnAddressAfterCall(RegisterID reg)
+ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
move(linkRegister, reg);
}
diff --git a/src/3rdparty/webkit/JavaScriptCore/jit/JITPropertyAccess.cpp b/src/3rdparty/webkit/JavaScriptCore/jit/JITPropertyAccess.cpp
index ed8f48f..c1e5c29 100644
--- a/src/3rdparty/webkit/JavaScriptCore/jit/JITPropertyAccess.cpp
+++ b/src/3rdparty/webkit/JavaScriptCore/jit/JITPropertyAccess.cpp
@@ -34,6 +34,8 @@
#include "JSArray.h"
#include "JSFunction.h"
#include "Interpreter.h"
+#include "LinkBuffer.h"
+#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"
@@ -461,7 +463,7 @@ void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure
if (willNeedStorageRealloc) {
// This trampoline was called to like a JIT stub; before we can can call again we need to
// remove the return address from the stack, to prevent the stack from becoming misaligned.
- preverveReturnAddressAfterCall(regT3);
+ preserveReturnAddressAfterCall(regT3);
JITStubCall stubCall(this, JITStubs::cti_op_put_by_id_transition_realloc);
stubCall.addArgument(regT0);
@@ -501,13 +503,13 @@ void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure
CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
stubInfo->stubRoutine = entryLabel;
- RepatchBuffer repatchBuffer;
+ RepatchBuffer repatchBuffer(m_codeBlock);
repatchBuffer.relinkCallerToTrampoline(returnAddress, entryLabel);
}
-void JIT::patchGetByIdSelf(StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress)
+void JIT::patchGetByIdSelf(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress)
{
- RepatchBuffer repatchBuffer;
+ RepatchBuffer repatchBuffer(codeBlock);
// We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
// Should probably go to JITStubs::cti_op_get_by_id_fail, but that doesn't do anything interesting right now.
@@ -525,23 +527,28 @@ void JIT::patchGetByIdSelf(StructureStubInfo* stubInfo, Structure* structure, si
repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetGetByIdPropertyMapOffset), offset);
}
-void JIT::patchMethodCallProto(MethodCallLinkInfo& methodCallLinkInfo, JSFunction* callee, Structure* structure, JSObject* proto)
+void JIT::patchMethodCallProto(CodeBlock* codeBlock, MethodCallLinkInfo& methodCallLinkInfo, JSFunction* callee, Structure* structure, JSObject* proto)
{
- RepatchBuffer repatchBuffer;
+ RepatchBuffer repatchBuffer(codeBlock);
ASSERT(!methodCallLinkInfo.cachedStructure);
methodCallLinkInfo.cachedStructure = structure;
structure->ref();
+ Structure* prototypeStructure = proto->structure();
+ ASSERT(!methodCallLinkInfo.cachedPrototypeStructure);
+ methodCallLinkInfo.cachedPrototypeStructure = prototypeStructure;
+ prototypeStructure->ref();
+
repatchBuffer.repatch(methodCallLinkInfo.structureLabel, structure);
repatchBuffer.repatch(methodCallLinkInfo.structureLabel.dataLabelPtrAtOffset(patchOffsetMethodCheckProtoObj), proto);
- repatchBuffer.repatch(methodCallLinkInfo.structureLabel.dataLabelPtrAtOffset(patchOffsetMethodCheckProtoStruct), proto->structure());
+ repatchBuffer.repatch(methodCallLinkInfo.structureLabel.dataLabelPtrAtOffset(patchOffsetMethodCheckProtoStruct), prototypeStructure);
repatchBuffer.repatch(methodCallLinkInfo.structureLabel.dataLabelPtrAtOffset(patchOffsetMethodCheckPutFunction), callee);
}
-void JIT::patchPutByIdReplace(StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress)
+void JIT::patchPutByIdReplace(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress)
{
- RepatchBuffer repatchBuffer;
+ RepatchBuffer repatchBuffer(codeBlock);
// We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
// Should probably go to JITStubs::cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
@@ -591,7 +598,7 @@ void JIT::privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress)
// Finally patch the jump to slow case back in the hot path to jump here instead.
CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
- RepatchBuffer repatchBuffer;
+ RepatchBuffer repatchBuffer(m_codeBlock);
repatchBuffer.relink(jumpLocation, entryLabel);
// We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
@@ -637,7 +644,7 @@ void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* str
// Finally patch the jump to slow case back in the hot path to jump here instead.
CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
- RepatchBuffer repatchBuffer;
+ RepatchBuffer repatchBuffer(m_codeBlock);
repatchBuffer.relink(jumpLocation, entryLabel);
// We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
@@ -669,7 +676,7 @@ void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, Polymorphic
// Finally patch the jump to slow case back in the hot path to jump here instead.
CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
- RepatchBuffer repatchBuffer;
+ RepatchBuffer repatchBuffer(m_codeBlock);
repatchBuffer.relink(jumpLocation, entryLabel);
}
@@ -714,7 +721,7 @@ void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, Polymorphi
// Finally patch the jump to slow case back in the hot path to jump here instead.
CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
- RepatchBuffer repatchBuffer;
+ RepatchBuffer repatchBuffer(m_codeBlock);
repatchBuffer.relink(jumpLocation, entryLabel);
}
@@ -768,7 +775,7 @@ void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, Polymorphi
// Finally patch the jump to slow case back in the hot path to jump here instead.
CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
- RepatchBuffer repatchBuffer;
+ RepatchBuffer repatchBuffer(m_codeBlock);
repatchBuffer.relink(jumpLocation, entryLabel);
}
@@ -816,7 +823,7 @@ void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* str
// Finally patch the jump to slow case back in the hot path to jump here instead.
CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
- RepatchBuffer repatchBuffer;
+ RepatchBuffer repatchBuffer(m_codeBlock);
repatchBuffer.relink(jumpLocation, entryLabel);
// We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
diff --git a/src/3rdparty/webkit/JavaScriptCore/jit/JITStubs.cpp b/src/3rdparty/webkit/JavaScriptCore/jit/JITStubs.cpp
index 02bf7c0..5049477 100644
--- a/src/3rdparty/webkit/JavaScriptCore/jit/JITStubs.cpp
+++ b/src/3rdparty/webkit/JavaScriptCore/jit/JITStubs.cpp
@@ -358,7 +358,7 @@ NEVER_INLINE void JITThunks::tryCachePutByID(CallFrame* callFrame, CodeBlock* co
// Uncacheable: give up.
if (!slot.isCacheable()) {
- ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(JITStubs::cti_op_put_by_id_generic));
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(JITStubs::cti_op_put_by_id_generic));
return;
}
@@ -366,13 +366,13 @@ NEVER_INLINE void JITThunks::tryCachePutByID(CallFrame* callFrame, CodeBlock* co
Structure* structure = baseCell->structure();
if (structure->isDictionary()) {
- ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(JITStubs::cti_op_put_by_id_generic));
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(JITStubs::cti_op_put_by_id_generic));
return;
}
// If baseCell != base, then baseCell must be a proxy for another object.
if (baseCell != slot.base()) {
- ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(JITStubs::cti_op_put_by_id_generic));
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(JITStubs::cti_op_put_by_id_generic));
return;
}
@@ -384,7 +384,7 @@ NEVER_INLINE void JITThunks::tryCachePutByID(CallFrame* callFrame, CodeBlock* co
if (slot.type() == PutPropertySlot::NewProperty) {
StructureChain* prototypeChain = structure->prototypeChain(callFrame);
if (!prototypeChain->isCacheable()) {
- ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(JITStubs::cti_op_put_by_id_generic));
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(JITStubs::cti_op_put_by_id_generic));
return;
}
stubInfo->initPutByIdTransition(structure->previousID(), structure, prototypeChain);
@@ -394,7 +394,7 @@ NEVER_INLINE void JITThunks::tryCachePutByID(CallFrame* callFrame, CodeBlock* co
stubInfo->initPutByIdReplace(structure);
- JIT::patchPutByIdReplace(stubInfo, structure, slot.cachedOffset(), returnAddress);
+ JIT::patchPutByIdReplace(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress);
}
NEVER_INLINE void JITThunks::tryCacheGetByID(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr returnAddress, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot)
@@ -404,7 +404,7 @@ NEVER_INLINE void JITThunks::tryCacheGetByID(CallFrame* callFrame, CodeBlock* co
// FIXME: Cache property access for immediates.
if (!baseValue.isCell()) {
- ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(JITStubs::cti_op_get_by_id_generic));
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(JITStubs::cti_op_get_by_id_generic));
return;
}
@@ -418,13 +418,13 @@ NEVER_INLINE void JITThunks::tryCacheGetByID(CallFrame* callFrame, CodeBlock* co
if (isJSString(globalData, baseValue) && propertyName == callFrame->propertyNames().length) {
// The tradeoff of compiling an patched inline string length access routine does not seem
// to pay off, so we currently only do this for arrays.
- ctiPatchCallByReturnAddress(returnAddress, globalData->jitStubs.ctiStringLengthTrampoline());
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, globalData->jitStubs.ctiStringLengthTrampoline());
return;
}
// Uncacheable: give up.
if (!slot.isCacheable()) {
- ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(JITStubs::cti_op_get_by_id_generic));
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(JITStubs::cti_op_get_by_id_generic));
return;
}
@@ -432,7 +432,7 @@ NEVER_INLINE void JITThunks::tryCacheGetByID(CallFrame* callFrame, CodeBlock* co
Structure* structure = baseCell->structure();
if (structure->isDictionary()) {
- ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(JITStubs::cti_op_get_by_id_generic));
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(JITStubs::cti_op_get_by_id_generic));
return;
}
@@ -447,7 +447,7 @@ NEVER_INLINE void JITThunks::tryCacheGetByID(CallFrame* callFrame, CodeBlock* co
// set this up, so derefStructures can do it's job.
stubInfo->initGetByIdSelf(structure);
- JIT::patchGetByIdSelf(stubInfo, structure, slot.cachedOffset(), returnAddress);
+ JIT::patchGetByIdSelf(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress);
return;
}
@@ -475,7 +475,7 @@ NEVER_INLINE void JITThunks::tryCacheGetByID(CallFrame* callFrame, CodeBlock* co
StructureChain* prototypeChain = structure->prototypeChain(callFrame);
if (!prototypeChain->isCacheable()) {
- ctiPatchCallByReturnAddress(returnAddress, FunctionPtr(JITStubs::cti_op_get_by_id_generic));
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(JITStubs::cti_op_get_by_id_generic));
return;
}
stubInfo->initGetByIdChain(structure, prototypeChain);
@@ -777,7 +777,7 @@ DEFINE_STUB_FUNCTION(void, op_put_by_id)
PutPropertySlot slot;
stackFrame.args[0].jsValue().put(callFrame, ident, stackFrame.args[2].jsValue(), slot);
- ctiPatchCallByReturnAddress(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_put_by_id_second));
+ ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_put_by_id_second));
CHECK_FOR_EXCEPTION_AT_END();
}
@@ -831,7 +831,7 @@ DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id)
PropertySlot slot(baseValue);
JSValue result = baseValue.get(callFrame, ident, slot);
- ctiPatchCallByReturnAddress(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_second));
+ ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_second));
CHECK_FOR_EXCEPTION_AT_END();
return JSValue::encode(result);
@@ -848,7 +848,7 @@ DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_method_check)
PropertySlot slot(baseValue);
JSValue result = baseValue.get(callFrame, ident, slot);
- ctiPatchCallByReturnAddress(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_method_check_second));
+ ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_method_check_second));
CHECK_FOR_EXCEPTION_AT_END();
return JSValue::encode(result);
@@ -900,7 +900,7 @@ DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_method_check_second)
// Check to see if the function is on the object's prototype. Patch up the code to optimize.
if (slot.slotBase() == structure->prototypeForLookup(callFrame))
- JIT::patchMethodCallProto(methodCallLinkInfo, callee, structure, slotBaseObject);
+ JIT::patchMethodCallProto(callFrame->codeBlock(), methodCallLinkInfo, callee, structure, slotBaseObject);
// Check to see if the function is on the object itself.
// Since we generate the method-check to check both the structure and a prototype-structure (since this
// is the common case) we have a problem - we need to patch the prototype structure check to do something
@@ -908,13 +908,13 @@ DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_method_check_second)
// for now. For now it performs a check on a special object on the global object only used for this
// purpose. The object is in no way exposed, and as such the check will always pass.
else if (slot.slotBase() == baseValue)
- JIT::patchMethodCallProto(methodCallLinkInfo, callee, structure, callFrame->scopeChain()->globalObject()->methodCallDummy());
+ JIT::patchMethodCallProto(callFrame->codeBlock(), methodCallLinkInfo, callee, structure, callFrame->scopeChain()->globalObject()->methodCallDummy());
// For now let any other case be cached as a normal get_by_id.
}
// Revert the get_by_id op back to being a regular get_by_id - allow it to cache like normal, if it needs to.
- ctiPatchCallByReturnAddress(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id));
+ ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id));
return JSValue::encode(result);
}
@@ -975,10 +975,9 @@ DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_self_fail)
JIT::compileGetByIdSelfList(callFrame->scopeChain()->globalData, codeBlock, stubInfo, polymorphicStructureList, listIndex, asCell(baseValue)->structure(), slot.cachedOffset());
if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
- ctiPatchCallByReturnAddress(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
- } else {
- ctiPatchCallByReturnAddress(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
- }
+ ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
+ } else
+ ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
return JSValue::encode(result);
}
@@ -1024,7 +1023,7 @@ DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_proto_list)
CHECK_FOR_EXCEPTION();
if (!baseValue.isCell() || !slot.isCacheable() || asCell(baseValue)->structure()->isDictionary()) {
- ctiPatchCallByReturnAddress(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
+ ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
return JSValue::encode(result);
}
@@ -1036,7 +1035,7 @@ DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_proto_list)
JSObject* slotBaseObject = asObject(slot.slotBase());
if (slot.slotBase() == baseValue)
- ctiPatchCallByReturnAddress(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
+ ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
else if (slot.slotBase() == asCell(baseValue)->structure()->prototypeForLookup(callFrame)) {
// Since we're accessing a prototype in a loop, it's a good bet that it
// should not be treated as a dictionary.
@@ -1049,11 +1048,11 @@ DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_proto_list)
JIT::compileGetByIdProtoList(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, slotBaseObject->structure(), slot.cachedOffset());
if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
- ctiPatchCallByReturnAddress(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
+ ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
} else if (size_t count = countPrototypeChainEntriesAndCheckForProxies(callFrame, baseValue, slot)) {
StructureChain* protoChain = structure->prototypeChain(callFrame);
if (!protoChain->isCacheable()) {
- ctiPatchCallByReturnAddress(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
+ ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
return JSValue::encode(result);
}
@@ -1062,9 +1061,9 @@ DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_proto_list)
JIT::compileGetByIdChainList(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, protoChain, count, slot.cachedOffset());
if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
- ctiPatchCallByReturnAddress(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
+ ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
} else
- ctiPatchCallByReturnAddress(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
+ ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
return JSValue::encode(result);
}
@@ -1271,7 +1270,7 @@ DEFINE_STUB_FUNCTION(void*, vm_dontLazyLinkCall)
JSGlobalData* globalData = stackFrame.globalData;
JSFunction* callee = asFunction(stackFrame.args[0].jsValue());
- ctiPatchNearCallByReturnAddress(stackFrame.args[1].returnAddress(), globalData->jitStubs.ctiVirtualCallLink());
+ ctiPatchNearCallByReturnAddress(stackFrame.callFrame->callerFrame()->codeBlock(), stackFrame.args[1].returnAddress(), globalData->jitStubs.ctiVirtualCallLink());
return callee->body()->generatedJITCode().addressForCall().executableAddress();
}
@@ -1290,7 +1289,7 @@ DEFINE_STUB_FUNCTION(void*, vm_lazyLinkCall)
codeBlock = &callee->body()->generatedBytecode();
CallLinkInfo* callLinkInfo = &stackFrame.callFrame->callerFrame()->codeBlock()->getCallLinkInfo(stackFrame.args[1].returnAddress());
- JIT::linkCall(callee, codeBlock, jitCode, callLinkInfo, stackFrame.args[2].int32(), stackFrame.globalData);
+ JIT::linkCall(callee, stackFrame.callFrame->callerFrame()->codeBlock(), codeBlock, jitCode, callLinkInfo, stackFrame.args[2].int32(), stackFrame.globalData);
return jitCode.addressForCall().executableAddress();
}
@@ -1530,11 +1529,11 @@ DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_val)
result = jsArray->JSArray::get(callFrame, i);
} else if (isJSString(globalData, baseValue) && asString(baseValue)->canGetIndex(i)) {
// All fast byte array accesses are safe from exceptions so return immediately to avoid exception checks.
- ctiPatchCallByReturnAddress(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_val_string));
+ ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_val_string));
result = asString(baseValue)->getIndex(stackFrame.globalData, i);
} else if (isJSByteArray(globalData, baseValue) && asByteArray(baseValue)->canAccessIndex(i)) {
// All fast byte array accesses are safe from exceptions so return immediately to avoid exception checks.
- ctiPatchCallByReturnAddress(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_val_byte_array));
+ ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_val_byte_array));
return JSValue::encode(asByteArray(baseValue)->getIndex(callFrame, i));
} else
result = baseValue.get(callFrame, i);
@@ -1566,7 +1565,7 @@ DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_val_string)
else {
result = baseValue.get(callFrame, i);
if (!isJSString(globalData, baseValue))
- ctiPatchCallByReturnAddress(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_val));
+ ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_val));
}
} else {
Identifier property(callFrame, subscript.toString(callFrame));
@@ -1599,7 +1598,7 @@ DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_val_byte_array)
result = baseValue.get(callFrame, i);
if (!isJSByteArray(globalData, baseValue))
- ctiPatchCallByReturnAddress(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_val));
+ ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_val));
} else {
Identifier property(callFrame, subscript.toString(callFrame));
result = baseValue.get(callFrame, property);
@@ -1692,7 +1691,7 @@ DEFINE_STUB_FUNCTION(void, op_put_by_val)
jsArray->JSArray::put(callFrame, i, value);
} else if (isJSByteArray(globalData, baseValue) && asByteArray(baseValue)->canAccessIndex(i)) {
JSByteArray* jsByteArray = asByteArray(baseValue);
- ctiPatchCallByReturnAddress(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_put_by_val_byte_array));
+ ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_put_by_val_byte_array));
// All fast byte array accesses are safe from exceptions so return immediately to avoid exception checks.
if (value.isInt32Fast()) {
jsByteArray->setIndex(i, value.getInt32Fast());
@@ -1776,7 +1775,7 @@ DEFINE_STUB_FUNCTION(void, op_put_by_val_byte_array)
}
if (!isJSByteArray(globalData, baseValue))
- ctiPatchCallByReturnAddress(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_put_by_val));
+ ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_put_by_val));
baseValue.put(callFrame, i, value);
} else {
Identifier property(callFrame, subscript.toString(callFrame));