Diffstat (limited to 'src/3rdparty/javascriptcore/JavaScriptCore/jit/JITPropertyAccess.cpp')
-rw-r--r--  src/3rdparty/javascriptcore/JavaScriptCore/jit/JITPropertyAccess.cpp  240
1 file changed, 188 insertions, 52 deletions
diff --git a/src/3rdparty/javascriptcore/JavaScriptCore/jit/JITPropertyAccess.cpp b/src/3rdparty/javascriptcore/JavaScriptCore/jit/JITPropertyAccess.cpp
index 9edfd01..ef95f99 100644
--- a/src/3rdparty/javascriptcore/JavaScriptCore/jit/JITPropertyAccess.cpp
+++ b/src/3rdparty/javascriptcore/JavaScriptCore/jit/JITPropertyAccess.cpp
@@ -33,6 +33,7 @@
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
+#include "JSPropertyNameIterator.h"
#include "Interpreter.h"
#include "LinkBuffer.h"
#include "RepatchBuffer.h"
@@ -211,12 +212,17 @@ void JIT::emit_op_method_check(Instruction* currentInstruction)
emitLoad(base, regT1, regT0);
emitJumpSlowCaseIfNotJSCell(base, regT1);
+ BEGIN_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);
+
Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), info.structureToCompare, ImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
DataLabelPtr protoStructureToCompare, protoObj = moveWithPatch(ImmPtr(0), regT2);
Jump protoStructureCheck = branchPtrWithPatch(NotEqual, Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), protoStructureToCompare, ImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
// This will be relinked to load the function without doing a load.
DataLabelPtr putFunction = moveWithPatch(ImmPtr(0), regT0);
+
+ END_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);
+
move(Imm32(JSValue::CellTag), regT1);
Jump match = jump();
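The BEGIN/END_UNINTERRUPTED_SEQUENCE markers bracket the patchable part of the method-check stub so the assembler cannot interleave anything, such as a constant-pool flush on targets that use one, between the labels whose relative offsets the repatching code depends on. A minimal stand-alone sketch of the idea follows; the names, sizes, and interface here are illustrative, not the engine's real macros.

    // Reserve worst-case space before emitting a patchable sequence, so the
    // assembler never has to flush a constant pool or grow its buffer in the
    // middle of it; the distances between the labels then stay fixed.
    struct MockAssembler {
        int bytesLeft = 4096;

        void ensureSpace(int instructionBytes, int constantBytes)
        {
            if (bytesLeft < instructionBytes + constantBytes)
                flushConstantPoolAndGrow(); // safe here, before the sequence starts
            bytesLeft -= instructionBytes + constantBytes;
        }

        void flushConstantPoolAndGrow() { bytesLeft = 4096; }
    };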
@@ -373,6 +379,9 @@ void JIT::compileGetByIdHotPath()
// Additionally, for get_by_id we need to patch the offset of the branch to the slow case (we patch this to jump
// to array-length / prototype access trampolines), and finally we also record the property-map access offset as a label
// to jump back to if one of these trampolines finds a match.
+
+ BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
+
Label hotPathBegin(this);
m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex].hotPathBegin = hotPathBegin;
m_propertyAccessInstructionIndex++;
@@ -395,6 +404,8 @@ void JIT::compileGetByIdHotPath()
Label putResult(this);
ASSERT(differenceBetween(hotPathBegin, putResult) == patchOffsetGetByIdPutResult);
+
+ END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
}
void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
@@ -416,13 +427,18 @@ void JIT::compileGetByIdSlowCase(int dst, int base, Identifier* ident, Vector<Sl
linkSlowCaseIfNotJSCell(iter, base);
linkSlowCase(iter);
- Label coldPathBegin(this);
+ BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);
+#ifndef NDEBUG
+ Label coldPathBegin(this);
+#endif
JITStubCall stubCall(this, isMethodCheck ? cti_op_get_by_id_method_check : cti_op_get_by_id);
stubCall.addArgument(regT1, regT0);
stubCall.addArgument(ImmPtr(ident));
Call call = stubCall.call(dst);
+ END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);
+
ASSERT(differenceBetween(coldPathBegin, call) == patchOffsetGetByIdSlowCaseCall);
// Track the location of the call; this will be used to recover patch information.
@@ -443,6 +459,8 @@ void JIT::emit_op_put_by_id(Instruction* currentInstruction)
emitJumpSlowCaseIfNotJSCell(base, regT1);
+ BEGIN_UNINTERRUPTED_SEQUENCE(sequencePutById);
+
Label hotPathBegin(this);
m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex].hotPathBegin = hotPathBegin;
m_propertyAccessInstructionIndex++;
@@ -460,6 +478,9 @@ void JIT::emit_op_put_by_id(Instruction* currentInstruction)
DataLabel32 displacementLabel1 = storePtrWithAddressOffsetPatch(regT2, Address(regT0, patchGetByIdDefaultOffset)); // payload
DataLabel32 displacementLabel2 = storePtrWithAddressOffsetPatch(regT3, Address(regT0, patchGetByIdDefaultOffset)); // tag
+
+ END_UNINTERRUPTED_SEQUENCE(sequencePutById);
+
ASSERT(differenceBetween(hotPathBegin, displacementLabel1) == patchOffsetPutByIdPropertyMapOffset1);
ASSERT(differenceBetween(hotPathBegin, displacementLabel2) == patchOffsetPutByIdPropertyMapOffset2);
}
@@ -521,22 +542,26 @@ void JIT::compileGetDirectOffset(JSObject* base, RegisterID temp, RegisterID res
load32(Address(temp, offset + 4), resultTag);
}
+void JIT::testPrototype(Structure* structure, JumpList& failureCases)
+{
+ if (structure->m_prototype.isNull())
+ return;
+
+ failureCases.append(branchPtr(NotEqual, AbsoluteAddress(&asCell(structure->m_prototype)->m_structure), ImmPtr(asCell(structure->m_prototype)->m_structure)));
+}
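testPrototype() replaces the per-link loads of the old prototype-chain walk: instead of reloading each prototype and its structure at run time, the stub bakes the prototype cell's structure-field address and the Structure* observed at compile time directly into the code, so each link costs a single compare against an absolute address. A stand-alone sketch of the invariant being checked, using mock types rather than the engine's:

    #include <cstdio>

    struct Structure;
    struct Cell { Structure* structure; };

    // What the generated check amounts to: both operands were captured when
    // the stub was compiled, so validity reduces to one memory compare.
    struct BakedPrototypeCheck {
        Structure** structureAddress; // &prototypeCell->structure, an immediate in the stub
        Structure* expected;          // the Structure* seen at compile time
        bool stillValid() const { return *structureAddress == expected; }
    };

    int main()
    {
        Cell proto { reinterpret_cast<Structure*>(0x1234) };
        BakedPrototypeCheck check { &proto.structure, proto.structure };
        std::printf("%d\n", check.stillValid()); // 1 until proto's structure changes
    }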
+
void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress)
{
// It is assumed that regT0 contains the basePayload and regT1 contains the baseTag. The value can be found on the stack.
JumpList failureCases;
failureCases.append(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
-
- loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
- failureCases.append(branchPtr(NotEqual, regT2, ImmPtr(oldStructure)));
+ failureCases.append(branchPtr(NotEqual, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), ImmPtr(oldStructure)));
+ testPrototype(oldStructure, failureCases);
// Verify that nothing in the prototype chain has a setter for this property.
- for (RefPtr<Structure>* it = chain->head(); *it; ++it) {
- loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype)), regT2);
- loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
- failureCases.append(branchPtr(NotEqual, regT2, ImmPtr(it->get())));
- }
+ for (RefPtr<Structure>* it = chain->head(); *it; ++it)
+ testPrototype(it->get(), failureCases);
// Reallocate property storage if needed.
Call callTarget;
@@ -705,7 +730,7 @@ void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* str
// Check the prototype object's Structure had not changed.
Structure** prototypeStructureAddress = &(protoObject->m_structure);
-#if PLATFORM(X86_64)
+#if CPU(X86_64)
move(ImmPtr(prototypeStructure), regT3);
Jump failureCases2 = branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), regT3);
#else
@@ -785,7 +810,7 @@ void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, Polymorphi
// Check the prototype object's Structure had not changed.
Structure** prototypeStructureAddress = &(protoObject->m_structure);
-#if PLATFORM(X86_64)
+#if CPU(X86_64)
move(ImmPtr(prototypeStructure), regT3);
Jump failureCases2 = branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), regT3);
#else
@@ -838,7 +863,7 @@ void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, Polymorphi
// Check the prototype object's Structure had not changed.
Structure** prototypeStructureAddress = &(protoObject->m_structure);
-#if PLATFORM(X86_64)
+#if CPU(X86_64)
move(ImmPtr(currStructure), regT3);
bucketsOfFail.append(branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), regT3));
#else
@@ -893,7 +918,7 @@ void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* str
// Check the prototype object's Structure had not changed.
Structure** prototypeStructureAddress = &(protoObject->m_structure);
-#if PLATFORM(X86_64)
+#if CPU(X86_64)
move(ImmPtr(currStructure), regT3);
bucketsOfFail.append(branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), regT3));
#else
@@ -930,6 +955,69 @@ void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* str
#endif // !ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
+void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID structure, RegisterID offset)
+{
+ ASSERT(sizeof(((Structure*)0)->m_propertyStorageCapacity) == sizeof(int32_t));
+ ASSERT(sizeof(JSObject::inlineStorageCapacity) == sizeof(int32_t));
+ ASSERT(sizeof(JSValue) == 8);
+
+ Jump notUsingInlineStorage = branch32(NotEqual, Address(structure, OBJECT_OFFSETOF(Structure, m_propertyStorageCapacity)), Imm32(JSObject::inlineStorageCapacity));
+ loadPtr(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSObject, m_inlineStorage)+OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
+ loadPtr(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSObject, m_inlineStorage)+OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
+ Jump finishedLoad = jump();
+ notUsingInlineStorage.link(this);
+ loadPtr(Address(base, OBJECT_OFFSETOF(JSObject, m_externalStorage)), base);
+ loadPtr(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
+ loadPtr(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
+ finishedLoad.link(this);
+}
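This helper picks between the two property-storage layouts at run time: if the structure's capacity still equals JSObject::inlineStorageCapacity, the slots live inside the object itself; otherwise they live in the separately allocated m_externalStorage array. In the JSVALUE32_64 representation each slot is 8 bytes, read as a separate payload and tag. A stand-alone illustration of that selection logic, with names and sizes that are illustrative rather than the engine's:

    #include <cstdio>

    struct MockObject {
        static const int inlineCapacity = 4;
        int propertyStorageCapacity = inlineCapacity;
        double inlineStorage[inlineCapacity] = { 10, 20, 30, 40 };
        double* externalStorage = nullptr; // used once the object outgrows inline slots

        double directSlot(int offset) const
        {
            if (propertyStorageCapacity == inlineCapacity)
                return inlineStorage[offset]; // no extra indirection
            return externalStorage[offset];   // one extra pointer load
        }
    };

    int main()
    {
        MockObject o;
        std::printf("%g\n", o.directSlot(2)); // prints 30
    }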
+
+void JIT::emit_op_get_by_pname(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned base = currentInstruction[2].u.operand;
+ unsigned property = currentInstruction[3].u.operand;
+ unsigned expected = currentInstruction[4].u.operand;
+ unsigned iter = currentInstruction[5].u.operand;
+ unsigned i = currentInstruction[6].u.operand;
+
+ emitLoad2(property, regT1, regT0, base, regT3, regT2);
+ emitJumpSlowCaseIfNotJSCell(property, regT1);
+ addSlowCase(branchPtr(NotEqual, regT0, payloadFor(expected)));
+ // Property registers are now available as the property is known
+ emitJumpSlowCaseIfNotJSCell(base, regT3);
+ emitLoadPayload(iter, regT1);
+
+ // Test base's structure
+ loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
+ addSlowCase(branchPtr(NotEqual, regT0, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));
+ load32(addressFor(i), regT3);
+ sub32(Imm32(1), regT3);
+ addSlowCase(branch32(AboveOrEqual, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_numCacheableSlots))));
+ compileGetDirectOffset(regT2, regT1, regT0, regT0, regT3);
+
+ emitStore(dst, regT1, regT0);
+ map(m_bytecodeIndex + OPCODE_LENGTH(op_get_by_pname), dst, regT1, regT0);
+}
+
+void JIT::emitSlow_op_get_by_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned base = currentInstruction[2].u.operand;
+ unsigned property = currentInstruction[3].u.operand;
+
+ linkSlowCaseIfNotJSCell(iter, property);
+ linkSlowCase(iter);
+ linkSlowCaseIfNotJSCell(iter, base);
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+
+ JITStubCall stubCall(this, cti_op_get_by_val);
+ stubCall.addArgument(base);
+ stubCall.addArgument(property);
+ stubCall.call(dst);
+}
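op_get_by_pname is the cached path for reading a property named by a for-in iteration variable: the fast path checks that the property value is still the one the iterator produced, that the base object's structure matches the iterator's m_cachedStructure, and that the index (from which 1 is subtracted to form the slot offset) falls within m_numCacheableSlots; only then does it load the slot directly, and anything else falls back to cti_op_get_by_val. The range check uses a single branch32(AboveOrEqual, ...), which compares unsigned, so one branch covers both bounds. A stand-alone illustration of that compare's arithmetic:

    #include <cstdint>
    #include <cstdio>

    // An unsigned comparison of i - 1 against the slot count rejects both
    // out-of-range indices and a zero index (which wraps to UINT32_MAX)
    // with a single branch.
    static bool fitsCachedSlot(uint32_t i, uint32_t numCacheableSlots)
    {
        return (i - 1u) < numCacheableSlots;
    }

    int main()
    {
        std::printf("%d %d %d\n",
                    fitsCachedSlot(0, 8),  // 0: wraps, slow path
                    fitsCachedSlot(3, 8),  // 1: cached slot
                    fitsCachedSlot(9, 8)); // 0: past the cacheable slots
    }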
+
#else // USE(JSVALUE32_64)
void JIT::emit_op_get_by_val(Instruction* currentInstruction)
@@ -963,6 +1051,62 @@ void JIT::emit_op_get_by_val(Instruction* currentInstruction)
emitPutVirtualRegister(dst);
}
+void JIT::compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID structure, RegisterID offset, RegisterID scratch)
+{
+ ASSERT(sizeof(((Structure*)0)->m_propertyStorageCapacity) == sizeof(int32_t));
+ ASSERT(sizeof(JSObject::inlineStorageCapacity) == sizeof(int32_t));
+
+ Jump notUsingInlineStorage = branch32(NotEqual, Address(structure, OBJECT_OFFSETOF(Structure, m_propertyStorageCapacity)), Imm32(JSObject::inlineStorageCapacity));
+ loadPtr(BaseIndex(base, offset, ScalePtr, OBJECT_OFFSETOF(JSObject, m_inlineStorage)), result);
+ Jump finishedLoad = jump();
+ notUsingInlineStorage.link(this);
+ loadPtr(Address(base, OBJECT_OFFSETOF(JSObject, m_externalStorage)), scratch);
+ loadPtr(BaseIndex(scratch, offset, ScalePtr, 0), result);
+ finishedLoad.link(this);
+}
+
+void JIT::emit_op_get_by_pname(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned base = currentInstruction[2].u.operand;
+ unsigned property = currentInstruction[3].u.operand;
+ unsigned expected = currentInstruction[4].u.operand;
+ unsigned iter = currentInstruction[5].u.operand;
+ unsigned i = currentInstruction[6].u.operand;
+
+ emitGetVirtualRegister(property, regT0);
+ addSlowCase(branchPtr(NotEqual, regT0, addressFor(expected)));
+ emitGetVirtualRegisters(base, regT0, iter, regT1);
+ emitJumpSlowCaseIfNotJSCell(regT0, base);
+
+ // Test base's structure
+ loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
+ addSlowCase(branchPtr(NotEqual, regT2, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));
+ load32(addressFor(i), regT3);
+ sub32(Imm32(1), regT3);
+ addSlowCase(branch32(AboveOrEqual, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_numCacheableSlots))));
+ compileGetDirectOffset(regT0, regT0, regT2, regT3, regT1);
+
+ emitPutVirtualRegister(dst, regT0);
+}
+
+void JIT::emitSlow_op_get_by_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned base = currentInstruction[2].u.operand;
+ unsigned property = currentInstruction[3].u.operand;
+
+ linkSlowCase(iter);
+ linkSlowCaseIfNotJSCell(iter, base);
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+
+ JITStubCall stubCall(this, cti_op_get_by_val);
+ stubCall.addArgument(base, regT2);
+ stubCall.addArgument(property, regT2);
+ stubCall.call(dst);
+}
+
void JIT::emit_op_put_by_val(Instruction* currentInstruction)
{
unsigned base = currentInstruction[1].u.operand;
@@ -1128,9 +1272,9 @@ void JIT::emit_op_method_check(Instruction* currentInstruction)
Jump match = jump();
- ASSERT(differenceBetween(info.structureToCompare, protoObj) == patchOffsetMethodCheckProtoObj);
- ASSERT(differenceBetween(info.structureToCompare, protoStructureToCompare) == patchOffsetMethodCheckProtoStruct);
- ASSERT(differenceBetween(info.structureToCompare, putFunction) == patchOffsetMethodCheckPutFunction);
+ ASSERT_JIT_OFFSET(differenceBetween(info.structureToCompare, protoObj), patchOffsetMethodCheckProtoObj);
+ ASSERT_JIT_OFFSET(differenceBetween(info.structureToCompare, protoStructureToCompare), patchOffsetMethodCheckProtoStruct);
+ ASSERT_JIT_OFFSET(differenceBetween(info.structureToCompare, putFunction), patchOffsetMethodCheckPutFunction);
// Link the failure cases here.
notCell.link(this);
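ASSERT_JIT_OFFSET replaces the bare ASSERTs around the patchable sequences; presumably it reports the offending offset values rather than just failing, which makes it much easier to retune the patchOffset* constants for a new target. A self-contained sketch of what such a macro can look like; the real definition lives in the JIT headers and wraps WTF's assertion machinery, so the details will differ:

    #include <cstdio>
    #include <cstdlib>

    // Report both the expected and the measured offset before aborting, so a
    // mismatch immediately tells you which patchOffset constant to adjust.
    #define ASSERT_JIT_OFFSET(actual, expected) \
        do { \
            if ((actual) != (expected)) { \
                std::fprintf(stderr, "JIT offset should be %d, not %d\n", \
                             static_cast<int>(expected), static_cast<int>(actual)); \
                std::abort(); \
            } \
        } while (false)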
@@ -1197,22 +1341,22 @@ void JIT::compileGetByIdHotPath(int, int baseVReg, Identifier*, unsigned propert
DataLabelPtr structureToCompare;
Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), structureToCompare, ImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
addSlowCase(structureCheck);
- ASSERT(differenceBetween(hotPathBegin, structureToCompare) == patchOffsetGetByIdStructure);
- ASSERT(differenceBetween(hotPathBegin, structureCheck) == patchOffsetGetByIdBranchToSlowCase);
+ ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, structureToCompare), patchOffsetGetByIdStructure);
+ ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, structureCheck), patchOffsetGetByIdBranchToSlowCase);
Label externalLoad = loadPtrWithPatchToLEA(Address(regT0, OBJECT_OFFSETOF(JSObject, m_externalStorage)), regT0);
Label externalLoadComplete(this);
- ASSERT(differenceBetween(hotPathBegin, externalLoad) == patchOffsetGetByIdExternalLoad);
- ASSERT(differenceBetween(externalLoad, externalLoadComplete) == patchLengthGetByIdExternalLoad);
+ ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, externalLoad), patchOffsetGetByIdExternalLoad);
+ ASSERT_JIT_OFFSET(differenceBetween(externalLoad, externalLoadComplete), patchLengthGetByIdExternalLoad);
DataLabel32 displacementLabel = loadPtrWithAddressOffsetPatch(Address(regT0, patchGetByIdDefaultOffset), regT0);
- ASSERT(differenceBetween(hotPathBegin, displacementLabel) == patchOffsetGetByIdPropertyMapOffset);
+ ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, displacementLabel), patchOffsetGetByIdPropertyMapOffset);
Label putResult(this);
END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
- ASSERT(differenceBetween(hotPathBegin, putResult) == patchOffsetGetByIdPutResult);
+ ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, putResult), patchOffsetGetByIdPutResult);
}
void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
@@ -1247,7 +1391,7 @@ void JIT::compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident
END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);
- ASSERT(differenceBetween(coldPathBegin, call) == patchOffsetGetByIdSlowCaseCall);
+ ASSERT_JIT_OFFSET(differenceBetween(coldPathBegin, call), patchOffsetGetByIdSlowCaseCall);
// Track the location of the call; this will be used to recover patch information.
m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex].callReturnLocation = call;
@@ -1278,19 +1422,19 @@ void JIT::emit_op_put_by_id(Instruction* currentInstruction)
// It is important that the following instruction plants a 32bit immediate, in order that it can be patched over.
DataLabelPtr structureToCompare;
addSlowCase(branchPtrWithPatch(NotEqual, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), structureToCompare, ImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure))));
- ASSERT(differenceBetween(hotPathBegin, structureToCompare) == patchOffsetPutByIdStructure);
+ ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, structureToCompare), patchOffsetPutByIdStructure);
// Plant a load from a bogus offset in the object's property map; we will patch this later, if it is to be used.
Label externalLoad = loadPtrWithPatchToLEA(Address(regT0, OBJECT_OFFSETOF(JSObject, m_externalStorage)), regT0);
Label externalLoadComplete(this);
- ASSERT(differenceBetween(hotPathBegin, externalLoad) == patchOffsetPutByIdExternalLoad);
- ASSERT(differenceBetween(externalLoad, externalLoadComplete) == patchLengthPutByIdExternalLoad);
+ ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, externalLoad), patchOffsetPutByIdExternalLoad);
+ ASSERT_JIT_OFFSET(differenceBetween(externalLoad, externalLoadComplete), patchLengthPutByIdExternalLoad);
DataLabel32 displacementLabel = storePtrWithAddressOffsetPatch(regT1, Address(regT0, patchGetByIdDefaultOffset));
END_UNINTERRUPTED_SEQUENCE(sequencePutById);
- ASSERT(differenceBetween(hotPathBegin, displacementLabel) == patchOffsetPutByIdPropertyMapOffset);
+ ASSERT_JIT_OFFSET(differenceBetween(hotPathBegin, displacementLabel), patchOffsetPutByIdPropertyMapOffset);
}
void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
@@ -1347,35 +1491,27 @@ void JIT::compileGetDirectOffset(JSObject* base, RegisterID temp, RegisterID res
}
}
+void JIT::testPrototype(Structure* structure, JumpList& failureCases)
+{
+ if (structure->m_prototype.isNull())
+ return;
+
+ move(ImmPtr(&asCell(structure->m_prototype)->m_structure), regT2);
+ move(ImmPtr(asCell(structure->m_prototype)->m_structure), regT3);
+ failureCases.append(branchPtr(NotEqual, Address(regT2), regT3));
+}
+
void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress)
{
JumpList failureCases;
// Check eax is an object of the right Structure.
failureCases.append(emitJumpIfNotJSCell(regT0));
failureCases.append(branchPtr(NotEqual, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), ImmPtr(oldStructure)));
- JumpList successCases;
-
- // ecx = baseObject
- loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
- // proto(ecx) = baseObject->structure()->prototype()
- failureCases.append(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo) + OBJECT_OFFSETOF(TypeInfo, m_type)), Imm32(ObjectType)));
+ testPrototype(oldStructure, failureCases);
- loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype)), regT2);
-
// ecx = baseObject->m_structure
- for (RefPtr<Structure>* it = chain->head(); *it; ++it) {
- // null check the prototype
- successCases.append(branchPtr(Equal, regT2, ImmPtr(JSValue::encode(jsNull()))));
-
- // Check the structure id
- failureCases.append(branchPtr(NotEqual, Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), ImmPtr(it->get())));
-
- loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
- failureCases.append(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo) + OBJECT_OFFSETOF(TypeInfo, m_type)), Imm32(ObjectType)));
- loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype)), regT2);
- }
-
- successCases.link(this);
+ for (RefPtr<Structure>* it = chain->head(); *it; ++it)
+ testPrototype(it->get(), failureCases);
Call callTarget;
@@ -1540,7 +1676,7 @@ void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* str
// Check the prototype object's Structure had not changed.
Structure** prototypeStructureAddress = &(protoObject->m_structure);
-#if PLATFORM(X86_64)
+#if CPU(X86_64)
move(ImmPtr(prototypeStructure), regT3);
Jump failureCases2 = branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), regT3);
#else
@@ -1615,7 +1751,7 @@ void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, Polymorphi
// Check the prototype object's Structure had not changed.
Structure** prototypeStructureAddress = &(protoObject->m_structure);
-#if PLATFORM(X86_64)
+#if CPU(X86_64)
move(ImmPtr(prototypeStructure), regT3);
Jump failureCases2 = branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), regT3);
#else
@@ -1668,7 +1804,7 @@ void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, Polymorphi
// Check the prototype object's Structure had not changed.
Structure** prototypeStructureAddress = &(protoObject->m_structure);
-#if PLATFORM(X86_64)
+#if CPU(X86_64)
move(ImmPtr(currStructure), regT3);
bucketsOfFail.append(branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), regT3));
#else
@@ -1721,7 +1857,7 @@ void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* str
// Check the prototype object's Structure had not changed.
Structure** prototypeStructureAddress = &(protoObject->m_structure);
-#if PLATFORM(X86_64)
+#if CPU(X86_64)
move(ImmPtr(currStructure), regT3);
bucketsOfFail.append(branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), regT3));
#else