Diff
Modified: trunk/Source/_javascript_Core/CMakeLists.txt (283388 => 283389)
--- trunk/Source/_javascript_Core/CMakeLists.txt 2021-10-01 19:01:08 UTC (rev 283388)
+++ trunk/Source/_javascript_Core/CMakeLists.txt 2021-10-01 19:06:05 UTC (rev 283389)
@@ -839,6 +839,7 @@
jit/AssemblyHelpers.h
jit/AssemblyHelpersSpoolers.h
jit/BaselineJITCode.h
+ jit/CallFrameShuffleData.h
jit/CCallHelpers.h
jit/ExecutableAllocator.h
jit/ExecutableMemoryHandle.h
@@ -859,6 +860,7 @@
jit/Reg.h
jit/RegisterAtOffset.h
jit/RegisterAtOffsetList.h
+ jit/RegisterMap.h
jit/RegisterSet.h
jit/Snippet.h
jit/SnippetParams.h
Modified: trunk/Source/_javascript_Core/ChangeLog (283388 => 283389)
--- trunk/Source/_javascript_Core/ChangeLog 2021-10-01 19:01:08 UTC (rev 283388)
+++ trunk/Source/_javascript_Core/ChangeLog 2021-10-01 19:06:05 UTC (rev 283389)
@@ -1,3 +1,49 @@
+2021-10-01 Xan López <x...@igalia.com>
+
+ [JSC][32bit] Fix build after unlinked baseline JIT
+ https://bugs.webkit.org/show_bug.cgi?id=230803
+
+ Reviewed by Saam Barati.
+
+ This is enough to make things build, but pretty much nothing works
+ yet. Will fix in a follow-up, this way we at least give people a
+ chance at not introducing more build regressions.
+
+ (With some code by Mikhail R. Gadelha and Geza Lore)
+
+ * CMakeLists.txt:
+ * assembler/MacroAssemblerARMv7.h:
+ (JSC::MacroAssemblerARMv7::branch32):
+ (JSC::MacroAssemblerARMv7::branchAdd32):
+ * assembler/MacroAssemblerMIPS.h:
+ (JSC::MacroAssemblerMIPS::branchAdd32):
+ * bytecode/CallLinkInfo.h:
+ (JSC::CallLinkInfo::addressOfMaxArgumentCountIncludingThis):
+ * jit/JIT.h:
+ * jit/JITCall32_64.cpp:
+ (JSC::JIT::compileOpCall):
+ * jit/JITInlines.h:
+ (JSC::JIT::emitValueProfilingSite):
+ (JSC::JIT::emitValueProfilingSiteIfProfiledOpcode):
+ (JSC::JIT::emitArrayProfilingSiteWithCell):
+ * jit/JITOpcodes.cpp:
+ (JSC::JIT::emit_op_loop_hint):
+ * jit/JITPropertyAccess.cpp:
+ (JSC::JIT::emitVarInjectionCheck):
+ * jit/JITPropertyAccess32_64.cpp:
+ (JSC::JIT::emitHasPrivateSlow):
+ (JSC::JIT::emitSlow_op_has_private_name):
+ (JSC::JIT::emitSlow_op_has_private_brand):
+ (JSC::JIT::emitResolveClosure):
+ (JSC::JIT::emit_op_resolve_scope):
+ (JSC::JIT::emit_op_get_from_scope):
+ (JSC::JIT::emitPutGlobalVariableIndirect):
+ (JSC::JIT::emit_op_put_to_scope):
+ (JSC::JIT::emitSlow_op_put_to_scope):
+ (JSC::JIT::emitVarInjectionCheck): Deleted.
+ * llint/LowLevelInterpreter.asm:
+ * llint/LowLevelInterpreter32_64.asm:
+
2021-10-01 Yusuke Suzuki <ysuz...@apple.com>
[JSC] Remove CodeBlock::m_numberOfNonArgumentValueProfiles since we can get the same value from UnlinkedCodeBlock
Modified: trunk/Source/_javascript_Core/assembler/MacroAssemblerARMv7.h (283388 => 283389)
--- trunk/Source/_javascript_Core/assembler/MacroAssemblerARMv7.h 2021-10-01 19:01:08 UTC (rev 283388)
+++ trunk/Source/_javascript_Core/assembler/MacroAssemblerARMv7.h 2021-10-01 19:06:05 UTC (rev 283389)
@@ -1610,7 +1610,14 @@
Jump branch32(RelationalCondition cond, RegisterID left, RegisterID right)
{
- m_assembler.cmp(left, right);
+ if (left == ARMRegisters::sp) {
+ move(left, dataTempRegister);
+ m_assembler.cmp(dataTempRegister, right);
+ } else if (right == ARMRegisters::sp) {
+ move(right, dataTempRegister);
+ m_assembler.cmp(left, dataTempRegister);
+ } else
+ m_assembler.cmp(left, right);
return Jump(makeBranch(cond));
}
@@ -1840,27 +1847,13 @@
Jump branchAdd32(ResultCondition cond, TrustedImm32 imm, AbsoluteAddress dest)
{
- // Move the high bits of the address into addressTempRegister,
- // and load the value into dataTempRegister.
- move(TrustedImmPtr(dest.m_ptr), addressTempRegister);
- m_assembler.ldr(dataTempRegister, addressTempRegister, ARMThumbImmediate::makeUInt16(0));
+ add32(imm, dest);
+ return Jump(makeBranch(cond));
+ }
- // Do the add.
- ARMThumbImmediate armImm = ARMThumbImmediate::makeEncodedImm(imm.m_value);
- if (armImm.isValid())
- m_assembler.add_S(dataTempRegister, dataTempRegister, armImm);
- else {
- // If the operand does not fit into an immediate then load it temporarily
- // into addressTempRegister; since we're overwriting addressTempRegister
- // we'll need to reload it with the high bits of the address afterwards.
- move(imm, addressTempRegister);
- m_assembler.add_S(dataTempRegister, dataTempRegister, addressTempRegister);
- move(TrustedImmPtr(dest.m_ptr), addressTempRegister);
- }
-
- // Store the result.
- m_assembler.str(dataTempRegister, addressTempRegister, ARMThumbImmediate::makeUInt16(0));
-
+ Jump branchAdd32(ResultCondition cond, TrustedImm32 imm, Address dest)
+ {
+ add32(imm, dest);
return Jump(makeBranch(cond));
}
Modified: trunk/Source/_javascript_Core/assembler/MacroAssemblerMIPS.h (283388 => 283389)
--- trunk/Source/_javascript_Core/assembler/MacroAssemblerMIPS.h 2021-10-01 19:01:08 UTC (rev 283388)
+++ trunk/Source/_javascript_Core/assembler/MacroAssemblerMIPS.h 2021-10-01 19:06:05 UTC (rev 283389)
@@ -2354,6 +2354,95 @@
return Jump();
}
+ Jump branchAdd32(ResultCondition cond, TrustedImm32 imm, Address dest)
+ {
+ ASSERT((cond == Overflow) || (cond == Signed) || (cond == PositiveOrZero) || (cond == Zero) || (cond == NonZero));
+ if (cond == Overflow) {
+ if (m_fixedWidth) {
+ /*
+ load dest, dataTemp
+ move imm, immTemp
+ xor cmpTemp, dataTemp, immTemp
+ addu dataTemp, dataTemp, immTemp
+ store dataTemp, dest
+ bltz cmpTemp, No_overflow # diff sign bit -> no overflow
+ xor cmpTemp, dataTemp, immTemp
+ bgez cmpTemp, No_overflow # same sign bit -> no overflow
+ nop
+ b Overflow
+ nop
+ b No_overflow
+ nop
+ nop
+ nop
+ No_overflow:
+ */
+ load32(dest, dataTempRegister);
+ move(imm, immTempRegister);
+ m_assembler.xorInsn(cmpTempRegister, dataTempRegister, immTempRegister);
+ m_assembler.addu(dataTempRegister, dataTempRegister, immTempRegister);
+ store32(dataTempRegister, dest);
+ m_assembler.bltz(cmpTempRegister, 9);
+ m_assembler.xorInsn(cmpTempRegister, dataTempRegister, immTempRegister);
+ m_assembler.bgez(cmpTempRegister, 7);
+ m_assembler.nop();
+ } else {
+ m_assembler.lw(dataTempRegister, dest.base, dest.offset);
+ if (imm.m_value >= 0 && imm.m_value <= 32767) {
+ move(dataTempRegister, cmpTempRegister);
+ m_assembler.addiu(dataTempRegister, dataTempRegister, imm.m_value);
+ m_assembler.bltz(cmpTempRegister, 9);
+ m_assembler.sw(dataTempRegister, dest.base, dest.offset);
+ m_assembler.bgez(dataTempRegister, 7);
+ m_assembler.nop();
+ } else if (imm.m_value >= -32768 && imm.m_value < 0) {
+ move(dataTempRegister, cmpTempRegister);
+ m_assembler.addiu(dataTempRegister, dataTempRegister, imm.m_value);
+ m_assembler.bgez(cmpTempRegister, 9);
+ m_assembler.sw(dataTempRegister, dest.base, dest.offset);
+ m_assembler.bltz(cmpTempRegister, 7);
+ m_assembler.nop();
+ } else {
+ move(imm, immTempRegister);
+ m_assembler.xorInsn(cmpTempRegister, dataTempRegister, immTempRegister);
+ m_assembler.addu(dataTempRegister, dataTempRegister, immTempRegister);
+ m_assembler.bltz(cmpTempRegister, 10);
+ m_assembler.sw(dataTempRegister, dest.base, dest.offset);
+ m_assembler.xorInsn(cmpTempRegister, dataTempRegister, immTempRegister);
+ m_assembler.bgez(cmpTempRegister, 7);
+ m_assembler.nop();
+ }
+ }
+ return jump();
+ }
+ if (m_fixedWidth) {
+ move(imm, immTempRegister);
+ load32(dest, dataTempRegister);
+ add32(immTempRegister, dataTempRegister);
+ store32(dataTempRegister, dest);
+ } else {
+ m_assembler.lw(dataTempRegister, dest.base, dest.offset);
+ add32(imm, dataTempRegister);
+ m_assembler.sw(dataTempRegister, dest.base, dest.offset);
+ }
+ if (cond == Signed) {
+ // Check if dest is negative.
+ m_assembler.slt(cmpTempRegister, dataTempRegister, MIPSRegisters::zero);
+ return branchNotEqual(cmpTempRegister, MIPSRegisters::zero);
+ }
+ if (cond == PositiveOrZero) {
+ // Check if dest is not negative.
+ m_assembler.slt(cmpTempRegister, dataTempRegister, MIPSRegisters::zero);
+ return branchEqual(cmpTempRegister, MIPSRegisters::zero);
+ }
+ if (cond == Zero)
+ return branchEqual(dataTempRegister, MIPSRegisters::zero);
+ if (cond == NonZero)
+ return branchNotEqual(dataTempRegister, MIPSRegisters::zero);
+ ASSERT(0);
+ return Jump();
+ }
+
Jump branchMul32(ResultCondition cond, RegisterID src1, RegisterID src2, RegisterID dest)
{
ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
Modified: trunk/Source/_javascript_Core/bytecode/CallLinkInfo.h (283388 => 283389)
--- trunk/Source/_javascript_Core/bytecode/CallLinkInfo.h 2021-10-01 19:01:08 UTC (rev 283388)
+++ trunk/Source/_javascript_Core/bytecode/CallLinkInfo.h 2021-10-01 19:06:05 UTC (rev 283389)
@@ -25,6 +25,7 @@
#pragma once
+#include "CallFrameShuffleData.h"
#include "CallMode.h"
#include "CodeLocation.h"
#include "CodeSpecializationKind.h"
@@ -41,7 +42,6 @@
class FunctionCodeBlock;
class JSFunction;
enum OpcodeID : unsigned;
-struct CallFrameShuffleData;
struct UnlinkedCallLinkInfo;
@@ -314,6 +314,13 @@
return OBJECT_OFFSETOF(CallLinkInfo, m_maxArgumentCountIncludingThis);
}
+#if USE(JSVALUE32_64)
+ uint32_t* addressOfMaxArgumentCountIncludingThis()
+ {
+ return &m_maxArgumentCountIncludingThis;
+ }
+#endif
+
uint32_t maxArgumentCountIncludingThis()
{
return m_maxArgumentCountIncludingThis;
Modified: trunk/Source/_javascript_Core/jit/JIT.h (283388 => 283389)
--- trunk/Source/_javascript_Core/jit/JIT.h 2021-10-01 19:01:08 UTC (rev 283388)
+++ trunk/Source/_javascript_Core/jit/JIT.h 2021-10-01 19:06:05 UTC (rev 283389)
@@ -345,12 +345,13 @@
// This assumes that the value to profile is in regT0 and that regT3 is available for
// scratch.
-#if USE(JSVALUE64)
template<typename Bytecode> void emitValueProfilingSite(const Bytecode&, GPRReg);
template<typename Bytecode> void emitValueProfilingSite(const Bytecode&, JSValueRegs);
-#else
+#if USE(JSVALUE32_64)
void emitValueProfilingSite(ValueProfile&, JSValueRegs);
- template<typename Metadata> void emitValueProfilingSite(Metadata&, JSValueRegs);
+ template<typename Metadata>
+ std::enable_if_t<std::is_same<decltype(Metadata::m_profile), ValueProfile>::value, void>
+ emitValueProfilingSite(Metadata&, JSValueRegs);
#endif
void emitValueProfilingSiteIfProfiledOpcode(...);
@@ -363,6 +364,11 @@
template <typename Bytecode>
void emitArrayProfilingSiteWithCell(const Bytecode&, ptrdiff_t, RegisterID cellGPR, RegisterID scratchGPR);
+#if USE(JSVALUE32_64)
+ void emitArrayProfilingSiteWithCell(RegisterID, ArrayProfile* , RegisterID);
+ void emitArrayProfilingSiteWithCell(RegisterID, RegisterID , RegisterID);
+#endif
+
template<typename Op>
ECMAMode ecmaMode(Op);
Modified: trunk/Source/_javascript_Core/jit/JITCall32_64.cpp (283388 => 283389)
--- trunk/Source/_javascript_Core/jit/JITCall32_64.cpp 2021-10-01 19:01:08 UTC (rev 283388)
+++ trunk/Source/_javascript_Core/jit/JITCall32_64.cpp 2021-10-01 19:06:05 UTC (rev 283389)
@@ -310,7 +310,7 @@
checkStackPointerAlignment();
if (opcodeID == op_tail_call || opcodeID == op_tail_call_varargs || opcodeID == op_tail_call_forward_arguments) {
- auto slowPaths = info->emitTailCallFastPath(*this, regT0, regT2, CallLinkInfo::UseDataIC::Yes, [&] {
+ auto slowPaths = info->emitTailCallDataICFastPath(*this, regT0, regT2, [&] {
emitRestoreCalleeSaves();
prepareForTailCallSlow(regT2);
});
Modified: trunk/Source/_javascript_Core/jit/JITInlines.h (283388 => 283389)
--- trunk/Source/_javascript_Core/jit/JITInlines.h 2021-10-01 19:01:08 UTC (rev 283388)
+++ trunk/Source/_javascript_Core/jit/JITInlines.h 2021-10-01 19:06:05 UTC (rev 283389)
@@ -334,6 +334,12 @@
store32(value.payloadGPR(), &descriptor->asBits.payload);
store32(value.tagGPR(), &descriptor->asBits.tag);
}
+
+template<typename Metadata>
+std::enable_if_t<std::is_same<decltype(Metadata::m_profile), ValueProfile>::value, void> JIT::emitValueProfilingSite(Metadata& metadata, JSValueRegs value)
+{
+ emitValueProfilingSite(valueProfileFor(metadata, m_bytecodeIndex.checkpoint()), value);
+}
#endif
template<typename Op>
@@ -342,13 +348,12 @@
#if USE(JSVALUE64)
emitValueProfilingSite(bytecode, regT0);
#else
- emitValueProfilingSite(bytecode.metadata(m_codeBlock), JSValueRegs(regT1, regT0));
+ emitValueProfilingSite(bytecode, JSValueRegs(regT1, regT0));
#endif
}
inline void JIT::emitValueProfilingSiteIfProfiledOpcode(...) { }
-#if USE(JSVALUE64)
template<typename Bytecode>
inline void JIT::emitValueProfilingSite(const Bytecode& bytecode, JSValueRegs value)
{
@@ -355,8 +360,14 @@
if (!shouldEmitProfiling())
return;
+#if USE(JSVALUE64)
ptrdiff_t offset = m_unlinkedCodeBlock->metadata().offsetInMetadataTable(bytecode) + valueProfileOffsetFor<Bytecode>(m_bytecodeIndex.checkpoint()) + ValueProfile::offsetOfFirstBucket();
store64(value.gpr(), Address(s_metadataGPR, offset));
+#else
+ UNUSED_PARAM(value);
+ UNUSED_PARAM(bytecode);
+ // FIXME.
+#endif
}
template<typename Bytecode>
@@ -364,7 +375,6 @@
{
emitValueProfilingSite(bytecode, JSValueRegs(resultReg));
}
-#endif
template <typename Bytecode>
inline void JIT::emitArrayProfilingSiteWithCell(const Bytecode& bytecode, ptrdiff_t offsetOfArrayProfile, RegisterID cellGPR, RegisterID scratchGPR)
@@ -381,6 +391,24 @@
emitArrayProfilingSiteWithCell(bytecode, Bytecode::Metadata::offsetOfArrayProfile() + ArrayProfile::offsetOfLastSeenStructureID(), cellGPR, scratchGPR);
}
+#if USE(JSVALUE32_64)
+inline void JIT::emitArrayProfilingSiteWithCell(RegisterID cellGPR, ArrayProfile* arrayProfile, RegisterID scratchGPR)
+{
+ if (shouldEmitProfiling()) {
+ load32(MacroAssembler::Address(cellGPR, JSCell::structureIDOffset()), scratchGPR);
+ store32(scratchGPR, arrayProfile->addressOfLastSeenStructureID());
+ }
+}
+
+inline void JIT::emitArrayProfilingSiteWithCell(RegisterID cellGPR, RegisterID arrayProfileGPR, RegisterID scratchGPR)
+{
+ if (shouldEmitProfiling()) {
+ load32(MacroAssembler::Address(cellGPR, JSCell::structureIDOffset()), scratchGPR);
+ store32(scratchGPR, Address(arrayProfileGPR, ArrayProfile::offsetOfLastSeenStructureID()));
+ }
+}
+#endif
+
ALWAYS_INLINE int32_t JIT::getOperandConstantInt(VirtualRegister src)
{
return getConstantOperand(src).asInt32();
Modified: trunk/Source/_javascript_Core/jit/JITOpcodes.cpp (283388 => 283389)
--- trunk/Source/_javascript_Core/jit/JITOpcodes.cpp 2021-10-01 19:01:08 UTC (rev 283388)
+++ trunk/Source/_javascript_Core/jit/JITOpcodes.cpp 2021-10-01 19:06:05 UTC (rev 283389)
@@ -1466,10 +1466,12 @@
#if USE(JSVALUE64)
JSValueRegs resultRegs(GPRInfo::returnValueGPR);
+ loadGlobalObject(resultRegs.gpr());
#else
JSValueRegs resultRegs(GPRInfo::returnValueGPR2, GPRInfo::returnValueGPR);
+ loadGlobalObject(resultRegs.payloadGPR());
+ move(TrustedImm32(JSValue::CellTag), resultRegs.tagGPR());
#endif
- loadGlobalObject(resultRegs.gpr());
checkStackPointerAlignment();
emitRestoreCalleeSaves();
emitFunctionEpilogue();
Modified: trunk/Source/_javascript_Core/jit/JITPropertyAccess.cpp (283388 => 283389)
--- trunk/Source/_javascript_Core/jit/JITPropertyAccess.cpp 2021-10-01 19:01:08 UTC (rev 283388)
+++ trunk/Source/_javascript_Core/jit/JITPropertyAccess.cpp 2021-10-01 19:06:05 UTC (rev 283389)
@@ -1821,16 +1821,6 @@
emitHasPrivateSlow(bytecode.m_dst, bytecode.m_base, bytecode.m_brand, AccessType::HasPrivateBrand);
}
-void JIT::emitVarInjectionCheck(bool needsVarInjectionChecks, GPRReg scratchGPR)
-{
- if (!needsVarInjectionChecks)
- return;
-
- loadGlobalObject(scratchGPR);
- loadPtr(Address(scratchGPR, OBJECT_OFFSETOF(JSGlobalObject, m_varInjectionWatchpoint)), scratchGPR);
- addSlowCase(branch8(Equal, Address(scratchGPR, WatchpointSet::offsetOfState()), TrustedImm32(IsInvalidated)));
-}
-
void JIT::emitResolveClosure(VirtualRegister dst, VirtualRegister scope, bool needsVarInjectionChecks, unsigned depth)
{
emitVarInjectionCheck(needsVarInjectionChecks, regT0);
@@ -3191,6 +3181,16 @@
ownerIsRememberedOrInEden.link(this);
}
+void JIT::emitVarInjectionCheck(bool needsVarInjectionChecks, GPRReg scratchGPR)
+{
+ if (!needsVarInjectionChecks)
+ return;
+
+ loadGlobalObject(scratchGPR);
+ loadPtr(Address(scratchGPR, OBJECT_OFFSETOF(JSGlobalObject, m_varInjectionWatchpoint)), scratchGPR);
+ addSlowCase(branch8(Equal, Address(scratchGPR, WatchpointSet::offsetOfState()), TrustedImm32(IsInvalidated)));
+}
+
} // namespace JSC
#endif // ENABLE(JIT)
Modified: trunk/Source/_javascript_Core/jit/JITPropertyAccess32_64.cpp (283388 => 283389)
--- trunk/Source/_javascript_Core/jit/JITPropertyAccess32_64.cpp 2021-10-01 19:01:08 UTC (rev 283388)
+++ trunk/Source/_javascript_Core/jit/JITPropertyAccess32_64.cpp 2021-10-01 19:06:05 UTC (rev 283389)
@@ -846,9 +846,12 @@
emitStore(dst, regT1, regT0);
}
-void JIT::emitHasPrivateSlow(VirtualRegister dst, AccessType type)
+void JIT::emitHasPrivateSlow(VirtualRegister dst, VirtualRegister base, VirtualRegister property, AccessType type)
{
+ // FIXME: 64-bit seems to also ignore base/property.
ASSERT(type == AccessType::HasPrivateName || type == AccessType::HasPrivateBrand);
+ UNUSED_PARAM(base);
+ UNUSED_PARAM(property);
JITInByValGenerator& gen = m_inByVals[m_inByValIndex++];
Label coldPathBegin = label();
@@ -869,7 +872,7 @@
linkAllSlowCases(iter);
auto bytecode = currentInstruction->as<OpHasPrivateName>();
- emitHasPrivateSlow(bytecode.m_dst, AccessType::HasPrivateName);
+ emitHasPrivateSlow(bytecode.m_dst, bytecode.m_base, bytecode.m_property, AccessType::HasPrivateName);
}
void JIT::emit_op_has_private_brand(const Instruction* currentInstruction)
@@ -883,19 +886,12 @@
linkAllSlowCases(iter);
auto bytecode = currentInstruction->as<OpHasPrivateBrand>();
- emitHasPrivateSlow(bytecode.m_dst, AccessType::HasPrivateBrand);
+ emitHasPrivateSlow(bytecode.m_dst, bytecode.m_base, bytecode.m_brand, AccessType::HasPrivateBrand);
}
-void JIT::emitVarInjectionCheck(bool needsVarInjectionChecks)
-{
- if (!needsVarInjectionChecks)
- return;
- addSlowCase(branch8(Equal, AbsoluteAddress(m_profiledCodeBlock->globalObject()->varInjectionWatchpoint()->addressOfState()), TrustedImm32(IsInvalidated)));
-}
-
void JIT::emitResolveClosure(VirtualRegister dst, VirtualRegister scope, bool needsVarInjectionChecks, unsigned depth)
{
- emitVarInjectionCheck(needsVarInjectionChecks);
+ emitVarInjectionCheck(needsVarInjectionChecks, regT0);
move(TrustedImm32(JSValue::CellTag), regT1);
emitLoadPayload(scope, regT0);
for (unsigned i = 0; i < depth; ++i)
@@ -918,7 +914,7 @@
case GlobalPropertyWithVarInjectionChecks: {
JSScope* constantScope = JSScope::constantScopeForCodeBlock(resolveType, m_profiledCodeBlock);
RELEASE_ASSERT(constantScope);
- emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
+ emitVarInjectionCheck(needsVarInjectionChecks(resolveType), regT0);
load32(&metadata.m_globalLexicalBindingEpoch, regT1);
addSlowCase(branch32(NotEqual, AbsoluteAddress(m_profiledCodeBlock->globalObject()->addressOfGlobalLexicalBindingEpoch()), regT1));
move(TrustedImm32(JSValue::CellTag), regT1);
@@ -933,7 +929,7 @@
case GlobalLexicalVarWithVarInjectionChecks: {
JSScope* constantScope = JSScope::constantScopeForCodeBlock(resolveType, m_profiledCodeBlock);
RELEASE_ASSERT(constantScope);
- emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
+ emitVarInjectionCheck(needsVarInjectionChecks(resolveType), regT0);
move(TrustedImm32(JSValue::CellTag), regT1);
move(TrustedImmPtr(constantScope), regT0);
emitStore(dst, regT1, regT0);
@@ -1073,7 +1069,7 @@
case GlobalVarWithVarInjectionChecks:
case GlobalLexicalVar:
case GlobalLexicalVarWithVarInjectionChecks:
- emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
+ emitVarInjectionCheck(needsVarInjectionChecks(resolveType), regT0);
if (indirectLoadForOperand)
emitGetVarFromIndirectPointer(bitwise_cast<JSValue**>(operandSlot), regT1, regT0);
else
@@ -1083,7 +1079,7 @@
break;
case ClosureVar:
case ClosureVarWithVarInjectionChecks:
- emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
+ emitVarInjectionCheck(needsVarInjectionChecks(resolveType), regT0);
emitGetClosureVar(scope, *operandSlot);
break;
case Dynamic:
@@ -1172,7 +1168,7 @@
{
emitLoad(value, regT1, regT0);
loadPtr(indirectWatchpointSet, regT2);
- emitNotifyWrite(regT2);
+ emitNotifyWrite(*indirectWatchpointSet); // FIXME: ??
loadPtr(addressOfOperand, regT2);
store32(regT1, Address(regT2, TagOffset));
store32(regT0, Address(regT2, PayloadOffset));
@@ -1193,7 +1189,7 @@
auto& metadata = bytecode.metadata(m_profiledCodeBlock);
VirtualRegister scope = bytecode.m_scope;
VirtualRegister value = bytecode.m_value;
- GetPutInfo getPutInfo = copiedGetPutInfo(bytecode);
+ GetPutInfo getPutInfo = bytecode.metadata(m_profiledCodeBlock).m_getPutInfo;
ResolveType resolveType = getPutInfo.resolveType();
Structure** structureSlot = metadata.m_structure.slot();
uintptr_t* operandSlot = reinterpret_cast<uintptr_t*>(&metadata.m_operand);
@@ -1220,8 +1216,8 @@
JSScope* constantScope = JSScope::constantScopeForCodeBlock(resolveType, m_profiledCodeBlock);
RELEASE_ASSERT(constantScope);
emitWriteBarrier(constantScope, value, ShouldFilterValue);
- emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
- emitVarReadOnlyCheck(resolveType);
+ emitVarInjectionCheck(needsVarInjectionChecks(resolveType), regT0);
+ emitVarReadOnlyCheck(resolveType, regT0);
if (!isInitialization(getPutInfo.initializationMode()) && (resolveType == GlobalLexicalVar || resolveType == GlobalLexicalVarWithVarInjectionChecks)) {
// We need to do a TDZ check here because we can't always prove we need to emit TDZ checks statically.
if (indirectLoadForOperand)
@@ -1240,7 +1236,7 @@
case ClosureVar:
case ClosureVarWithVarInjectionChecks:
emitWriteBarrier(scope, value, ShouldFilterValue);
- emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
+ emitVarInjectionCheck(needsVarInjectionChecks(resolveType), regT0);
emitPutClosureVar(scope, *operandSlot, value, metadata.m_watchpointSet);
break;
case ModuleVar:
@@ -1313,7 +1309,7 @@
linkAllSlowCases(iter);
auto bytecode = currentInstruction->as<OpPutToScope>();
- ResolveType resolveType = copiedGetPutInfo(bytecode).resolveType();
+ ResolveType resolveType = bytecode.metadata(m_profiledCodeBlock).m_getPutInfo.resolveType();
if (resolveType == ModuleVar) {
JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_strict_mode_readonly_property_write_error);
slowPathCall.call();
Modified: trunk/Source/_javascript_Core/llint/LowLevelInterpreter.asm (283388 => 283389)
--- trunk/Source/_javascript_Core/llint/LowLevelInterpreter.asm 2021-10-01 19:01:08 UTC (rev 283388)
+++ trunk/Source/_javascript_Core/llint/LowLevelInterpreter.asm 2021-10-01 19:06:05 UTC (rev 283389)
@@ -2504,6 +2504,9 @@
end
else
+ macro loadBaselineJITConstantPool()
+ end
+
macro setupReturnToBaselineAfterCheckpointExitIfNeeded()
end
end
Modified: trunk/Source/_javascript_Core/llint/LowLevelInterpreter32_64.asm (283388 => 283389)
--- trunk/Source/_javascript_Core/llint/LowLevelInterpreter32_64.asm 2021-10-01 19:01:08 UTC (rev 283388)
+++ trunk/Source/_javascript_Core/llint/LowLevelInterpreter32_64.asm 2021-10-01 19:06:05 UTC (rev 283389)
@@ -499,6 +499,9 @@
cCall2(_llint_loop_osr)
btpz r0, .recover
move r1, sp
+
+ loadBaselineJITConstantPool()
+
jmp r0
.recover:
loadi ArgumentCountIncludingThis + TagOffset[cfr], PC