Diff
Modified: trunk/Source/_javascript_Core/ChangeLog (229443 => 229444)
--- trunk/Source/_javascript_Core/ChangeLog 2018-03-09 01:32:51 UTC (rev 229443)
+++ trunk/Source/_javascript_Core/ChangeLog 2018-03-09 01:38:57 UTC (rev 229444)
@@ -1,3 +1,37 @@
+2018-03-08 Michael Saboff <msab...@apple.com>
+
+ Emit code to zero the stack frame on function entry
+ https://bugs.webkit.org/show_bug.cgi?id=183391
+
+ Reviewed by Mark Lam.
+
+ Added code to zero incoming stack frame behind a new JSC option, zeroStackFrame.
+ The default setting of the option is off.
+
+ Did some minor refactoring of the YarrJIT stack alignment code.
+
+ * b3/air/AirCode.cpp:
+ (JSC::B3::Air::defaultPrologueGenerator):
+ * dfg/DFGJITCompiler.cpp:
+ (JSC::DFG::JITCompiler::compile):
+ (JSC::DFG::JITCompiler::compileFunction):
+ * dfg/DFGSpeculativeJIT.cpp:
+ (JSC::DFG::SpeculativeJIT::compileCurrentBlock):
+ * dfg/DFGThunks.cpp:
+ (JSC::DFG::osrEntryThunkGenerator):
+ * ftl/FTLLowerDFGToB3.cpp:
+ (JSC::FTL::DFG::LowerDFGToB3::lower):
+ * jit/AssemblyHelpers.h:
+ (JSC::AssemblyHelpers::clearStackFrame):
+ * jit/JIT.cpp:
+ (JSC::JIT::compileWithoutLinking):
+ * llint/LowLevelInterpreter.asm:
+ * runtime/Options.h:
+ * yarr/YarrJIT.cpp:
+ (JSC::Yarr::YarrGenerator::alignCallFrameSizeInBytes):
+ (JSC::Yarr::YarrGenerator::initCallFrame):
+ (JSC::Yarr::YarrGenerator::removeCallFrame):
+
2018-03-08 Keith Miller <keith_mil...@apple.com>
Unreviewed, another attempt at fixing the Windows build.
Modified: trunk/Source/_javascript_Core/b3/air/AirCode.cpp (229443 => 229444)
--- trunk/Source/_javascript_Core/b3/air/AirCode.cpp 2018-03-09 01:32:51 UTC (rev 229443)
+++ trunk/Source/_javascript_Core/b3/air/AirCode.cpp 2018-03-09 01:38:57 UTC (rev 229444)
@@ -44,7 +44,9 @@
jit.emitFunctionPrologue();
if (code.frameSize()) {
AllowMacroScratchRegisterUsageIf allowScratch(jit, isARM64());
- jit.addPtr(CCallHelpers::TrustedImm32(-code.frameSize()), MacroAssembler::stackPointerRegister);
+ jit.addPtr(MacroAssembler::TrustedImm32(-code.frameSize()), MacroAssembler::framePointerRegister, MacroAssembler::stackPointerRegister);
+ if (Options::zeroStackFrame())
+ jit.clearStackFrame(MacroAssembler::framePointerRegister, MacroAssembler::stackPointerRegister, GPRInfo::nonArgGPR0, code.frameSize());
}
jit.emitSave(code.calleeSaveRegisterAtOffsetList());
Modified: trunk/Source/_javascript_Core/dfg/DFGJITCompiler.cpp (229443 => 229444)
--- trunk/Source/_javascript_Core/dfg/DFGJITCompiler.cpp 2018-03-09 01:32:51 UTC (rev 229443)
+++ trunk/Source/_javascript_Core/dfg/DFGJITCompiler.cpp 2018-03-09 01:38:57 UTC (rev 229444)
@@ -388,7 +388,9 @@
JumpList stackOverflow;
emitStackOverflowCheck(*this, stackOverflow);
- addPtr(TrustedImm32(m_graph.stackPointerOffset() * sizeof(Register)), GPRInfo::callFrameRegister, stackPointerRegister);
+ addPtr(TrustedImm32(-(m_graph.frameRegisterCount() * sizeof(Register))), GPRInfo::callFrameRegister, stackPointerRegister);
+ if (Options::zeroStackFrame())
+ clearStackFrame(GPRInfo::callFrameRegister, stackPointerRegister, GPRInfo::regT0, m_graph.frameRegisterCount() * sizeof(Register));
checkStackPointerAlignment();
compileSetupRegistersForEntry();
compileEntryExecutionFlag();
@@ -454,7 +456,9 @@
emitStackOverflowCheck(*this, stackOverflow);
// Move the stack pointer down to accommodate locals
- addPtr(TrustedImm32(m_graph.stackPointerOffset() * sizeof(Register)), GPRInfo::callFrameRegister, stackPointerRegister);
+ addPtr(TrustedImm32(-(m_graph.frameRegisterCount() * sizeof(Register))), GPRInfo::callFrameRegister, stackPointerRegister);
+ if (Options::zeroStackFrame())
+ clearStackFrame(GPRInfo::callFrameRegister, stackPointerRegister, GPRInfo::regT0, m_graph.frameRegisterCount() * sizeof(Register));
checkStackPointerAlignment();
compileSetupRegistersForEntry();
Modified: trunk/Source/_javascript_Core/dfg/DFGSpeculativeJIT.cpp (229443 => 229444)
--- trunk/Source/_javascript_Core/dfg/DFGSpeculativeJIT.cpp 2018-03-09 01:32:51 UTC (rev 229443)
+++ trunk/Source/_javascript_Core/dfg/DFGSpeculativeJIT.cpp 2018-03-09 01:38:57 UTC (rev 229444)
@@ -1761,7 +1761,9 @@
}
if (m_block->isCatchEntrypoint) {
- m_jit.addPtr(CCallHelpers::TrustedImm32(m_jit.graph().stackPointerOffset() * sizeof(Register)), GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
+ m_jit.addPtr(CCallHelpers::TrustedImm32(-(m_jit.graph().frameRegisterCount() * sizeof(Register))), GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
+ if (Options::zeroStackFrame())
+ m_jit.clearStackFrame(GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister, GPRInfo::regT0, m_jit.graph().frameRegisterCount() * sizeof(Register));
m_jit.emitSaveCalleeSaves();
m_jit.emitMaterializeTagCheckRegisters();
m_jit.emitPutToCallFrameHeader(m_jit.codeBlock(), CallFrameSlot::codeBlock);
Modified: trunk/Source/_javascript_Core/ftl/FTLLowerDFGToB3.cpp (229443 => 229444)
--- trunk/Source/_javascript_Core/ftl/FTLLowerDFGToB3.cpp 2018-03-09 01:32:51 UTC (rev 229443)
+++ trunk/Source/_javascript_Core/ftl/FTLLowerDFGToB3.cpp 2018-03-09 01:38:57 UTC (rev 229444)
@@ -174,6 +174,9 @@
[codeBlock] (CCallHelpers& jit, B3::Air::Code& code) {
AllowMacroScratchRegisterUsage allowScratch(jit);
jit.addPtr(CCallHelpers::TrustedImm32(-code.frameSize()), GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister);
+ if (Options::zeroStackFrame())
+ jit.clearStackFrame(GPRInfo::callFrameRegister, CCallHelpers::stackPointerRegister, GPRInfo::regT0, code.frameSize());
+
jit.emitSave(code.calleeSaveRegisterAtOffsetList());
jit.emitPutToCallFrameHeader(codeBlock, CallFrameSlot::codeBlock);
});
Modified: trunk/Source/_javascript_Core/jit/AssemblyHelpers.h (229443 => 229444)
--- trunk/Source/_javascript_Core/jit/AssemblyHelpers.h 2018-03-09 01:32:51 UTC (rev 229443)
+++ trunk/Source/_javascript_Core/jit/AssemblyHelpers.h 2018-03-09 01:38:57 UTC (rev 229444)
@@ -38,6 +38,7 @@
#include "MarkedSpace.h"
#include "RegisterAtOffsetList.h"
#include "RegisterSet.h"
+#include "StackAlignment.h"
#include "TagRegistersMode.h"
#include "TypeofType.h"
#include "VM.h"
@@ -455,6 +456,30 @@
#endif
}
+ void clearStackFrame(GPRReg currentTop, GPRReg newTop, GPRReg temp, unsigned frameSize)
+ {
+ ASSERT(frameSize % stackAlignmentBytes() == 0);
+ if (frameSize <= 128) {
+ for (unsigned offset = 0; offset < frameSize; offset += sizeof(intptr_t))
+ storePtr(TrustedImm32(0), Address(currentTop, -8 - offset));
+ } else {
+ constexpr unsigned storeBytesPerIteration = stackAlignmentBytes();
+ constexpr unsigned storesPerIteration = storeBytesPerIteration / sizeof(intptr_t);
+
+ move(currentTop, temp);
+ Label zeroLoop = label();
+ subPtr(TrustedImm32(storeBytesPerIteration), temp);
+#if CPU(ARM64)
+ static_assert(storesPerIteration == 2, "clearStackFrame() for ARM64 assumes stack is 16 byte aligned");
+ storePair64(ARM64Registers::zr, ARM64Registers::zr, temp);
+#else
+ for (unsigned i = storesPerIteration; i-- != 0;)
+ storePtr(TrustedImm32(0), Address(temp, sizeof(intptr_t) * i));
+#endif
+ branchPtr(NotEqual, temp, newTop).linkTo(zeroLoop, this);
+ }
+ }
+
#if CPU(X86_64) || CPU(X86)
static size_t prologueStackPointerDelta()
{
Modified: trunk/Source/_javascript_Core/jit/JIT.cpp (229443 => 229444)
--- trunk/Source/_javascript_Core/jit/JIT.cpp 2018-03-09 01:32:51 UTC (rev 229443)
+++ trunk/Source/_javascript_Core/jit/JIT.cpp 2018-03-09 01:38:57 UTC (rev 229444)
@@ -688,6 +688,8 @@
move(regT1, stackPointerRegister);
checkStackPointerAlignment();
+ if (Options::zeroStackFrame())
+ clearStackFrame(callFrameRegister, stackPointerRegister, regT0, maxFrameSize);
emitSaveCalleeSaves();
emitMaterializeTagCheckRegisters();
Modified: trunk/Source/_javascript_Core/llint/LowLevelInterpreter.asm (229443 => 229444)
--- trunk/Source/_javascript_Core/llint/LowLevelInterpreter.asm 2018-03-09 01:32:51 UTC (rev 229443)
+++ trunk/Source/_javascript_Core/llint/LowLevelInterpreter.asm 2018-03-09 01:38:57 UTC (rev 229444)
@@ -1107,7 +1107,19 @@
subp cfr, t0, t0
.stackHeightOK:
- move t0, sp
+ if X86_64 or ARM64
+ # We need to start zeroing from sp as it has been adjusted after saving callee saves.
+ move sp, t2
+ move t0, sp
+.zeroStackLoop:
+ bpeq sp, t2, .zeroStackDone
+ subp PtrSize, t2
+ storep 0, [t2]
+ jmp .zeroStackLoop
+.zeroStackDone:
+ else
+ move t0, sp
+ end
if JSVALUE64
move TagTypeNumber, tagTypeNumber
Modified: trunk/Source/_javascript_Core/runtime/Options.h (229443 => 229444)
--- trunk/Source/_javascript_Core/runtime/Options.h 2018-03-09 01:32:51 UTC (rev 229443)
+++ trunk/Source/_javascript_Core/runtime/Options.h 2018-03-09 01:38:57 UTC (rev 229444)
@@ -464,6 +464,7 @@
v(bool, enableSpectreMitigations, true, Restricted, "Enable Spectre mitigations.") \
v(bool, enableSpectreGadgets, false, Restricted, "enable gadgets to test Spectre mitigations.") \
v(bool, usePoisoning, true, Normal, "Poison is randomized at load time when true, and initialized to 0 if false which defeats some Spectre and type confusion mitigations, but allows tools such as leak detectors to function better.") \
+ v(bool, zeroStackFrame, true, Normal, "Zero stack frame on entry to a function.") \
\
v(bool, useAsyncIterator, enableAsyncIteration, Normal, "Allow to use Async Iterator in JS.") \
\
Modified: trunk/Source/_javascript_Core/yarr/YarrJIT.cpp (229443 => 229444)
--- trunk/Source/_javascript_Core/yarr/YarrJIT.cpp 2018-03-09 01:32:51 UTC (rev 229443)
+++ trunk/Source/_javascript_Core/yarr/YarrJIT.cpp 2018-03-09 01:38:57 UTC (rev 229444)
@@ -609,25 +609,49 @@
unsigned alignCallFrameSizeInBytes(unsigned callFrameSize)
{
+ if (!callFrameSize)
+ return 0;
+
callFrameSize *= sizeof(void*);
if (callFrameSize / sizeof(void*) != m_pattern.m_body->m_callFrameSize)
CRASH();
callFrameSize = (callFrameSize + 0x3f) & ~0x3f;
- if (!callFrameSize)
- CRASH();
return callFrameSize;
}
void initCallFrame()
{
- unsigned callFrameSize = m_pattern.m_body->m_callFrameSize;
- if (callFrameSize)
- subPtr(Imm32(alignCallFrameSizeInBytes(callFrameSize)), stackPointerRegister);
+ unsigned callFrameSizeInBytes = alignCallFrameSizeInBytes(m_pattern.m_body->m_callFrameSize);
+ if (callFrameSizeInBytes) {
+#if CPU(X86_64) || CPU(ARM64)
+ if (Options::zeroStackFrame()) {
+ // We need to start from the stack pointer, because we could have spilled callee saves
+ move(stackPointerRegister, regT0);
+ subPtr(Imm32(callFrameSizeInBytes), stackPointerRegister);
+ if (callFrameSizeInBytes <= 128) {
+ for (unsigned offset = 0; offset < callFrameSizeInBytes; offset += sizeof(intptr_t))
+ storePtr(TrustedImm32(0), Address(regT0, -8 - offset));
+ } else {
+ Label zeroLoop = label();
+ subPtr(TrustedImm32(sizeof(intptr_t) * 2), regT0);
+#if CPU(ARM64)
+ storePair64(ARM64Registers::zr, ARM64Registers::zr, regT0);
+#else
+ storePtr(TrustedImm32(0), Address(regT0));
+ storePtr(TrustedImm32(0), Address(regT0, sizeof(intptr_t)));
+#endif
+ branchPtr(NotEqual, regT0, stackPointerRegister).linkTo(zeroLoop, this);
+ }
+ } else
+#endif
+ subPtr(Imm32(callFrameSizeInBytes), stackPointerRegister);
+
+ }
}
void removeCallFrame()
{
- unsigned callFrameSize = m_pattern.m_body->m_callFrameSize;
- if (callFrameSize)
- addPtr(Imm32(alignCallFrameSizeInBytes(callFrameSize)), stackPointerRegister);
+ unsigned callFrameSizeInBytes = alignCallFrameSizeInBytes(m_pattern.m_body->m_callFrameSize);
+ if (callFrameSizeInBytes)
+ addPtr(Imm32(callFrameSizeInBytes), stackPointerRegister);
}
void generateFailReturn()