Reviewers: Toon Verwaest,
Message:
PTAL.
Description:
Adjust various constants, zero out the high 32bit of PC and FP for x32 port
Please review this at https://codereview.chromium.org/216513003/
SVN Base: https://v8.googlecode.com/svn/branches/bleeding_edge
Affected files (+63, -15 lines):
M src/x64/assembler-x64.h
M src/x64/assembler-x64-inl.h
M src/x64/builtins-x64.cc
M src/x64/code-stubs-x64.cc
M src/x64/codegen-x64.cc
M src/x64/deoptimizer-x64.cc
M src/x64/frames-x64.h
M src/x64/full-codegen-x64.cc
Index: src/x64/assembler-x64-inl.h
diff --git a/src/x64/assembler-x64-inl.h b/src/x64/assembler-x64-inl.h
index
a559b62758ac88b4ff4ce0f6561cd96eefd172bb..1c52f009441ca134011143d95108b630683ab56c
100644
--- a/src/x64/assembler-x64-inl.h
+++ b/src/x64/assembler-x64-inl.h
@@ -43,7 +43,8 @@ namespace internal {
static const byte kCallOpcode = 0xE8;
-static const int kNoCodeAgeSequenceLength = 6;
+// The length of pushq(rbp), movp(rbp, rsp), Push(rsi) and Push(rdi).
+static const int kNoCodeAgeSequenceLength =
+    kPointerSize == kInt64Size ? 6 : 17;
void Assembler::emitl(uint32_t x) {
Index: src/x64/assembler-x64.h
diff --git a/src/x64/assembler-x64.h b/src/x64/assembler-x64.h
index
1caa640b178a044bdd2562470645dd29d1f2822b..6081ed2634400190800eb60649d824e5ee34f2f0
100644
--- a/src/x64/assembler-x64.h
+++ b/src/x64/assembler-x64.h
@@ -357,6 +357,10 @@ inline Condition ReverseCondition(Condition cc) {
class Immediate BASE_EMBEDDED {
public:
explicit Immediate(int32_t value) : value_(value) {}
+ explicit Immediate(Smi* value) {
+ ASSERT(SmiValuesAre31Bits()); // Only available for 31-bit SMI.
+ value_ = reinterpret_cast<intptr_t>(value);
+ }
private:
int32_t value_;
Index: src/x64/builtins-x64.cc
diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc
index
66ac55670d0f4000ca596a7e09a6cd99f8cd6c72..ef1f9db0d7e28ac4dce5cb3830ee3aeebb775c5e
100644
--- a/src/x64/builtins-x64.cc
+++ b/src/x64/builtins-x64.cc
@@ -749,7 +749,15 @@ static void
Generate_NotifyStubFailureHelper(MacroAssembler* masm,
// Tear down internal frame.
}
- __ Pop(MemOperand(rsp, 0)); // Ignore state offset
+ if (kPointerSize == kRegisterSize) {
+ __ Pop(MemOperand(rsp, 0)); // Ignore state offset
+ } else {
+ ASSERT(kRegisterSize == 2 * kPointerSize);
+ __ PopReturnAddressTo(kScratchRegister);
+    __ leap(rsp, Operand(rsp, kPointerSize));  // Drop state (one pointer slot).
+ __ PushReturnAddressFrom(kScratchRegister);
+ }
+
__ ret(0); // Return to IC Miss stub, continuation still on stack.
}
Index: src/x64/code-stubs-x64.cc
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index
ff5bf1d03104d2e715219948f182cf3d16640357..14ab957b69fcc8f9b22c95817aab0d5dcca73130
100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -2800,17 +2800,19 @@ void InstanceofStub::Generate(MacroAssembler* masm)
{
// indicate that the value is not an instance.
static const int kOffsetToMapCheckValue = 2;
- static const int kOffsetToResultValue = 18;
+  static const int kOffsetToResultValue =
+      kPointerSize == kInt64Size ? 18 : 14;
// The last 4 bytes of the instruction sequence
- // movq(rdi, FieldOperand(rax, HeapObject::kMapOffset))
+ // movp(rdi, FieldOperand(rax, HeapObject::kMapOffset))
// Move(kScratchRegister, Factory::the_hole_value())
// in front of the hole value address.
- static const unsigned int kWordBeforeMapCheckValue = 0xBA49FF78;
+ static const unsigned int kWordBeforeMapCheckValue =
+ kPointerSize == kInt64Size ? 0xBA49FF78 : 0xBA41FF78;
// The last 4 bytes of the instruction sequence
// __ j(not_equal, &cache_miss);
// __ LoadRoot(ToRegister(instr->result()),
Heap::kTheHoleValueRootIndex);
// before the offset of the hole value in the root array.
- static const unsigned int kWordBeforeResultValue = 0x458B4906;
+ static const unsigned int kWordBeforeResultValue =
+ kPointerSize == kInt64Size ? 0x458B4906 : 0x458B4106;
// Only the inline check flag is supported on X64.
ASSERT(flags_ == kNoFlags || HasCallSiteInlineCheck());
int extra_argument_offset = HasCallSiteInlineCheck() ? 1 : 0;
Index: src/x64/codegen-x64.cc
diff --git a/src/x64/codegen-x64.cc b/src/x64/codegen-x64.cc
index
afcf58171b49ee4010a756707668f703000969e1..aadedf242e150d40ece01faea40c12c38fd972bb
100644
--- a/src/x64/codegen-x64.cc
+++ b/src/x64/codegen-x64.cc
@@ -259,9 +259,14 @@ void ElementsTransitionGenerator::GenerateSmiToDouble(
// Check backing store for COW-ness. For COW arrays we have to
// allocate a new backing store.
__ SmiToInteger32(r9, FieldOperand(r8, FixedDoubleArray::kLengthOffset));
- __ CompareRoot(FieldOperand(r8, HeapObject::kMapOffset),
- Heap::kFixedCOWArrayMapRootIndex);
- __ j(equal, &new_backing_store);
+ if (kPointerSize == kDoubleSize) {
+ __ CompareRoot(FieldOperand(r8, HeapObject::kMapOffset),
+ Heap::kFixedCOWArrayMapRootIndex);
+ __ j(equal, &new_backing_store);
+ } else {
+ ASSERT(kDoubleSize == 2 * kPointerSize);
+ __ jmp(&new_backing_store);
+ }
// Check if the backing store is in new-space. If not, we need to
allocate
// a new one since the old one is in pointer-space.
// If in new space, we can reuse the old backing store because it is
Index: src/x64/deoptimizer-x64.cc
diff --git a/src/x64/deoptimizer-x64.cc b/src/x64/deoptimizer-x64.cc
index
4bc644defefe8924695d861d49c2bf523f6c874b..1b957d05a81cf358b17bb0e8426bec9eb93c84b6
100644
--- a/src/x64/deoptimizer-x64.cc
+++ b/src/x64/deoptimizer-x64.cc
@@ -230,7 +230,13 @@ void Deoptimizer::EntryGenerator::Generate() {
// Fill in the input registers.
for (int i = kNumberOfRegisters -1; i >= 0; i--) {
int offset = (i * kPointerSize) + FrameDescription::registers_offset();
- __ Pop(Operand(rbx, offset));
+ if (kPointerSize == kRegisterSize) {
+ __ Pop(Operand(rbx, offset));
+ } else {
+ ASSERT(kRegisterSize == 2 * kPointerSize);
+ __ popq(kScratchRegister);
+ __ movp(Operand(rbx, offset), kScratchRegister);
+ }
}
// Fill in the double input registers.
@@ -307,13 +313,25 @@ void Deoptimizer::EntryGenerator::Generate() {
// Push state, pc, and continuation from the last output frame.
__ Push(Operand(rbx, FrameDescription::state_offset()));
+ if (kPCOnStackSize == 2 * kPointerSize) {
+ __ Push(Immediate(0));
+ }
__ Push(Operand(rbx, FrameDescription::pc_offset()));
+ if (kFPOnStackSize == 2 * kPointerSize) {
+ __ Push(Immediate(0));
+ }
__ Push(Operand(rbx, FrameDescription::continuation_offset()));
// Push the registers from the last output frame.
for (int i = 0; i < kNumberOfRegisters; i++) {
int offset = (i * kPointerSize) + FrameDescription::registers_offset();
- __ Push(Operand(rbx, offset));
+ if (kPointerSize == kRegisterSize) {
+ __ Push(Operand(rbx, offset));
+ } else {
+ ASSERT(kRegisterSize == 2 * kPointerSize);
+ __ movp(kScratchRegister, Operand(rbx, offset));
+ __ pushq(kScratchRegister);
+ }
}
// Restore the registers from the stack.
@@ -352,11 +370,17 @@ void
Deoptimizer::TableEntryGenerator::GeneratePrologue() {
void FrameDescription::SetCallerPc(unsigned offset, intptr_t value) {
+ if (kPCOnStackSize == 2 * kPointerSize) {
+ SetFrameSlot(offset + kPointerSize, 0);
+ }
SetFrameSlot(offset, value);
}
void FrameDescription::SetCallerFp(unsigned offset, intptr_t value) {
+ if (kFPOnStackSize == 2 * kPointerSize) {
+ SetFrameSlot(offset + kPointerSize, 0);
+ }
SetFrameSlot(offset, value);
}
Index: src/x64/frames-x64.h
diff --git a/src/x64/frames-x64.h b/src/x64/frames-x64.h
index
0faa3497ece68f652b74b4506330b77b0e19c218..89dab1a699460d3e0005c79998acfc24df2e14fd
100644
--- a/src/x64/frames-x64.h
+++ b/src/x64/frames-x64.h
@@ -133,6 +133,9 @@ inline Object* JavaScriptFrame::function_slot_object()
const {
inline void StackHandler::SetFp(Address slot, Address fp) {
+ if (kFPOnStackSize == 2 * kPointerSize) {
+ Memory::Address_at(slot + kPointerSize) = 0;
+ }
Memory::Address_at(slot) = fp;
}
Index: src/x64/full-codegen-x64.cc
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index
b57cf6887fa694ef91aef1eb35d78eb45a3eacd9..487cc2892dffa4a50d52885ed250501255cad9ba
100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -434,9 +434,10 @@ void FullCodeGenerator::EmitReturnSequence() {
#ifdef ENABLE_DEBUGGER_SUPPORT
// Add padding that will be overwritten by a debugger breakpoint. We
-  // have just generated at least 7 bytes: "movq rsp, rbp; pop rbp; ret k"
-  // (3 + 1 + 3).
-  const int kPadding = Assembler::kJSReturnSequenceLength - 7;
+  // have just generated at least 7 bytes: "movp rsp, rbp; pop rbp; ret k"
+  // (3 + 1 + 3) for x64 and at least 6 bytes for x32.
+  // NOTE(review): the conditional must be parenthesized — without the parens
+  // this parses as ((kJSReturnSequenceLength - kPointerSize) == kInt64Size) ? 7 : 6,
+  // since '-' and '==' both bind tighter than '?:'.
+  const int kPadding = Assembler::kJSReturnSequenceLength -
+                       (kPointerSize == kInt64Size ? 7 : 6);
for (int i = 0; i < kPadding; ++i) {
masm_->int3();
}
@@ -4859,7 +4860,7 @@ FullCodeGenerator::NestedStatement*
FullCodeGenerator::TryFinally::Exit(
static const byte kJnsInstruction = 0x79;
-static const byte kJnsOffset = 0x1d;
+static const byte kJnsOffset = kPointerSize == kInt64Size ? 0x1d : 0x14;
static const byte kNopByteOne = 0x66;
static const byte kNopByteTwo = 0x90;
#ifdef DEBUG
--
--
v8-dev mailing list
[email protected]
http://groups.google.com/group/v8-dev
---
You received this message because you are subscribed to the Google Groups "v8-dev" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
For more options, visit https://groups.google.com/d/optout.